
import os
import shutil
import sys
import tempfile

from . import USER_CONFIG_DIR
from .torch_utils import TORCH_1_9


def find_free_network_port() -> int:
    """
    Find a free port on localhost.

    It is useful in single-node training when we don't want to connect to a real main node but have to set the
    `MASTER_PORT` environment variable.

    Returns:
        (int): The available network port number.
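
    Examples:
        An illustrative sketch of reserving a port for the `MASTER_PORT` variable:
        >>> import os
        >>> os.environ["MASTER_PORT"] = str(find_free_network_port())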
    """
    import socket

    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.bind(("127.0.0.1", 0))  # binding to port 0 makes the OS pick a free ephemeral port
        return s.getsockname()[1]


def generate_ddp_file(trainer):
    """
    Generate a DDP (Distributed Data Parallel) file for multi-GPU training.

    This function creates a temporary Python file that enables distributed training across multiple GPUs.
    The file contains the necessary configuration to initialize the trainer in a distributed environment.

    Args:
        trainer (ultralytics.engine.trainer.BaseTrainer): The trainer containing training configuration and arguments.
            Must expose an `args` attribute containing the training arguments.

    Returns:
        (str): Path to the generated temporary DDP file.

    Notes:
        The generated file is saved in the USER_CONFIG_DIR/DDP directory and includes:
        - Trainer class import
        - Configuration overrides from the trainer arguments
        - Model path configuration
        - Training initialization code
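
    Examples:
        Assuming `trainer` is a hypothetical BaseTrainer instance, as in `ddp_cleanup` below:
        >>> ddp_file = generate_ddp_file(trainer)  # temp file path ending in f"{id(trainer)}.py"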
    """
    module, name = f"{trainer.__class__.__module__}.{trainer.__class__.__name__}".rsplit(".", 1)

    content = f"""
# Ultralytics Multi-GPU training temp file (should be automatically deleted after use)
overrides = {vars(trainer.args)}

if __name__ == "__main__":
    from z import z
    from ultralytics.utils import DEFAULT_CFG_DICT

    cfg = DEFAULT_CFG_DICT.copy()
    cfg.update(save_dir='')   # handle the extra key 'save_dir'
    trainer = {name}(cfg=cfg, overrides=overrides)
    trainer.args.model = "{getattr(trainer.hub_session, 'model_url', trainer.args.model)}"
    results = trainer.train()
"""
    (USER_CONFIG_DIR / "DDP").mkdir(exist_ok=True)
    with tempfile.NamedTemporaryFile(
        prefix="_temp_",
        suffix=f"{id(trainer)}.py",  # id(trainer) ties the temp file to this trainer instance
        mode="w+",
        encoding="utf-8",
        dir=USER_CONFIG_DIR / "DDP",
        delete=False,
    ) as file:
        file.write(content)
    return file.name


def generate_ddp_command(world_size, trainer):
    """
    Generate command for distributed training.

    Args:
        world_size (int): Number of processes to spawn for distributed training.
        trainer (ultralytics.engine.trainer.BaseTrainer): The trainer containing configuration for distributed training.

    Returns:
        cmd (List[str]): The command to execute for distributed training.
        file (str): Path to the temporary file created for DDP training.
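
    Examples:
        A sketch of the launch/cleanup cycle, assuming a hypothetical `trainer` as in `ddp_cleanup`:
        >>> import subprocess
        >>> cmd, file = generate_ddp_command(world_size=2, trainer=trainer)
        >>> subprocess.run(cmd, check=True)
        >>> ddp_cleanup(trainer, file)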
    """
    import __main__  # noqa local import to avoid https://github.com/Lightning-AI/lightning/issues/15218

    if not trainer.resume:
        shutil.rmtree(trainer.save_dir)  # remove the save_dir so training starts from a clean directory
    file = generate_ddp_file(trainer)
    dist_cmd = "torch.distributed.run" if TORCH_1_9 else "torch.distributed.launch"
    port = find_free_network_port()
    cmd = [sys.executable, "-m", dist_cmd, "--nproc_per_node", f"{world_size}", "--master_port", f"{port}", file]
    return cmd, file


def ddp_cleanup(trainer, file):
    """
    Delete temporary file if created during distributed data parallel (DDP) training.

    This function checks if the provided file contains the trainer's ID in its name, indicating it was created
    as a temporary file for DDP training, and deletes it if so.

    Args:
        trainer (ultralytics.engine.trainer.BaseTrainer): The trainer used for distributed training.
        file (str): Path to the file that might need to be deleted.

    Examples:
        >>> trainer = YOLOTrainer()
        >>> file = "/tmp/ddp_temp_123456789.py"
        >>> ddp_cleanup(trainer, file)
    """
    if f"{id(trainer)}.py" in file:  # only delete the temp file generated for this trainer instance
        os.remove(file)