
# Ultralytics 🚀 AGPL-3.0 License - https://ultralytics.com/license

import os
import shutil
import sys
import tempfile

from . import USER_CONFIG_DIR
from .torch_utils import TORCH_1_9


def find_free_network_port() -> int:
    """
    Find a free port on localhost.

    It is useful in single-node training when we don't want to connect to a real main node but have to set the
    `MASTER_PORT` environment variable.

    Returns:
        (int): The available network port number.
    """
    import socket

    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.bind(("127.0.0.1", 0))
        return s.getsockname()[1]  # port
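

# Illustrative usage (a sketch, not part of this module's API): in single-node
# training the free port is typically exported as MASTER_PORT so DDP workers can
# rendezvous without a real main node:
#
#     os.environ["MASTER_PORT"] = str(find_free_network_port())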
A'c                 (   U R                   R                   SU R                   R                   3R                  SS5      u  pS[	        U R
                  5       SU SU SU S[        U R                  SU R
                  R                  5       S	3n[        S
-  R                  SS9  [        R                  " S[        U 5       S3SS[        S
-  SS9 nUR                  U5        SSS5        UR                  $ ! , (       d  f       WR                  $ = f)a  
Generate a DDP (Distributed Data Parallel) file for multi-GPU training.

This function creates a temporary Python file that enables distributed training across multiple GPUs.
The file contains the necessary configuration to initialize the trainer in a distributed environment.

Args:
    trainer (ultralytics.engine.trainer.BaseTrainer): The trainer containing training configuration and arguments.
        Must have args attribute and be a class instance.

Returns:
    (str): Path to the generated temporary DDP file.

Notes:
    The generated file is saved in the USER_CONFIG_DIR/DDP directory and includes:
    - Trainer class import
    - Configuration overrides from the trainer arguments
    - Model path configuration
    - Training initialization code
.r   zd
# Ultralytics Multi-GPU training temp file (should be automatically deleted after use)
overrides = z&

if __name__ == "__main__":
    from z import z
    from ultralytics.utils import DEFAULT_CFG_DICT

    cfg = DEFAULT_CFG_DICT.copy()
    cfg.update(save_dir='')   # handle the extra key 'save_dir'
    trainer = z9(cfg=cfg, overrides=overrides)
    trainer.args.model = "	model_urlz "
    results = trainer.train()
DDPT)exist_ok_temp_.pyzw+zutf-8F)prefixsuffixmodeencodingdirdeleteN)	__class__
__module____name__rsplitvarsargsgetattrhub_sessionmodelr   mkdirtempfileNamedTemporaryFileidwritename)trainermoduler+   contentfiles        r   generate_ddp_filer0      s#   * ''2231W5F5F5O5O4PQXXY\^_`LF',,  !
 $  
 f "7#6#6W\\EWEWXY ZG u##T#2		$	$W+c"e#
 


7
 99
 
 99s   C88
Dc                    SSK nU R                  (       d   [        R                  " U R                  5        [        U 5      n[        (       a  SOSn[        5       n[        R                  SUSU R                   SU U/nXR4$ )a=  
    Generate command for distributed training.

    Args:
        trainer (ultralytics.engine.trainer.BaseTrainer): The trainer containing configuration for distributed training.

    Returns:
        cmd (list[str]): The command to execute for distributed training.
        file (str): Path to the temporary file created for DDP training.
    """
    import __main__  # noqa local import to avoid https://github.com/Lightning-AI/lightning/issues/15218

    if not trainer.resume:
        shutil.rmtree(trainer.save_dir)  # remove the save_dir
    file = generate_ddp_file(trainer)
    dist_cmd = "torch.distributed.run" if TORCH_1_9 else "torch.distributed.launch"
    port = find_free_network_port()
    cmd = [sys.executable, "-m", dist_cmd, "--nproc_per_node", f"{trainer.world_size}", "--master_port", f"{port}", file]
    return cmd, file
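

# Illustrative output (GPU count and port are assumptions for the example): on
# PyTorch >= 1.9 with trainer.world_size == 2, the returned cmd resembles
#
#     [sys.executable, "-m", "torch.distributed.run", "--nproc_per_node", "2",
#      "--master_port", "47123", "/path/to/_temp_<id>.py"]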
&	C 9    c                 V    [        U 5       S3U;   a  [        R                  " U5        gg)a  
    Delete temporary file if created during distributed data parallel (DDP) training.

    This function checks if the provided file contains the trainer's ID in its name, indicating it was created
    as a temporary file for DDP training, and deletes it if so.

    Args:
        trainer (ultralytics.engine.trainer.BaseTrainer): The trainer used for distributed training.
        file (str): Path to the file that might need to be deleted.

    Examples:
        >>> trainer = YOLOTrainer()
        >>> file = "/tmp/ddp_temp_123456789.py"
        >>> ddp_cleanup(trainer, file)
    """
    if f"{id(trainer)}.py" in file:  # if temp_file suffix in file
        os.remove(file)
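

# End-to-end sketch of how these helpers fit together; the subprocess handling is
# an assumption about the caller (mirroring what a trainer would do), not code
# from this module:
#
#     import subprocess
#
#     cmd, file = generate_ddp_command(trainer)
#     try:
#         subprocess.run(cmd, check=True)  # spawn one process per GPU
#     finally:
#         ddp_cleanup(trainer, file)  # delete the temp DDP script afterwards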