dist.py

# Ultralytics YOLO 🚀, AGPL-3.0 license

import os
import shutil
import socket
import sys
import tempfile

from . import USER_CONFIG_DIR
from .torch_utils import TORCH_1_9


def find_free_network_port() -> int:
    """
    Finds a free port on localhost.

    It is useful in single-node training when we don't want to connect to a real main node but have to set the
    `MASTER_PORT` environment variable.
    """
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.bind(("127.0.0.1", 0))
        return s.getsockname()[1]  # port
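
# Example (sketch, not part of the original module): single-node DDP launches
# typically export the reserved port before spawning workers. Note the socket is
# closed on return, so the port is very likely, but not guaranteed, to still be
# free when torch.distributed later binds it.
#
#     os.environ["MASTER_PORT"] = str(find_free_network_port())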


def generate_ddp_file(trainer):
    """Generates a DDP file and returns its file name."""
    module, name = f"{trainer.__class__.__module__}.{trainer.__class__.__name__}".rsplit(".", 1)

    content = f"""
# Ultralytics Multi-GPU training temp file (should be automatically deleted after use)
overrides = {vars(trainer.args)}

if __name__ == "__main__":
    from {module} import {name}
    from ultralytics.utils import DEFAULT_CFG_DICT

    cfg = DEFAULT_CFG_DICT.copy()
    cfg.update(save_dir='')  # handle the extra key 'save_dir'
    trainer = {name}(cfg=cfg, overrides=overrides)
    trainer.args.model = "{getattr(trainer.hub_session, 'model_url', trainer.args.model)}"
    results = trainer.train()
"""
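    # Write the file under USER_CONFIG_DIR/"DDP" with delete=False so it survives
    # this context manager and can be executed by the spawned DDP workers;
    # ddp_cleanup() removes it once training finishes.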
    (USER_CONFIG_DIR / "DDP").mkdir(exist_ok=True)
    with tempfile.NamedTemporaryFile(
        prefix="_temp_",
        suffix=f"{id(trainer)}.py",
        mode="w+",
        encoding="utf-8",
        dir=USER_CONFIG_DIR / "DDP",
        delete=False,
    ) as file:
        file.write(content)
    return file.name
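
# Illustration (hypothetical values, not produced by this module as-is): for a
# trainer whose args include task='detect' and mode='train', the rendered temp
# file begins roughly like
#
#     overrides = {'task': 'detect', 'mode': 'train', ...}
#     if __name__ == "__main__":
#         from <trainer's module> import DetectionTrainer
#         ...
#
# so each torch.distributed worker can rebuild an identical trainer from the
# baked-in overrides.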


def generate_ddp_command(world_size, trainer):
    """Generates and returns the command for distributed training."""
    import __main__  # noqa local import to avoid https://github.com/Lightning-AI/lightning/issues/15218

    if not trainer.resume:
        shutil.rmtree(trainer.save_dir)  # remove the save_dir
    file = generate_ddp_file(trainer)
    dist_cmd = "torch.distributed.run" if TORCH_1_9 else "torch.distributed.launch"
    port = find_free_network_port()
    cmd = [sys.executable, "-m", dist_cmd, "--nproc_per_node", f"{world_size}", "--master_port", f"{port}", file]
    return cmd, file


def ddp_cleanup(trainer, file):
    """Deletes the temp file if created."""
    if f"{id(trainer)}.py" in file:  # if temp_file suffix in file
        os.remove(file)
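
# Usage sketch (assumption: this mirrors the trainer-side call site, which lives
# outside this module). `world_size` is the number of GPUs and `trainer` is the
# active trainer instance:
#
#     import subprocess
#
#     cmd, file = generate_ddp_command(world_size, trainer)
#     try:
#         subprocess.run(cmd, check=True)  # blocks until all DDP workers exit
#     finally:
#         ddp_cleanup(trainer, str(file))  # delete the temp DDP file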