import torch

from nemo.utils.env_var_parsing import get_envint


def is_global_rank_zero():
    """Helper function to determine if the current process is global_rank 0 (the main process)."""
    # When launched with torchrun or torch.distributed.launch, every worker
    # receives its global rank in the RANK environment variable.
    rank = get_envint("RANK", None)
    if rank is not None:
        return rank == 0

    # Under SLURM (e.g. srun), the global rank is exposed as SLURM_PROCID.
    slurm_rank = get_envint("SLURM_PROCID", None)
    if slurm_rank is not None:
        return slurm_rank == 0

    # Under OpenMPI (mpirun), the global rank is exposed as OMPI_COMM_WORLD_RANK.
    mpi_rank = get_envint("OMPI_COMM_WORLD_RANK", None)
    if mpi_rank is not None:
        return mpi_rank == 0

    # Otherwise, fall back to the node rank and local rank: this process is
    # global rank 0 only if it is the first process (LOCAL_RANK 0) on the
    # first node (NODE_RANK, or GROUP_RANK with some launchers).
    node_rank = get_envint("NODE_RANK", get_envint("GROUP_RANK", 0))
    local_rank = get_envint("LOCAL_RANK", 0)
    return node_rank == 0 and local_rank == 0
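# Illustrative usage (a sketch, not part of this module): ``is_global_rank_zero``
# is typically used to guard one-time side effects so they run only on the main
# process, e.g.
#
#     if is_global_rank_zero():
#         print("Runs once per job, on the main process only.")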


def get_rank():
    """Helper function that returns 0 if the current process is global rank zero
    (as determined by ``is_global_rank_zero``), and ``torch.distributed.get_rank()``
    otherwise.
    """
    if is_global_rank_zero():
        return 0
    return torch.distributed.get_rank()


def get_last_rank() -> int:
    """Get the last rank in the distributed group."""
    if not torch.distributed.is_initialized():
        return 0
    return torch.distributed.get_world_size() - 1
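

# Minimal usage sketch (illustrative only, not part of the NeMo API): in a plain
# single-process run no rank-related environment variables are set and
# torch.distributed is never initialized, so the helpers fall back to their
# single-process defaults.
if __name__ == "__main__":
    print(f"is_global_rank_zero(): {is_global_rank_zero()}")  # True
    print(f"get_rank():            {get_rank()}")             # 0
    print(f"get_last_rank():       {get_last_rank()}")        # 0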