fixbug: torch 2.0 distributed training with incorrect local rank (#1600)
jihanyang authored Apr 15, 2024
1 parent 255db8f commit 839d8dd
Showing 1 changed file with 4 additions and 1 deletion.
tools/train.py: 4 additions & 1 deletion
@@ -33,7 +33,7 @@ def parse_config():
     parser.add_argument('--sync_bn', action='store_true', default=False, help='whether to use sync bn')
     parser.add_argument('--fix_random_seed', action='store_true', default=False, help='')
     parser.add_argument('--ckpt_save_interval', type=int, default=1, help='number of training epochs')
-    parser.add_argument('--local_rank', type=int, default=0, help='local rank for distributed training')
+    parser.add_argument('--local_rank', type=int, default=None, help='local rank for distributed training')
     parser.add_argument('--max_ckpt_save_num', type=int, default=30, help='max number of saved checkpoint')
     parser.add_argument('--merge_all_iters_to_one_epoch', action='store_true', default=False, help='')
     parser.add_argument('--set', dest='set_cfgs', default=None, nargs=argparse.REMAINDER,
@@ -71,6 +71,9 @@ def main():
         dist_train = False
         total_gpus = 1
     else:
+        if args.local_rank is None:
+            args.local_rank = int(os.environ.get('LOCAL_RANK', '0'))
+
         total_gpus, cfg.LOCAL_RANK = getattr(common_utils, 'init_dist_%s' % args.launcher)(
             args.tcp_port, args.local_rank, backend='nccl'
         )
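For context: starting with torch 2.0, the recommended launcher torchrun exports each worker's rank through the LOCAL_RANK environment variable instead of injecting a --local_rank command-line argument, so the old hard-coded default of 0 gave every worker local rank 0 under torchrun. The patch keeps the flag for backward compatibility and falls back to the environment variable when the flag is absent. A minimal standalone sketch of that pattern (hypothetical script, not repository code):

    import argparse
    import os

    parser = argparse.ArgumentParser()
    parser.add_argument('--local_rank', type=int, default=None,
                        help='local rank for distributed training')
    args = parser.parse_args()

    # Fall back to the LOCAL_RANK environment variable that torchrun
    # (torch >= 2.0) sets for every worker; default to 0 when neither
    # the flag nor the variable is present (single-process run).
    if args.local_rank is None:
        args.local_rank = int(os.environ.get('LOCAL_RANK', '0'))

    print('local rank:', args.local_rank)

With the fallback in place, both launch styles should work (assuming the launcher value is pytorch, matching init_dist_pytorch; any other flags are illustrative):

    # torch < 2.0: the launcher injects --local_rank=<rank> as an argument
    python -m torch.distributed.launch --nproc_per_node=4 tools/train.py --launcher pytorch

    # torch >= 2.0: torchrun only sets the LOCAL_RANK environment variable
    torchrun --nproc_per_node=4 tools/train.py --launcher pytorch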
