Skip to content
Snippets Groups Projects
Commit d651412b authored by xuetaowave's avatar xuetaowave
Browse files

update

parent d648a75b
No related branches found
No related tags found
No related merge requests found
import os
from datetime import timedelta
# Single-process rendezvous bootstrap: this process is rank 0.
# NOTE(review): MASTER_ADDR is the literal string '0' — presumably a
# placeholder for the local host; confirm against the launcher config.
os.environ['MASTER_ADDR'] = '0'
os.environ['RANK'] = '0'
......@@ -9,5 +10,5 @@ os.environ['WANDB_START_METHOD'] = 'thread'
# Rendezvous port for the process group.
os.environ['MASTER_PORT'] = '19500'
import torch.distributed as dist
# A short 10 s timeout makes a misconfigured rendezvous fail fast instead of
# blocking on the default (30 min) process-group timeout.
# (The stale pre-change call without a timeout and the trailing no-op `pass`
# left over from the diff are removed — initializing twice would raise.)
dist.init_process_group(timeout=timedelta(seconds=10))
......@@ -46,6 +46,7 @@
import os
import time
from datetime import timedelta
import numpy as np
import argparse
import h5py
......@@ -558,7 +559,7 @@ if __name__ == '__main__':
local_rank = 0
if params['world_size'] > 1:
    # Multi-GPU run: join the NCCL process group via the env:// rendezvous.
    # The 10 s timeout surfaces rendezvous misconfiguration quickly instead of
    # hanging for the default process-group timeout. (The diff's stale
    # pre-change continuation line is dropped; keeping both would be a
    # syntax error.)
    dist.init_process_group(backend='nccl',
                            init_method='env://',
                            timeout=timedelta(seconds=10))
    # LOCAL_RANK is presumably injected by the launcher (e.g. torchrun) —
    # TODO confirm against the job script.
    local_rank = int(os.environ["LOCAL_RANK"])
    args.gpu = local_rank
    world_rank = dist.get_rank()
......
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment.