# Running on cnode7-018
# Started at Mon Dec 9 21:38:54 CST 2024
# SLURMD_NODENAME=cnode7-018
# SLURM_CLUSTER_NAME=slurm
# SLURM_CONF=/cm/shared/apps/slurm/var/etc/slurm/slurm.conf
# SLURM_CPUS_ON_NODE=224
# SLURM_CPUS_PER_TASK=128
# SLURM_EXPORT_ENV=PATH
# SLURM_GET_USER_ENV=1
# SLURM_GPUS_ON_NODE=8
# SLURM_GPUS_PER_NODE=8
# SLURM_GTIDS=0
# SLURM_JOBID=6567
# SLURM_JOB_CPUS_PER_NODE='224(x2)'
# SLURM_JOB_END_TIME=1765287532
# SLURM_JOB_GID=1026
# SLURM_JOB_GPUS=0,1,2,3,4,5,6,7
# SLURM_JOB_ID=6567
# SLURM_JOB_NAME=exp_owsm/s2t_train_05b_ds_raw_bpe50000/train.log
# SLURM_JOB_NODELIST='cnode7-[018-019]'
# SLURM_JOB_NUM_NODES=2
# SLURM_JOB_PARTITION=sa
# SLURM_JOB_QOS=normal
# SLURM_JOB_START_TIME=1733751532
# SLURM_JOB_UID=1026
# SLURM_JOB_USER=williamchen
# SLURM_LOCALID=0
# SLURM_MEM_PER_NODE=2048000
# SLURM_NNODES=2
# SLURM_NODEID=0
# SLURM_NODELIST='cnode7-[018-019]'
# SLURM_NODE_ALIASES='(null)'
# SLURM_OPEN_MODE=a
# SLURM_PRIO_PROCESS=0
# SLURM_PROCID=0
# SLURM_SUBMIT_DIR=/mnt/home/williamchen/espnet/egs2/owsm_v3.1/s2t1
# SLURM_SUBMIT_HOST=154-T2-P1-NVR
# SLURM_TASKS_PER_NODE='1(x2)'
# SLURM_TASK_PID=4121007
# SLURM_TOPOLOGY_ADDR=cnode7-018
# SLURM_TOPOLOGY_ADDR_PATTERN=node
# SLURM_WORKING_CLUSTER=slurm:154-T2-P1-NVR:6817:9984:109
# srun --export=ALL python3 -m espnet2.bin.s2t_train --use_preprocessor true --bpemodel data/token_list/bpe_unigram50000/bpe.model --token_type bpe --token_list data/token_list/bpe_unigram50000/tokens.txt --non_linguistic_symbols none --cleaner none --g2p none --valid_data_path_and_name_and_type dump/raw/dev_v3/wav.scp,speech,kaldi_ark --valid_shape_file exp_owsm/s2t_stats_raw_bpe50000/valid/speech_shape --resume true --fold_length 80000 --output_dir exp_owsm/s2t_train_05b_ds_raw_bpe50000 --config conf/train_05b_ds.yaml --frontend_conf fs=16k --normalize=global_mvn --normalize_conf stats_file=exp_owsm/s2t_stats_raw_bpe50000/train/feats_stats.npz --train_data_path_and_name_and_type exp_owsm/s2t_stats_raw_bpe50000/splits8/wav.scp,speech,kaldi_ark --train_shape_file exp_owsm/s2t_stats_raw_bpe50000/splits8/speech_shape --fold_length 150 --train_data_path_and_name_and_type exp_owsm/s2t_stats_raw_bpe50000/splits8/text.prev,text_prev,text --fold_length 150 --train_data_path_and_name_and_type exp_owsm/s2t_stats_raw_bpe50000/splits8/text.ctc,text_ctc,text --fold_length 150 --train_data_path_and_name_and_type exp_owsm/s2t_stats_raw_bpe50000/splits8/text,text,text --multiple_iterator true --valid_data_path_and_name_and_type dump/raw/dev_v3/text.prev,text_prev,text --valid_data_path_and_name_and_type dump/raw/dev_v3/text.ctc,text_ctc,text --valid_data_path_and_name_and_type dump/raw/dev_v3/text,text,text --ngpu 8 --multiprocessing_distributed true --dist_launcher slurm --dist_init_method file:///mnt/home/williamchen/espnet/egs2/owsm_v3.1/s2t1/exp_owsm/s2t_train_05b_ds_raw_bpe50000/.dist_init_58cb090c-176f-43a9-9bad-48e7526d7140
[2024-12-09 21:39:01,011] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect)
[2024-12-09 21:39:02,272] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect)
/mnt/home/williamchen/espnet/tools/miniconda/envs/espnet/bin/python3 /mnt/home/williamchen/espnet/espnet2/bin/s2t_train.py --use_preprocessor true --bpemodel data/token_list/bpe_unigram50000/bpe.model --token_type bpe --token_list data/token_list/bpe_unigram50000/tokens.txt --non_linguistic_symbols none --cleaner none --g2p none --valid_data_path_and_name_and_type dump/raw/dev_v3/wav.scp,speech,kaldi_ark --valid_shape_file exp_owsm/s2t_stats_raw_bpe50000/valid/speech_shape --resume true --fold_length 80000 --output_dir exp_owsm/s2t_train_05b_ds_raw_bpe50000 --config conf/train_05b_ds.yaml --frontend_conf fs=16k --normalize=global_mvn --normalize_conf stats_file=exp_owsm/s2t_stats_raw_bpe50000/train/feats_stats.npz --train_data_path_and_name_and_type exp_owsm/s2t_stats_raw_bpe50000/splits8/wav.scp,speech,kaldi_ark --train_shape_file exp_owsm/s2t_stats_raw_bpe50000/splits8/speech_shape --fold_length 150 --train_data_path_and_name_and_type exp_owsm/s2t_stats_raw_bpe50000/splits8/text.prev,text_prev,text --fold_length 150 --train_data_path_and_name_and_type exp_owsm/s2t_stats_raw_bpe50000/splits8/text.ctc,text_ctc,text --fold_length 150 --train_data_path_and_name_and_type exp_owsm/s2t_stats_raw_bpe50000/splits8/text,text,text --multiple_iterator true --valid_data_path_and_name_and_type dump/raw/dev_v3/text.prev,text_prev,text --valid_data_path_and_name_and_type dump/raw/dev_v3/text.ctc,text_ctc,text --valid_data_path_and_name_and_type dump/raw/dev_v3/text,text,text --ngpu 8 --multiprocessing_distributed true --dist_launcher slurm --dist_init_method file:///mnt/home/williamchen/espnet/egs2/owsm_v3.1/s2t1/exp_owsm/s2t_train_05b_ds_raw_bpe50000/.dist_init_58cb090c-176f-43a9-9bad-48e7526d7140
/mnt/home/williamchen/espnet/tools/miniconda/envs/espnet/bin/python3 /mnt/home/williamchen/espnet/espnet2/bin/s2t_train.py --use_preprocessor true --bpemodel data/token_list/bpe_unigram50000/bpe.model --token_type bpe --token_list data/token_list/bpe_unigram50000/tokens.txt --non_linguistic_symbols none --cleaner none --g2p none --valid_data_path_and_name_and_type dump/raw/dev_v3/wav.scp,speech,kaldi_ark --valid_shape_file exp_owsm/s2t_stats_raw_bpe50000/valid/speech_shape --resume true --fold_length 80000 --output_dir exp_owsm/s2t_train_05b_ds_raw_bpe50000 --config conf/train_05b_ds.yaml --frontend_conf fs=16k --normalize=global_mvn --normalize_conf stats_file=exp_owsm/s2t_stats_raw_bpe50000/train/feats_stats.npz --train_data_path_and_name_and_type exp_owsm/s2t_stats_raw_bpe50000/splits8/wav.scp,speech,kaldi_ark --train_shape_file exp_owsm/s2t_stats_raw_bpe50000/splits8/speech_shape --fold_length 150 --train_data_path_and_name_and_type exp_owsm/s2t_stats_raw_bpe50000/splits8/text.prev,text_prev,text --fold_length 150 --train_data_path_and_name_and_type exp_owsm/s2t_stats_raw_bpe50000/splits8/text.ctc,text_ctc,text --fold_length 150 --train_data_path_and_name_and_type exp_owsm/s2t_stats_raw_bpe50000/splits8/text,text,text --multiple_iterator true --valid_data_path_and_name_and_type dump/raw/dev_v3/text.prev,text_prev,text --valid_data_path_and_name_and_type dump/raw/dev_v3/text.ctc,text_ctc,text --valid_data_path_and_name_and_type dump/raw/dev_v3/text,text,text --ngpu 8 --multiprocessing_distributed true --dist_launcher slurm --dist_init_method file:///mnt/home/williamchen/espnet/egs2/owsm_v3.1/s2t1/exp_owsm/s2t_train_05b_ds_raw_bpe50000/.dist_init_58cb090c-176f-43a9-9bad-48e7526d7140
[2024-12-09 21:39:12,879] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect)
[2024-12-09 21:39:14,148] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect)
[2024-12-09 21:39:13,077] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect)
[2024-12-09 21:39:13,602] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect)
[2024-12-09 21:39:14,818] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect)
[2024-12-09 21:39:13,888] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect)
[2024-12-09 21:39:14,022] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect)
[2024-12-09 21:39:14,065] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect)
[2024-12-09 21:39:14,163] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect)
[2024-12-09 21:39:14,232] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect)
[2024-12-09 21:39:15,507] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect)
[2024-12-09 21:39:15,810] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect)
[2024-12-09 21:39:15,981] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect)
[2024-12-09 21:39:16,068] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect)
[2024-12-09 21:39:16,097] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect)
[2024-12-09 21:39:16,121] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect)
[W1209 21:39:20.319256787 Utils.hpp:164] Warning: Environment variable NCCL_BLOCKING_WAIT is deprecated; use TORCH_NCCL_BLOCKING_WAIT instead (function operator())
[W1209 21:39:21.520263135 Utils.hpp:164] Warning: Environment variable NCCL_BLOCKING_WAIT is deprecated; use TORCH_NCCL_BLOCKING_WAIT instead (function operator())
[W1209 21:39:22.157329502 Utils.hpp:164] Warning: Environment variable NCCL_BLOCKING_WAIT is deprecated; use TORCH_NCCL_BLOCKING_WAIT instead (function operator())
[W1209 21:39:21.010852675 Utils.hpp:164] Warning: Environment variable NCCL_BLOCKING_WAIT is deprecated; use TORCH_NCCL_BLOCKING_WAIT instead (function operator())
[W1209 21:39:22.546653608 Utils.hpp:164] Warning: Environment variable NCCL_BLOCKING_WAIT is deprecated; use TORCH_NCCL_BLOCKING_WAIT instead (function operator())
[W1209 21:39:21.268890897 Utils.hpp:164] Warning: Environment variable NCCL_BLOCKING_WAIT is deprecated; use TORCH_NCCL_BLOCKING_WAIT instead (function operator())
[W1209 21:39:21.369155873 Utils.hpp:164] Warning: Environment variable NCCL_BLOCKING_WAIT is deprecated; use TORCH_NCCL_BLOCKING_WAIT instead (function operator())
[W1209 21:39:22.461366244 Utils.hpp:164] Warning: Environment variable NCCL_BLOCKING_WAIT is deprecated; use TORCH_NCCL_BLOCKING_WAIT instead (function operator())
[W1209 21:39:22.744412169 Utils.hpp:164] Warning: Environment variable NCCL_BLOCKING_WAIT is deprecated; use TORCH_NCCL_BLOCKING_WAIT instead (function operator())
[cnode7-018:0/16] 2024-12-09 21:39:22,360 (s2t:462) INFO: Vocabulary size: 50002
[W1209 21:39:22.815828506 Utils.hpp:164] Warning: Environment variable NCCL_BLOCKING_WAIT is deprecated; use TORCH_NCCL_BLOCKING_WAIT instead (function operator())
[W1209 21:39:23.234216893 Utils.hpp:164] Warning: Environment variable NCCL_BLOCKING_WAIT is deprecated; use TORCH_NCCL_BLOCKING_WAIT instead (function operator())
[W1209 21:39:23.478411320 Utils.hpp:164] Warning: Environment variable NCCL_BLOCKING_WAIT is deprecated; use TORCH_NCCL_BLOCKING_WAIT instead (function operator())
[W1209 21:39:23.532943470 Utils.hpp:164] Warning: Environment variable NCCL_BLOCKING_WAIT is deprecated; use TORCH_NCCL_BLOCKING_WAIT instead (function operator())
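The repeated Utils.hpp warnings above are PyTorch asking for a renamed environment variable. A minimal sketch of the suggested change, assuming the variable is exported by the job script (the value "1" is an assumption that blocking wait was enabled on purpose):

    import os

    # PyTorch 2.4 deprecates NCCL_BLOCKING_WAIT in favor of TORCH_NCCL_BLOCKING_WAIT;
    # setting the new name before torch.distributed initializes silences the warning.
    os.environ.pop("NCCL_BLOCKING_WAIT", None)
    os.environ["TORCH_NCCL_BLOCKING_WAIT"] = "1"  # assumed value, not taken from this log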
[W1209 21:39:24.769348648 Utils.hpp:164] Warning: Environment variable NCCL_BLOCKING_WAIT is deprecated; use TORCH_NCCL_BLOCKING_WAIT instead (function operator())
[W1209 21:39:24.817521637 Utils.hpp:164] Warning: Environment variable NCCL_BLOCKING_WAIT is deprecated; use TORCH_NCCL_BLOCKING_WAIT instead (function operator())
[W1209 21:39:24.908529050 Utils.hpp:164] Warning: Environment variable NCCL_BLOCKING_WAIT is deprecated; use TORCH_NCCL_BLOCKING_WAIT instead (function operator())
[cnode7-018:0/16] 2024-12-09 21:39:26,572 (abs_task:1383) INFO: pytorch.version=2.4.0+cu121, cuda.available=True, cudnn.version=90100, cudnn.benchmark=False, cudnn.deterministic=True
[cnode7-018:0/16] 2024-12-09 21:39:26,578 (abs_task:1384) INFO: Model structure:
ESPnetS2TModel(
  (frontend): DefaultFrontend(
    (stft): Stft(n_fft=512, win_length=400, hop_length=160, center=True, normalized=False, onesided=True)
    (frontend): Frontend()
    (logmel): LogMel(sr=16000, n_fft=512, n_mels=80, fmin=0, fmax=8000.0, htk=False)
  )
  (specaug): SpecAug(
    (freq_mask): MaskAlongAxis(mask_width_range=[0, 27], num_mask=2, axis=freq)
    (time_mask): MaskAlongAxisVariableMaxWidth(mask_width_ratio_range=[0.0, 0.05], num_mask=10, axis=time)
  )
  (normalize): GlobalMVN(stats_file=exp_owsm/s2t_stats_raw_bpe50000/train/feats_stats.npz, norm_means=True, norm_vars=True)
  (encoder): TransformerEncoder(
    (embed): Conv2dSubsampling(
      (conv): Sequential(
        (0): Conv2d(1, 1024, kernel_size=(3, 3), stride=(2, 2))
        (1): ReLU()
        (2): Conv2d(1024, 1024, kernel_size=(3, 3), stride=(2, 2))
        (3): ReLU()
      )
      (out): Sequential(
        (0): Linear(in_features=19456, out_features=1024, bias=True)
        (1): PositionalEncoding(
          (dropout): Dropout(p=0.1, inplace=False)
        )
      )
    )
    (encoders): MultiSequential(
      (0-15): 16 x EncoderLayer(
        (self_attn): MultiHeadedAttention(
          (linear_q): Linear(in_features=1024, out_features=1024, bias=True)
          (linear_k): Linear(in_features=1024, out_features=1024, bias=True)
          (linear_v): Linear(in_features=1024, out_features=1024, bias=True)
          (linear_out): Linear(in_features=1024, out_features=1024, bias=True)
          (dropout): Identity()
          (q_norm): Identity()
          (k_norm): Identity()
        )
        (feed_forward): PositionwiseFeedForward(
          (w_1): Linear(in_features=1024, out_features=4096, bias=True)
          (w_2): Linear(in_features=4096, out_features=1024, bias=True)
          (dropout): Dropout(p=0.1, inplace=False)
          (activation): ReLU()
        )
        (norm1): LayerNorm((1024,), eps=1e-12, elementwise_affine=True)
        (norm2): LayerNorm((1024,), eps=1e-12, elementwise_affine=True)
        (dropout): Dropout(p=0.1, inplace=False)
      )
    )
    (after_norm): LayerNorm((1024,), eps=1e-12, elementwise_affine=True)
  )
  (decoder): TransformerDecoder(
    (embed): Sequential(
      (0): Embedding(50002, 1024)
      (1): PositionalEncoding(
        (dropout): Dropout(p=0.1, inplace=False)
      )
    )
    (after_norm): LayerNorm((1024,), eps=1e-12, elementwise_affine=True)
    (output_layer): Linear(in_features=1024, out_features=50002, bias=True)
    (decoders): MultiSequential(
      (0-15): 16 x DecoderLayer(
        (self_attn): MultiHeadedAttention(
          (linear_q): Linear(in_features=1024, out_features=1024, bias=True)
          (linear_k): Linear(in_features=1024, out_features=1024, bias=True)
          (linear_v): Linear(in_features=1024, out_features=1024, bias=True)
          (linear_out): Linear(in_features=1024, out_features=1024, bias=True)
          (dropout): Identity()
          (q_norm): Identity()
          (k_norm): Identity()
        )
        (src_attn): MultiHeadedAttention(
          (linear_q): Linear(in_features=1024, out_features=1024, bias=True)
          (linear_k): Linear(in_features=1024, out_features=1024, bias=True)
          (linear_v): Linear(in_features=1024, out_features=1024, bias=True)
          (linear_out): Linear(in_features=1024, out_features=1024, bias=True)
          (dropout): Identity()
          (q_norm): Identity()
          (k_norm): Identity()
        )
        (feed_forward): PositionwiseFeedForward(
          (w_1): Linear(in_features=1024, out_features=4096, bias=True)
          (w_2): Linear(in_features=4096, out_features=1024, bias=True)
          (dropout): Dropout(p=0.1, inplace=False)
          (activation): ReLU()
        )
        (norm1): LayerNorm((1024,), eps=1e-12, elementwise_affine=True)
        (norm2): LayerNorm((1024,), eps=1e-12, elementwise_affine=True)
        (norm3): LayerNorm((1024,), eps=1e-12, elementwise_affine=True)
        (dropout): Dropout(p=0.1, inplace=False)
      )
    )
  )
  (criterion_att): LabelSmoothingLoss(
    (criterion): KLDivLoss()
  )
  (ctc): CTC(
    (ctc_lo): Linear(in_features=1024, out_features=50002, bias=True)
    (ctc_loss): CTCLoss()
  )
)

Model summary:
    Class Name: ESPnetS2TModel
    Total Number of model parameters: 653.37 M
    Number of trainable parameters: 653.37 M (100.0%)
    Size: 2.61 GB
    Type: torch.float32
[cnode7-018:0/16] 2024-12-09 21:39:26,578 (abs_task:1387) INFO: Optimizer: Adadelta (
Parameter Group 0
    capturable: False
    differentiable: False
    eps: 1e-06
    foreach: None
    lr: 1.0
    maximize: False
    rho: 0.9
    weight_decay: 0
)
[cnode7-018:0/16] 2024-12-09 21:39:26,578 (abs_task:1388) INFO: Scheduler: None
[cnode7-018:0/16] 2024-12-09 21:39:26,583 (abs_task:1397) INFO: Saving the configuration in exp_owsm/s2t_train_05b_ds_raw_bpe50000/config.yaml
[cnode7-018:0/16] 2024-12-09 21:39:31,498 (s2t:444) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py:240: InstrumentationWarning: @typechecked only supports instrumenting functions wrapped with @classmethod, @staticmethod or @property -- not typechecking espnet2.train.deepspeed_trainer.DeepSpeedTrainer.valid_one_epoch
  def valid_one_epoch(
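The Linear(in_features=19456, ...) inside Conv2dSubsampling above follows directly from the 80-mel input shown by LogMel(n_mels=80). A small check of the arithmetic, assuming no padding in either convolution:

    # Each Conv2d(kernel=3, stride=2, no padding): out = (in - 3) // 2 + 1
    n_mels = 80                   # from LogMel(sr=16000, n_fft=512, n_mels=80)
    freq = (n_mels - 3) // 2 + 1  # first conv:  80 -> 39
    freq = (freq - 3) // 2 + 1    # second conv: 39 -> 19
    channels = 1024               # output channels of the second Conv2d
    assert channels * freq == 19456  # matches Linear(in_features=19456, out_features=1024)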
/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py:240: InstrumentationWarning: @typechecked only supports instrumenting functions wrapped with @classmethod, @staticmethod or @property -- not typechecking espnet2.train.deepspeed_trainer.DeepSpeedTrainer.valid_one_epoch
  def valid_one_epoch(
[2024-12-09 21:39:32,326] [INFO] [comm.py:652:init_distributed] cdb=None
[2024-12-09 21:39:32,328] [INFO] [config.py:733:__init__] Config mesh_device None world_size = 16
/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py:240: InstrumentationWarning: @typechecked only supports instrumenting functions wrapped with @classmethod, @staticmethod or @property -- not typechecking espnet2.train.deepspeed_trainer.DeepSpeedTrainer.valid_one_epoch
  def valid_one_epoch(
[2024-12-09 21:39:32,572] [INFO] [comm.py:652:init_distributed] cdb=None
[2024-12-09 21:39:32,574] [INFO] [config.py:733:__init__] Config mesh_device None world_size = 16
/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py:240: InstrumentationWarning: @typechecked only supports instrumenting functions wrapped with @classmethod, @staticmethod or @property -- not typechecking espnet2.train.deepspeed_trainer.DeepSpeedTrainer.valid_one_epoch
  def valid_one_epoch(
[2024-12-09 21:39:33,978] [INFO] [comm.py:652:init_distributed] cdb=None
[2024-12-09 21:39:33,980] [INFO] [config.py:733:__init__] Config mesh_device None world_size = 16
[2024-12-09 21:39:32,897] [INFO] [comm.py:652:init_distributed] cdb=None
[2024-12-09 21:39:32,898] [INFO] [config.py:733:__init__] Config mesh_device None world_size = 16
/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py:240: InstrumentationWarning: @typechecked only supports instrumenting functions wrapped with @classmethod, @staticmethod or @property -- not typechecking espnet2.train.deepspeed_trainer.DeepSpeedTrainer.valid_one_epoch
  def valid_one_epoch(
/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py:240: InstrumentationWarning: @typechecked only supports instrumenting functions wrapped with @classmethod, @staticmethod or @property -- not typechecking espnet2.train.deepspeed_trainer.DeepSpeedTrainer.valid_one_epoch
  def valid_one_epoch(
/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py:240: InstrumentationWarning: @typechecked only supports instrumenting functions wrapped with @classmethod, @staticmethod or @property -- not typechecking espnet2.train.deepspeed_trainer.DeepSpeedTrainer.valid_one_epoch
  def valid_one_epoch(
[2024-12-09 21:39:33,274] [INFO] [comm.py:652:init_distributed] cdb=None
[2024-12-09 21:39:33,275] [INFO] [config.py:733:__init__] Config mesh_device None world_size = 16
[2024-12-09 21:39:33,282] [INFO] [comm.py:652:init_distributed] cdb=None
[2024-12-09 21:39:33,283] [INFO] [config.py:733:__init__] Config mesh_device None world_size = 16
[2024-12-09 21:39:33,348] [INFO] [comm.py:652:init_distributed] cdb=None
[2024-12-09 21:39:33,349] [INFO] [config.py:733:__init__] Config mesh_device None world_size = 16
/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py:240: InstrumentationWarning: @typechecked only supports instrumenting functions wrapped with @classmethod, @staticmethod or @property -- not typechecking espnet2.train.deepspeed_trainer.DeepSpeedTrainer.valid_one_epoch
  def valid_one_epoch(
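Each init_distributed/world_size pair above is one rank joining DeepSpeed's process group (16 ranks: 2 nodes x 8 GPUs). A minimal sketch of the call that produces these records, assuming NCCL as the backend and the SLURM-provided rank variables already in the environment:

    import deepspeed

    # Logs "[comm.py:...:init_distributed] cdb=None" once per rank and derives
    # the world size (16 here) from the launcher's environment variables.
    deepspeed.init_distributed(dist_backend="nccl")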
/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py:240: InstrumentationWarning: @typechecked only supports instrumenting functions wrapped with @classmethod, @staticmethod or @property -- not typechecking espnet2.train.deepspeed_trainer.DeepSpeedTrainer.valid_one_epoch
  def valid_one_epoch(
/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py:240: InstrumentationWarning: @typechecked only supports instrumenting functions wrapped with @classmethod, @staticmethod or @property -- not typechecking espnet2.train.deepspeed_trainer.DeepSpeedTrainer.valid_one_epoch
  def valid_one_epoch(
[2024-12-09 21:39:34,923] [INFO] [comm.py:652:init_distributed] cdb=None
[2024-12-09 21:39:34,924] [INFO] [config.py:733:__init__] Config mesh_device None world_size = 16
[2024-12-09 21:39:35,042] [INFO] [comm.py:652:init_distributed] cdb=None
[2024-12-09 21:39:35,043] [INFO] [config.py:733:__init__] Config mesh_device None world_size = 16
/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py:240: InstrumentationWarning: @typechecked only supports instrumenting functions wrapped with @classmethod, @staticmethod or @property -- not typechecking espnet2.train.deepspeed_trainer.DeepSpeedTrainer.valid_one_epoch
  def valid_one_epoch(
[2024-12-09 21:39:35,088] [INFO] [comm.py:652:init_distributed] cdb=None
[2024-12-09 21:39:35,089] [INFO] [config.py:733:__init__] Config mesh_device None world_size = 16
/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py:240: InstrumentationWarning: @typechecked only supports instrumenting functions wrapped with @classmethod, @staticmethod or @property -- not typechecking espnet2.train.deepspeed_trainer.DeepSpeedTrainer.valid_one_epoch
  def valid_one_epoch(
[2024-12-09 21:39:35,283] [INFO] [comm.py:652:init_distributed] cdb=None
[2024-12-09 21:39:35,284] [INFO] [config.py:733:__init__] Config mesh_device None world_size = 16
[2024-12-09 21:39:35,410] [INFO] [comm.py:652:init_distributed] cdb=None
[2024-12-09 21:39:35,411] [INFO] [config.py:733:__init__] Config mesh_device None world_size = 16
[cnode7-018:0/16] 2024-12-09 21:39:34,250 (abs_task:1807) INFO: [valid] dataset:
ESPnetDataset(
  speech: {"path": "dump/raw/dev_v3/wav.scp", "type": "kaldi_ark"}
  text_prev: {"path": "dump/raw/dev_v3/text.prev", "type": "text"}
  text_ctc: {"path": "dump/raw/dev_v3/text.ctc", "type": "text"}
  text: {"path": "dump/raw/dev_v3/text", "type": "text"}
  preprocess: )
[cnode7-018:0/16] 2024-12-09 21:39:34,250 (abs_task:1808) INFO: [valid] Batch sampler: SortedBatchSampler(N-batch=74743, batch_size=16, shape_file=exp_owsm/s2t_stats_raw_bpe50000/valid/speech_shape, sort_in_batch=descending, sort_batch=descending)
[cnode7-018:0/16] 2024-12-09 21:39:34,256 (abs_task:1809) INFO: [valid] mini-batch sizes summary: N-batch=74743, mean=16.0, min=16, max=17
/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py:240: InstrumentationWarning: @typechecked only supports instrumenting functions wrapped with @classmethod, @staticmethod or @property -- not typechecking espnet2.train.deepspeed_trainer.DeepSpeedTrainer.valid_one_epoch
  def valid_one_epoch(
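The SortedBatchSampler above orders utterances by length before cutting batches, so each mini-batch holds similar-length audio (and, presumably after folding in the remainder, sizes land at 16 with an occasional 17). A simplified sketch of the sorting idea, not ESPnet's actual class, assuming shapes maps utterance id to frame count as read from the speech_shape file:

    def sorted_batches(shapes, batch_size=16, descending=True):
        # Sort utterance ids by length, then slice consecutive chunks;
        # mirrors sort_in_batch=descending in the log above.
        order = sorted(shapes, key=shapes.get, reverse=descending)
        return [order[i:i + batch_size] for i in range(0, len(order), batch_size)]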
[cnode7-018:0/16] 2024-12-09 21:39:34,310 (distributed_utils:129) WARNING:
=================================================================
Found OMP_NUM_THREADS=1 in environment variables. With some
advanced features, DeepSpeed may have heavy cpu workload so that
OMP_NUM_THREADS=1 is not sufficient. Try to increase it in your
path.sh
=================================================================
[2024-12-09 21:39:34,310] [INFO] [comm.py:652:init_distributed] cdb=None
[2024-12-09 21:39:34,311] [INFO] [logging.py:129:log_dist] [Rank 0] DeepSpeed info: version=0.15.3, git-hash=unknown, git-branch=unknown
[2024-12-09 21:39:34,312] [INFO] [config.py:733:__init__] Config mesh_device None world_size = 16
cnode7-018:4121586:4121586 [0] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet
cnode7-018:4121586:4121586 [0] NCCL INFO Bootstrap : Using enp170s0np0:10.225.1.180<0>
cnode7-018:4121586:4121586 [0] NCCL INFO NET/Plugin : dlerror=libnccl-net.so: cannot open shared object file: No such file or directory No plugin found (libnccl-net.so), using internal implementation
cnode7-018:4121586:4121586 [0] NCCL INFO cudaDriverVersion 12020
NCCL version 2.20.5+cuda12.4
cnode7-019:254254:254254 [7] NCCL INFO cudaDriverVersion 12020
cnode7-019:254254:254254 [7] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet
cnode7-019:254254:254254 [7] NCCL INFO Bootstrap : Using enp170s0np0:10.225.1.186<0>
cnode7-019:254254:254254 [7] NCCL INFO NET/Plugin : dlerror=libnccl-net.so: cannot open shared object file: No such file or directory No plugin found (libnccl-net.so), using internal implementation
cnode7-018:4121589:4121589 [3] NCCL INFO cudaDriverVersion 12020
cnode7-018:4121589:4121589 [3] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet
cnode7-018:4121589:4121589 [3] NCCL INFO Bootstrap : Using enp170s0np0:10.225.1.180<0>
cnode7-018:4121589:4121589 [3] NCCL INFO NET/Plugin : dlerror=libnccl-net.so: cannot open shared object file: No such file or directory No plugin found (libnccl-net.so), using internal implementation
cnode7-018:4121593:4121593 [7] NCCL INFO cudaDriverVersion 12020
cnode7-018:4121593:4121593 [7] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet
cnode7-018:4121593:4121593 [7] NCCL INFO Bootstrap : Using enp170s0np0:10.225.1.180<0>
cnode7-018:4121593:4121593 [7] NCCL INFO NET/Plugin : dlerror=libnccl-net.so: cannot open shared object file: No such file or directory No plugin found (libnccl-net.so), using internal implementation
cnode7-018:4121587:4121587 [1] NCCL INFO cudaDriverVersion 12020
cnode7-018:4121587:4121587 [1] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet
cnode7-018:4121587:4121587 [1] NCCL INFO Bootstrap : Using enp170s0np0:10.225.1.180<0>
cnode7-018:4121587:4121587 [1] NCCL INFO NET/Plugin : dlerror=libnccl-net.so: cannot open shared object file: No such file or directory No plugin found (libnccl-net.so), using internal implementation
cnode7-018:4121592:4121592 [6] NCCL INFO cudaDriverVersion 12020
cnode7-018:4121592:4121592 [6] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet
cnode7-018:4121592:4121592 [6] NCCL INFO Bootstrap : Using enp170s0np0:10.225.1.180<0>
cnode7-018:4121592:4121592 [6] NCCL INFO NET/Plugin : dlerror=libnccl-net.so: cannot open shared object file: No such file or directory No plugin found (libnccl-net.so), using internal implementation
/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py:240: InstrumentationWarning: @typechecked only supports instrumenting functions wrapped with @classmethod, @staticmethod or @property -- not typechecking espnet2.train.deepspeed_trainer.DeepSpeedTrainer.valid_one_epoch
  def valid_one_epoch(
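The NCCL_SOCKET_IFNAME value echoed in the lines above uses NCCL's "^" prefix, which excludes the listed interfaces (loopback and virtual bridges) from bootstrap traffic; NCCL then picks a real NIC, enp170s0np0 here. A sketch of setting it, with the same value this log shows:

    import os

    # "^" means exclude: NCCL bootstraps over any interface except
    # lo, docker*, virbr*, vmnet*, vboxnet*.
    os.environ["NCCL_SOCKET_IFNAME"] = "^lo,docker,virbr,vmnet,vboxnet"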
NCCL INFO cudaDriverVersion 12020 cnode7-019:254253:254253 [6] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet cnode7-019:254253:254253 [6] NCCL INFO Bootstrap : Using enp170s0np0:10.225.1.186<0> cnode7-019:254253:254253 [6] NCCL INFO NET/Plugin : dlerror=libnccl-net.so: cannot open shared object file: No such file or directory No plugin found (libnccl-net.so), using internal implementation [2024-12-09 21:39:34,471] [INFO] [comm.py:652:init_distributed] cdb=None [2024-12-09 21:39:34,472] [INFO] [config.py:733:__init__] Config mesh_device None world_size = 16 cnode7-019:254250:254250 [3] NCCL INFO cudaDriverVersion 12020 cnode7-019:254250:254250 [3] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet cnode7-019:254250:254250 [3] NCCL INFO Bootstrap : Using enp170s0np0:10.225.1.186<0> cnode7-019:254250:254250 [3] NCCL INFO NET/Plugin : dlerror=libnccl-net.so: cannot open shared object file: No such file or directory No plugin found (libnccl-net.so), using internal implementation cnode7-019:254251:254251 [4] NCCL INFO cudaDriverVersion 12020 cnode7-019:254251:254251 [4] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet cnode7-019:254251:254251 [4] NCCL INFO Bootstrap : Using enp170s0np0:10.225.1.186<0> cnode7-019:254251:254251 [4] NCCL INFO NET/Plugin : dlerror=libnccl-net.so: cannot open shared object file: No such file or directory No plugin found (libnccl-net.so), using internal implementation cnode7-019:254252:254252 [5] NCCL INFO cudaDriverVersion 12020 cnode7-019:254252:254252 [5] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet cnode7-019:254252:254252 [5] NCCL INFO Bootstrap : Using enp170s0np0:10.225.1.186<0> cnode7-019:254252:254252 [5] NCCL INFO NET/Plugin : dlerror=libnccl-net.so: cannot open shared object file: No such file or directory No plugin found (libnccl-net.so), using internal implementation cnode7-018:4121591:4121591 [5] NCCL INFO cudaDriverVersion 12020 cnode7-018:4121591:4121591 [5] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet cnode7-018:4121591:4121591 [5] NCCL INFO Bootstrap : Using enp170s0np0:10.225.1.180<0> cnode7-018:4121591:4121591 [5] NCCL INFO NET/Plugin : dlerror=libnccl-net.so: cannot open shared object file: No such file or directory No plugin found (libnccl-net.so), using internal implementation cnode7-019:254249:254249 [2] NCCL INFO cudaDriverVersion 12020 cnode7-019:254249:254249 [2] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet cnode7-019:254249:254249 [2] NCCL INFO Bootstrap : Using enp170s0np0:10.225.1.186<0> cnode7-019:254249:254249 [2] NCCL INFO NET/Plugin : dlerror=libnccl-net.so: cannot open shared object file: No such file or directory No plugin found (libnccl-net.so), using internal implementation cnode7-018:4121590:4121590 [4] NCCL INFO cudaDriverVersion 12020 cnode7-018:4121590:4121590 [4] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet cnode7-018:4121590:4121590 [4] NCCL INFO Bootstrap : Using enp170s0np0:10.225.1.180<0> cnode7-018:4121590:4121590 [4] NCCL INFO NET/Plugin : dlerror=libnccl-net.so: cannot open shared object file: No such file or directory No plugin found (libnccl-net.so), using internal implementation cnode7-018:4121586:4122550 [0] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet cnode7-019:254249:255071 [2] NCCL INFO NCCL_SOCKET_IFNAME set by environment to 
^lo,docker,virbr,vmnet,vboxnet cnode7-018:4121588:4121588 [2] NCCL INFO cudaDriverVersion 12020 cnode7-018:4121588:4121588 [2] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet cnode7-018:4121588:4121588 [2] NCCL INFO Bootstrap : Using enp170s0np0:10.225.1.180<0> cnode7-018:4121588:4121588 [2] NCCL INFO NET/Plugin : dlerror=libnccl-net.so: cannot open shared object file: No such file or directory No plugin found (libnccl-net.so), using internal implementation cnode7-019:254254:255068 [7] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet cnode7-018:4121589:4122557 [3] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet cnode7-018:4121593:4122551 [7] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet cnode7-018:4121587:4122554 [1] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet cnode7-018:4121592:4122553 [6] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet /mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py:240: InstrumentationWarning: @typechecked only supports instrumenting functions wrapped with @classmethod, @staticmethod or @property -- not typechecking espnet2.train.deepspeed_trainer.DeepSpeedTrainer.valid_one_epoch def valid_one_epoch( cnode7-019:254253:255069 [6] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet cnode7-019:254253:255069 [6] NCCL INFO NET/IB : Using [0]mlx5_0:1/IB [1]mlx5_3:1/IB [2]mlx5_4:1/IB [3]mlx5_5:1/IB [4]mlx5_6:1/IB [5]mlx5_7:1/RoCE [6]mlx5_8:1/IB [7]mlx5_9:1/IB [8]mlx5_10:1/IB [RO]; OOB enp170s0np0:10.225.1.186<0> cnode7-019:254253:255069 [6] NCCL INFO Using non-device net plugin version 0 cnode7-019:254253:255069 [6] NCCL INFO Using network IB cnode7-019:254250:255074 [3] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet cnode7-019:254251:255072 [4] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet cnode7-019:254251:255072 [4] NCCL INFO NET/IB : Using [0]mlx5_0:1/IB [1]mlx5_3:1/IB [2]mlx5_4:1/IB [3]mlx5_5:1/IB [4]mlx5_6:1/IB [5]mlx5_7:1/RoCE [6]mlx5_8:1/IB [7]mlx5_9:1/IB [8]mlx5_10:1/IB [RO]; OOB enp170s0np0:10.225.1.186<0> cnode7-019:254251:255072 [4] NCCL INFO Using non-device net plugin version 0 cnode7-019:254251:255072 [4] NCCL INFO Using network IB /mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py:240: InstrumentationWarning: @typechecked only supports instrumenting functions wrapped with @classmethod, @staticmethod or @property -- not typechecking espnet2.train.deepspeed_trainer.DeepSpeedTrainer.valid_one_epoch def valid_one_epoch( cnode7-019:254252:255073 [5] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet cnode7-019:254252:255073 [5] NCCL INFO NET/IB : Using [0]mlx5_0:1/IB [1]mlx5_3:1/IB [2]mlx5_4:1/IB [3]mlx5_5:1/IB [4]mlx5_6:1/IB [5]mlx5_7:1/RoCE [6]mlx5_8:1/IB [7]mlx5_9:1/IB [8]mlx5_10:1/IB [RO]; OOB enp170s0np0:10.225.1.186<0> cnode7-019:254252:255073 [5] NCCL INFO Using non-device net plugin version 0 cnode7-019:254252:255073 [5] NCCL INFO Using network IB cnode7-018:4121591:4122555 [5] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet cnode7-018:4121591:4122555 [5] NCCL INFO NET/IB : Using [0]mlx5_0:1/IB [1]mlx5_3:1/IB [2]mlx5_4:1/IB [3]mlx5_5:1/IB [4]mlx5_6:1/IB [5]mlx5_7:1/RoCE [6]mlx5_8:1/IB [7]mlx5_9:1/IB [8]mlx5_10:1/IB [RO]; OOB enp170s0np0:10.225.1.180<0> 
cnode7-018:4121591:4122555 [5] NCCL INFO Using non-device net plugin version 0 cnode7-018:4121591:4122555 [5] NCCL INFO Using network IB cnode7-019:254249:255071 [2] NCCL INFO NET/IB : Using [0]mlx5_0:1/IB [1]mlx5_3:1/IB [2]mlx5_4:1/IB [3]mlx5_5:1/IB [4]mlx5_6:1/IB [5]mlx5_7:1/RoCE [6]mlx5_8:1/IB [7]mlx5_9:1/IB [8]mlx5_10:1/IB [RO]; OOB enp170s0np0:10.225.1.186<0> cnode7-019:254249:255071 [2] NCCL INFO Using non-device net plugin version 0 cnode7-019:254249:255071 [2] NCCL INFO Using network IB cnode7-019:254254:255068 [7] NCCL INFO NET/IB : Using [0]mlx5_0:1/IB [1]mlx5_3:1/IB [2]mlx5_4:1/IB [3]mlx5_5:1/IB [4]mlx5_6:1/IB [5]mlx5_7:1/RoCE [6]mlx5_8:1/IB [7]mlx5_9:1/IB [8]mlx5_10:1/IB [RO]; OOB enp170s0np0:10.225.1.186<0> cnode7-019:254254:255068 [7] NCCL INFO Using non-device net plugin version 0 cnode7-019:254254:255068 [7] NCCL INFO Using network IB cnode7-018:4121590:4122556 [4] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet cnode7-018:4121590:4122556 [4] NCCL INFO NET/IB : Using [0]mlx5_0:1/IB [1]mlx5_3:1/IB [2]mlx5_4:1/IB [3]mlx5_5:1/IB [4]mlx5_6:1/IB [5]mlx5_7:1/RoCE [6]mlx5_8:1/IB [7]mlx5_9:1/IB [8]mlx5_10:1/IB [RO]; OOB enp170s0np0:10.225.1.180<0> cnode7-018:4121590:4122556 [4] NCCL INFO Using non-device net plugin version 0 cnode7-018:4121590:4122556 [4] NCCL INFO Using network IB cnode7-018:4121589:4122557 [3] NCCL INFO NET/IB : Using [0]mlx5_0:1/IB [1]mlx5_3:1/IB [2]mlx5_4:1/IB [3]mlx5_5:1/IB [4]mlx5_6:1/IB [5]mlx5_7:1/RoCE [6]mlx5_8:1/IB [7]mlx5_9:1/IB [8]mlx5_10:1/IB [RO]; OOB enp170s0np0:10.225.1.180<0> cnode7-018:4121589:4122557 [3] NCCL INFO Using non-device net plugin version 0 cnode7-018:4121589:4122557 [3] NCCL INFO Using network IB cnode7-018:4121593:4122551 [7] NCCL INFO NET/IB : Using [0]mlx5_0:1/IB [1]mlx5_3:1/IB [2]mlx5_4:1/IB [3]mlx5_5:1/IB [4]mlx5_6:1/IB [5]mlx5_7:1/RoCE [6]mlx5_8:1/IB [7]mlx5_9:1/IB [8]mlx5_10:1/IB [RO]; OOB enp170s0np0:10.225.1.180<0> cnode7-018:4121593:4122551 [7] NCCL INFO Using non-device net plugin version 0 cnode7-018:4121593:4122551 [7] NCCL INFO Using network IB cnode7-018:4121587:4122554 [1] NCCL INFO NET/IB : Using [0]mlx5_0:1/IB [1]mlx5_3:1/IB [2]mlx5_4:1/IB [3]mlx5_5:1/IB [4]mlx5_6:1/IB [5]mlx5_7:1/RoCE [6]mlx5_8:1/IB [7]mlx5_9:1/IB [8]mlx5_10:1/IB [RO]; OOB enp170s0np0:10.225.1.180<0> cnode7-018:4121587:4122554 [1] NCCL INFO Using non-device net plugin version 0 cnode7-018:4121587:4122554 [1] NCCL INFO Using network IB cnode7-018:4121592:4122553 [6] NCCL INFO NET/IB : Using [0]mlx5_0:1/IB [1]mlx5_3:1/IB [2]mlx5_4:1/IB [3]mlx5_5:1/IB [4]mlx5_6:1/IB [5]mlx5_7:1/RoCE [6]mlx5_8:1/IB [7]mlx5_9:1/IB [8]mlx5_10:1/IB [RO]; OOB enp170s0np0:10.225.1.180<0> cnode7-018:4121592:4122553 [6] NCCL INFO Using non-device net plugin version 0 cnode7-018:4121592:4122553 [6] NCCL INFO Using network IB cnode7-019:254250:255074 [3] NCCL INFO NET/IB : Using [0]mlx5_0:1/IB [1]mlx5_3:1/IB [2]mlx5_4:1/IB [3]mlx5_5:1/IB [4]mlx5_6:1/IB [5]mlx5_7:1/RoCE [6]mlx5_8:1/IB [7]mlx5_9:1/IB [8]mlx5_10:1/IB [RO]; OOB enp170s0np0:10.225.1.186<0> cnode7-019:254250:255074 [3] NCCL INFO Using non-device net plugin version 0 cnode7-019:254250:255074 [3] NCCL INFO Using network IB cnode7-018:4121586:4122550 [0] NCCL INFO NET/IB : Using [0]mlx5_0:1/IB [1]mlx5_3:1/IB [2]mlx5_4:1/IB [3]mlx5_5:1/IB [4]mlx5_6:1/IB [5]mlx5_7:1/RoCE [6]mlx5_8:1/IB [7]mlx5_9:1/IB [8]mlx5_10:1/IB [RO]; OOB enp170s0np0:10.225.1.180<0> cnode7-018:4121586:4122550 [0] NCCL INFO Using non-device net plugin version 0 cnode7-018:4121586:4122550 [0] NCCL INFO Using network IB 
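Editor's note: the block above records the same NCCL bootstrap sequence for each of the 16 ranks: the interface exclude-list from NCCL_SOCKET_IFNAME, the bootstrap interface (enp170s0np0), the missing optional libnccl-net.so plugin (NCCL falls back to its built-in transports), and the selection of the InfiniBand backend across the mlx5_* HCAs. A minimal, illustrative sketch of how such settings are typically exported before distributed init; the values mirror this log and are not part of the ESPnet recipe itself:

    import os

    # Must be set before the first NCCL communicator is created.
    os.environ.setdefault("NCCL_SOCKET_IFNAME", "^lo,docker,virbr,vmnet,vboxnet")  # leading '^' excludes these interfaces
    os.environ.setdefault("NCCL_DEBUG", "INFO")  # emits the "NCCL INFO ..." lines recorded in this log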
[2024-12-09 21:39:36,032] [INFO] [comm.py:652:init_distributed] cdb=None
[2024-12-09 21:39:36,034] [INFO] [config.py:733:__init__] Config mesh_device None world_size = 16
[2024-12-09 21:39:36,058] [INFO] [comm.py:652:init_distributed] cdb=None
[2024-12-09 21:39:36,061] [INFO] [config.py:733:__init__] Config mesh_device None world_size = 16
cnode7-019:254248:254248 [1] NCCL INFO cudaDriverVersion 12020
cnode7-019:254248:254248 [1] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet
cnode7-019:254248:254248 [1] NCCL INFO Bootstrap : Using enp170s0np0:10.225.1.186<0>
cnode7-019:254248:254248 [1] NCCL INFO NET/Plugin : dlerror=libnccl-net.so: cannot open shared object file: No such file or directory No plugin found (libnccl-net.so), using internal implementation
cnode7-019:254247:254247 [0] NCCL INFO cudaDriverVersion 12020
cnode7-019:254247:254247 [0] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet
cnode7-019:254247:254247 [0] NCCL INFO Bootstrap : Using enp170s0np0:10.225.1.186<0>
cnode7-019:254247:254247 [0] NCCL INFO NET/Plugin : dlerror=libnccl-net.so: cannot open shared object file: No such file or directory No plugin found (libnccl-net.so), using internal implementation
cnode7-018:4121588:4122560 [2] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet
cnode7-019:254247:255137 [0] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet
cnode7-018:4121588:4122560 [2] NCCL INFO NET/IB : Using [0]mlx5_0:1/IB [1]mlx5_3:1/IB [2]mlx5_4:1/IB [3]mlx5_5:1/IB [4]mlx5_6:1/IB [5]mlx5_7:1/RoCE [6]mlx5_8:1/IB [7]mlx5_9:1/IB [8]mlx5_10:1/IB [RO]; OOB enp170s0np0:10.225.1.180<0>
cnode7-018:4121588:4122560 [2] NCCL INFO Using non-device net plugin version 0
cnode7-018:4121588:4122560 [2] NCCL INFO Using network IB
cnode7-019:254248:255136 [1] NCCL INFO NCCL_SOCKET_IFNAME set by environment to ^lo,docker,virbr,vmnet,vboxnet
cnode7-019:254247:255137 [0] NCCL INFO NET/IB : Using [0]mlx5_0:1/IB [1]mlx5_3:1/IB [2]mlx5_4:1/IB [3]mlx5_5:1/IB [4]mlx5_6:1/IB [5]mlx5_7:1/RoCE [6]mlx5_8:1/IB [7]mlx5_9:1/IB [8]mlx5_10:1/IB [RO]; OOB enp170s0np0:10.225.1.186<0>
cnode7-019:254247:255137 [0] NCCL INFO Using non-device net plugin version 0
cnode7-019:254247:255137 [0] NCCL INFO Using network IB
cnode7-019:254248:255136 [1] NCCL INFO NET/IB : Using [0]mlx5_0:1/IB [1]mlx5_3:1/IB [2]mlx5_4:1/IB [3]mlx5_5:1/IB [4]mlx5_6:1/IB [5]mlx5_7:1/RoCE [6]mlx5_8:1/IB [7]mlx5_9:1/IB [8]mlx5_10:1/IB [RO]; OOB enp170s0np0:10.225.1.186<0>
cnode7-019:254248:255136 [1] NCCL INFO Using non-device net plugin version 0
cnode7-019:254248:255136 [1] NCCL INFO Using network IB
cnode7-019:254253:255069 [6] NCCL INFO comm 0x55556ca1a610 rank 14 nranks 16 cudaDev 6 nvmlDev 6 busId d1000 commId 0x41095aaa98a46250 - Init START
cnode7-019:254250:255074 [3] NCCL INFO comm 0x55557c420300 rank 11 nranks 16 cudaDev 3 nvmlDev 3 busId 61000 commId 0x41095aaa98a46250 - Init START
cnode7-019:254251:255072 [4] NCCL INFO comm 0x555560c1a420 rank 12 nranks 16 cudaDev 4 nvmlDev 4 busId 9d000 commId 0x41095aaa98a46250 - Init START
cnode7-019:254252:255073 [5] NCCL INFO comm 0x555580e21130 rank 13 nranks 16 cudaDev 5 nvmlDev 5 busId c3000 commId 0x41095aaa98a46250 - Init START
cnode7-018:4121586:4122550 [0] NCCL INFO comm 0x55556136dea0 rank 0 nranks 16 cudaDev 0 nvmlDev 0 busId 1b000 commId 0x41095aaa98a46250 - Init START
cnode7-018:4121591:4122555 [5] NCCL INFO comm 0x55558061ab60 rank 5 nranks 16 cudaDev 5 nvmlDev 5 busId c3000 commId 0x41095aaa98a46250 - Init START
cnode7-019:254249:255071 [2] NCCL INFO comm 0x555569a23730 rank 10 nranks 16 cudaDev 2 nvmlDev 2 busId 52000 commId 0x41095aaa98a46250 - Init START
cnode7-019:254247:255137 [0] NCCL INFO comm 0x555564d9eae0 rank 8 nranks 16 cudaDev 0 nvmlDev 0 busId 1b000 commId 0x41095aaa98a46250 - Init START
cnode7-019:254254:255068 [7] NCCL INFO comm 0x55557261e8f0 rank 15 nranks 16 cudaDev 7 nvmlDev 7 busId df000 commId 0x41095aaa98a46250 - Init START
cnode7-018:4121588:4122560 [2] NCCL INFO comm 0x55558161caa0 rank 2 nranks 16 cudaDev 2 nvmlDev 2 busId 52000 commId 0x41095aaa98a46250 - Init START
cnode7-018:4121590:4122556 [4] NCCL INFO comm 0x55558061b4f0 rank 4 nranks 16 cudaDev 4 nvmlDev 4 busId 9d000 commId 0x41095aaa98a46250 - Init START
cnode7-019:254248:255136 [1] NCCL INFO comm 0x55557261e430 rank 9 nranks 16 cudaDev 1 nvmlDev 1 busId 43000 commId 0x41095aaa98a46250 - Init START
cnode7-018:4121589:4122557 [3] NCCL INFO comm 0x55557261c1c0 rank 3 nranks 16 cudaDev 3 nvmlDev 3 busId 61000 commId 0x41095aaa98a46250 - Init START
cnode7-018:4121587:4122554 [1] NCCL INFO comm 0x55557bc18d00 rank 1 nranks 16 cudaDev 1 nvmlDev 1 busId 43000 commId 0x41095aaa98a46250 - Init START
cnode7-018:4121593:4122551 [7] NCCL INFO comm 0x55556181aa40 rank 7 nranks 16 cudaDev 7 nvmlDev 7 busId df000 commId 0x41095aaa98a46250 - Init START
cnode7-018:4121592:4122553 [6] NCCL INFO comm 0x55558461d120 rank 6 nranks 16 cudaDev 6 nvmlDev 6 busId d1000 commId 0x41095aaa98a46250 - Init START
cnode7-018:4121588:4122560 [2] NCCL INFO Setting affinity for GPU 2 to ff,ffffffff,ffff0000,00000000,00ffffff,ffffffff
cnode7-018:4121588:4122560 [2] NCCL INFO NVLS multicast support is available on dev 2
cnode7-019:254254:255068 [7] NCCL INFO Setting affinity for GPU 7 to ffffffff,ffffff00,00000000,0000ffff,ffffffff,ff000000,00000000
cnode7-019:254254:255068 [7] NCCL INFO NVLS multicast support is available on dev 7
cnode7-018:4121590:4122556 [4] NCCL INFO Setting affinity for GPU 4 to ffffffff,ffffff00,00000000,0000ffff,ffffffff,ff000000,00000000
cnode7-018:4121590:4122556 [4] NCCL INFO NVLS multicast support is available on dev 4
cnode7-018:4121589:4122557 [3] NCCL INFO Setting affinity for GPU 3 to ff,ffffffff,ffff0000,00000000,00ffffff,ffffffff
cnode7-018:4121589:4122557 [3] NCCL INFO NVLS multicast support is available on dev 3
cnode7-018:4121592:4122553 [6] NCCL INFO Setting affinity for GPU 6 to ffffffff,ffffff00,00000000,0000ffff,ffffffff,ff000000,00000000
cnode7-018:4121592:4122553 [6] NCCL INFO NVLS multicast support is available on dev 6
cnode7-019:254253:255069 [6] NCCL INFO Setting affinity for GPU 6 to ffffffff,ffffff00,00000000,0000ffff,ffffffff,ff000000,00000000
cnode7-019:254253:255069 [6] NCCL INFO NVLS multicast support is available on dev 6
cnode7-019:254249:255071 [2] NCCL INFO Setting affinity for GPU 2 to ff,ffffffff,ffff0000,00000000,00ffffff,ffffffff
cnode7-019:254249:255071 [2] NCCL INFO NVLS multicast support is available on dev 2
cnode7-018:4121587:4122554 [1] NCCL INFO Setting affinity for GPU 1 to ff,ffffffff,ffff0000,00000000,00ffffff,ffffffff
cnode7-018:4121587:4122554 [1] NCCL INFO NVLS multicast support is available on dev 1
cnode7-018:4121593:4122551 [7] NCCL INFO Setting affinity for GPU 7 to ffffffff,ffffff00,00000000,0000ffff,ffffffff,ff000000,00000000
cnode7-018:4121593:4122551 [7] NCCL INFO NVLS multicast support is available on dev 7
cnode7-019:254250:255074 [3] NCCL INFO Setting affinity for GPU 3 to ff,ffffffff,ffff0000,00000000,00ffffff,ffffffff
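Editor's note: "comm.py:652:init_distributed" is DeepSpeed's distributed bootstrap; each rank then joins a single NCCL communicator (commId 0x41095aaa98a46250) spanning nranks 16 = 2 nodes x 8 GPUs. A hedged sketch of the usual entry point; the actual call site inside espnet2/train may pass different arguments:

    import deepspeed

    # Rank and world size are read from the launcher-provided environment
    # (srun/SLURM in this job, via --dist_launcher slurm).
    deepspeed.init_distributed(dist_backend="nccl")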
cnode7-019:254250:255074 [3] NCCL INFO NVLS multicast support is available on dev 3
cnode7-019:254252:255073 [5] NCCL INFO Setting affinity for GPU 5 to ffffffff,ffffff00,00000000,0000ffff,ffffffff,ff000000,00000000
cnode7-019:254252:255073 [5] NCCL INFO NVLS multicast support is available on dev 5
cnode7-018:4121586:4122550 [0] NCCL INFO Setting affinity for GPU 0 to ff,ffffffff,ffff0000,00000000,00ffffff,ffffffff
cnode7-018:4121586:4122550 [0] NCCL INFO NVLS multicast support is available on dev 0
cnode7-018:4121591:4122555 [5] NCCL INFO Setting affinity for GPU 5 to ffffffff,ffffff00,00000000,0000ffff,ffffffff,ff000000,00000000
cnode7-018:4121591:4122555 [5] NCCL INFO NVLS multicast support is available on dev 5
cnode7-019:254247:255137 [0] NCCL INFO Setting affinity for GPU 0 to ff,ffffffff,ffff0000,00000000,00ffffff,ffffffff
cnode7-019:254247:255137 [0] NCCL INFO NVLS multicast support is available on dev 0
cnode7-018:4121593:4122551 [7] NCCL INFO comm 0x55556181aa40 rank 7 nRanks 16 nNodes 2 localRanks 8 localRank 7 MNNVL 0
cnode7-018:4121593:4122551 [7] NCCL INFO NVLS Head 0: 0 8
cnode7-018:4121593:4122551 [7] NCCL INFO NVLS Head 1: 1 9
cnode7-018:4121593:4122551 [7] NCCL INFO NVLS Head 2: 2 10
cnode7-018:4121593:4122551 [7] NCCL INFO NVLS Head 3: 3 11
cnode7-018:4121593:4122551 [7] NCCL INFO NVLS Head 4: 4 12
cnode7-018:4121593:4122551 [7] NCCL INFO NVLS Head 5: 5 13
cnode7-018:4121593:4122551 [7] NCCL INFO NVLS Head 6: 6 14
cnode7-018:4121593:4122551 [7] NCCL INFO NVLS Head 7: 7 15
cnode7-018:4121593:4122551 [7] NCCL INFO Trees [0] -1/-1/-1->7->6 [1] 0/-1/-1->7->6 [2] 0/-1/-1->7->6 [3] 0/-1/-1->7->6 [4] 0/-1/-1->7->6 [5] 0/-1/-1->7->6 [6] 0/-1/-1->7->6 [7] 0/15/-1->7->-1 [8] -1/-1/-1->7->6 [9] 0/-1/-1->7->6 [10] 0/-1/-1->7->6 [11] 0/-1/-1->7->6 [12] 0/-1/-1->7->6 [13] 0/-1/-1->7->6 [14] 0/-1/-1->7->6 [15] 0/-1/-1->7->15
cnode7-018:4121593:4122551 [7] NCCL INFO P2P Chunksize set to 131072
cnode7-018:4121586:4122550 [0] NCCL INFO comm 0x55556136dea0 rank 0 nRanks 16 nNodes 2 localRanks 8 localRank 0 MNNVL 0
cnode7-018:4121586:4122550 [0] NCCL INFO NVLS Head 0: 0 8
cnode7-018:4121586:4122550 [0] NCCL INFO NVLS Head 1: 1 9
cnode7-018:4121586:4122550 [0] NCCL INFO NVLS Head 2: 2 10
cnode7-018:4121586:4122550 [0] NCCL INFO NVLS Head 3: 3 11
cnode7-018:4121586:4122550 [0] NCCL INFO NVLS Head 4: 4 12
cnode7-018:4121586:4122550 [0] NCCL INFO NVLS Head 5: 5 13
cnode7-018:4121586:4122550 [0] NCCL INFO NVLS Head 6: 6 14
cnode7-018:4121586:4122550 [0] NCCL INFO NVLS Head 7: 7 15
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 00/16 : 0 7 6 5 4 3 2 1 9 10 11 12 13 14 15 8
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 01/16 : 0 8 15 14 13 12 11 10 9 1 2 3 4 5 6 7
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 02/16 : 0 7 6 5 4 3 11 12 13 14 15 8 9 10 2 1
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 03/16 : 0 1 2 10 9 8 15 14 13 12 11 3 4 5 6 7
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 04/16 : 0 7 6 5 13 14 15 8 9 10 11 12 4 3 2 1
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 05/16 : 0 1 2 3 4 12 11 10 9 8 15 14 13 5 6 7
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 06/16 : 0 7 15 8 9 10 11 12 13 14 6 5 4 3 2 1
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 07/16 : 0 1 2 3 4 5 6 14 13 12 11 10 9 8 15 7
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 08/16 : 0 7 6 5 4 3 2 1 9 10 11 12 13 14 15 8
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 09/16 : 0 8 15 14 13 12 11 10 9 1 2 3 4 5 6 7
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 10/16 : 0 7 6 5 4 3 11 12 13 14 15 8 9 10 2 1
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 11/16 : 0 1 2 10 9 8 15 14 13 12 11 3 4 5 6 7
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 12/16 : 0 7 6 5 13 14 15 8 9 10 11 12 4 3 2 1
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 13/16 : 0 1 2 3 4 12 11 10 9 8 15 14 13 5 6 7
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 14/16 : 0 7 15 8 9 10 11 12 13 14 6 5 4 3 2 1
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 15/16 : 0 1 2 3 4 5 6 14 13 12 11 10 9 8 15 7
cnode7-018:4121586:4122550 [0] NCCL INFO Trees [0] 1/8/-1->0->-1 [1] -1/-1/-1->0->7 [2] 1/-1/-1->0->7 [3] 1/-1/-1->0->7 [4] 1/-1/-1->0->7 [5] 1/-1/-1->0->7 [6] 1/-1/-1->0->7 [7] 1/-1/-1->0->7 [8] 1/-1/-1->0->8 [9] -1/-1/-1->0->7 [10] 1/-1/-1->0->7 [11] 1/-1/-1->0->7 [12] 1/-1/-1->0->7 [13] 1/-1/-1->0->7 [14] 1/-1/-1->0->7 [15] 1/-1/-1->0->7
cnode7-018:4121586:4122550 [0] NCCL INFO P2P Chunksize set to 131072
cnode7-019:254251:255072 [4] NCCL INFO Setting affinity for GPU 4 to ffffffff,ffffff00,00000000,0000ffff,ffffffff,ff000000,00000000
cnode7-019:254251:255072 [4] NCCL INFO NVLS multicast support is available on dev 4
cnode7-019:254251:255072 [4] NCCL INFO comm 0x555560c1a420 rank 12 nRanks 16 nNodes 2 localRanks 8 localRank 4 MNNVL 0
cnode7-019:254251:255072 [4] NCCL INFO Trees [0] 13/-1/-1->12->11 [1] 13/-1/-1->12->11 [2] 13/-1/-1->12->11 [3] 13/-1/-1->12->11 [4] 13/-1/-1->12->4 [5] -1/-1/-1->12->11 [6] 13/-1/-1->12->11 [7] 13/-1/-1->12->11 [8] 13/-1/-1->12->11 [9] 13/-1/-1->12->11 [10] 13/-1/-1->12->11 [11] 13/-1/-1->12->11 [12] 13/4/-1->12->-1 [13] -1/-1/-1->12->11 [14] 13/-1/-1->12->11 [15] 13/-1/-1->12->11
cnode7-019:254251:255072 [4] NCCL INFO P2P Chunksize set to 131072
cnode7-018:4121591:4122555 [5] NCCL INFO comm 0x55558061ab60 rank 5 nRanks 16 nNodes 2 localRanks 8 localRank 5 MNNVL 0
cnode7-018:4121591:4122555 [5] NCCL INFO NVLS Head 0: 0 8
cnode7-018:4121591:4122555 [5] NCCL INFO NVLS Head 1: 1 9
cnode7-018:4121591:4122555 [5] NCCL INFO NVLS Head 2: 2 10
cnode7-018:4121591:4122555 [5] NCCL INFO NVLS Head 3: 3 11
cnode7-018:4121591:4122555 [5] NCCL INFO NVLS Head 4: 4 12
cnode7-018:4121591:4122555 [5] NCCL INFO NVLS Head 5: 5 13
cnode7-018:4121591:4122555 [5] NCCL INFO NVLS Head 6: 6 14
cnode7-018:4121591:4122555 [5] NCCL INFO NVLS Head 7: 7 15
cnode7-018:4121591:4122555 [5] NCCL INFO Trees [0] 6/-1/-1->5->4 [1] 6/-1/-1->5->4 [2] 6/-1/-1->5->4 [3] 6/-1/-1->5->4 [4] 6/-1/-1->5->4 [5] 6/13/-1->5->-1 [6] -1/-1/-1->5->4 [7] 6/-1/-1->5->4 [8] 6/-1/-1->5->4 [9] 6/-1/-1->5->4 [10] 6/-1/-1->5->4 [11] 6/-1/-1->5->4 [12] 6/-1/-1->5->4 [13] 6/-1/-1->5->13 [14] -1/-1/-1->5->4 [15] 6/-1/-1->5->4
cnode7-018:4121591:4122555 [5] NCCL INFO P2P Chunksize set to 131072
cnode7-018:4121592:4122553 [6] NCCL INFO comm 0x55558461d120 rank 6 nRanks 16 nNodes 2 localRanks 8 localRank 6 MNNVL 0
cnode7-018:4121592:4122553 [6] NCCL INFO NVLS Head 0: 0 8
cnode7-018:4121592:4122553 [6] NCCL INFO NVLS Head 1: 1 9
cnode7-018:4121592:4122553 [6] NCCL INFO NVLS Head 2: 2 10
cnode7-018:4121592:4122553 [6] NCCL INFO NVLS Head 3: 3 11
cnode7-018:4121592:4122553 [6] NCCL INFO NVLS Head 4: 4 12
cnode7-018:4121592:4122553 [6] NCCL INFO NVLS Head 5: 5 13
cnode7-018:4121592:4122553 [6] NCCL INFO NVLS Head 6: 6 14
cnode7-018:4121592:4122553 [6] NCCL INFO NVLS Head 7: 7 15
cnode7-018:4121592:4122553 [6] NCCL INFO Trees [0] 7/-1/-1->6->5 [1] 7/-1/-1->6->5 [2] 7/-1/-1->6->5 [3] 7/-1/-1->6->5 [4] 7/-1/-1->6->5 [5] 7/-1/-1->6->5 [6] 7/14/-1->6->-1 [7] -1/-1/-1->6->5 [8] 7/-1/-1->6->5 [9] 7/-1/-1->6->5 [10] 7/-1/-1->6->5 [11] 7/-1/-1->6->5 [12] 7/-1/-1->6->5 [13] 7/-1/-1->6->5 [14] 7/-1/-1->6->14 [15] -1/-1/-1->6->5
cnode7-018:4121592:4122553 [6] NCCL INFO P2P Chunksize set to 131072
cnode7-019:254253:255069 [6] NCCL INFO comm 0x55556ca1a610 rank 14 nRanks 16 nNodes 2 localRanks 8 localRank 6 MNNVL 0
cnode7-019:254253:255069 [6] NCCL INFO Trees [0] 15/-1/-1->14->13 [1] 15/-1/-1->14->13 [2] 15/-1/-1->14->13 [3] 15/-1/-1->14->13 [4] 15/-1/-1->14->13 [5] 15/-1/-1->14->13 [6] 15/-1/-1->14->6 [7] -1/-1/-1->14->13 [8] 15/-1/-1->14->13 [9] 15/-1/-1->14->13 [10] 15/-1/-1->14->13 [11] 15/-1/-1->14->13 [12] 15/-1/-1->14->13 [13] 15/-1/-1->14->13 [14] 15/6/-1->14->-1 [15] -1/-1/-1->14->13
cnode7-019:254253:255069 [6] NCCL INFO P2P Chunksize set to 131072
cnode7-019:254249:255071 [2] NCCL INFO comm 0x555569a23730 rank 10 nRanks 16 nNodes 2 localRanks 8 localRank 2 MNNVL 0
cnode7-019:254249:255071 [2] NCCL INFO Trees [0] 11/-1/-1->10->9 [1] 11/-1/-1->10->9 [2] 11/-1/-1->10->2 [3] -1/-1/-1->10->9 [4] 11/-1/-1->10->9 [5] 11/-1/-1->10->9 [6] 11/-1/-1->10->9 [7] 11/-1/-1->10->9 [8] 11/-1/-1->10->9 [9] 11/-1/-1->10->9 [10] 11/2/-1->10->-1 [11] -1/-1/-1->10->9 [12] 11/-1/-1->10->9 [13] 11/-1/-1->10->9 [14] 11/-1/-1->10->9 [15] 11/-1/-1->10->9
cnode7-019:254249:255071 [2] NCCL INFO P2P Chunksize set to 131072
cnode7-019:254247:255137 [0] NCCL INFO comm 0x555564d9eae0 rank 8 nRanks 16 nNodes 2 localRanks 8 localRank 0 MNNVL 0
cnode7-019:254247:255137 [0] NCCL INFO Trees [0] 9/-1/-1->8->0 [1] -1/-1/-1->8->15 [2] 9/-1/-1->8->15 [3] 9/-1/-1->8->15 [4] 9/-1/-1->8->15 [5] 9/-1/-1->8->15 [6] 9/-1/-1->8->15 [7] 9/-1/-1->8->15 [8] 9/0/-1->8->-1 [9] -1/-1/-1->8->15 [10] 9/-1/-1->8->15 [11] 9/-1/-1->8->15 [12] 9/-1/-1->8->15 [13] 9/-1/-1->8->15 [14] 9/-1/-1->8->15 [15] 9/-1/-1->8->15
cnode7-019:254247:255137 [0] NCCL INFO P2P Chunksize set to 131072
cnode7-019:254250:255074 [3] NCCL INFO comm 0x55557c420300 rank 11 nRanks 16 nNodes 2 localRanks 8 localRank 3 MNNVL 0
cnode7-019:254250:255074 [3] NCCL INFO Trees [0] 12/-1/-1->11->10 [1] 12/-1/-1->11->10 [2] 12/-1/-1->11->10 [3] 12/-1/-1->11->3 [4] -1/-1/-1->11->10 [5] 12/-1/-1->11->10 [6] 12/-1/-1->11->10 [7] 12/-1/-1->11->10 [8] 12/-1/-1->11->10 [9] 12/-1/-1->11->10 [10] 12/-1/-1->11->10 [11] 12/3/-1->11->-1 [12] -1/-1/-1->11->10 [13] 12/-1/-1->11->10 [14] 12/-1/-1->11->10 [15] 12/-1/-1->11->10
cnode7-019:254250:255074 [3] NCCL INFO P2P Chunksize set to 131072
cnode7-019:254248:255136 [1] NCCL INFO Setting affinity for GPU 1 to ff,ffffffff,ffff0000,00000000,00ffffff,ffffffff
cnode7-019:254248:255136 [1] NCCL INFO NVLS multicast support is available on dev 1
cnode7-019:254248:255136 [1] NCCL INFO comm 0x55557261e430 rank 9 nRanks 16 nNodes 2 localRanks 8 localRank 1 MNNVL 0
cnode7-019:254248:255136 [1] NCCL INFO Trees [0] 10/-1/-1->9->8 [1] 10/-1/-1->9->1 [2] -1/-1/-1->9->8 [3] 10/-1/-1->9->8 [4] 10/-1/-1->9->8 [5] 10/-1/-1->9->8 [6] 10/-1/-1->9->8 [7] 10/-1/-1->9->8 [8] 10/-1/-1->9->8 [9] 10/1/-1->9->-1 [10] -1/-1/-1->9->8 [11] 10/-1/-1->9->8 [12] 10/-1/-1->9->8 [13] 10/-1/-1->9->8 [14] 10/-1/-1->9->8 [15] 10/-1/-1->9->8
cnode7-019:254248:255136 [1] NCCL INFO P2P Chunksize set to 131072
cnode7-019:254252:255073 [5] NCCL INFO comm 0x555580e21130 rank 13 nRanks 16 nNodes 2 localRanks 8 localRank 5 MNNVL 0
cnode7-019:254252:255073 [5] NCCL INFO Trees [0] 14/-1/-1->13->12 [1] 14/-1/-1->13->12 [2] 14/-1/-1->13->12 [3] 14/-1/-1->13->12 [4] 14/-1/-1->13->12 [5] 14/-1/-1->13->5 [6] -1/-1/-1->13->12 [7] 14/-1/-1->13->12 [8] 14/-1/-1->13->12 [9] 14/-1/-1->13->12 [10] 14/-1/-1->13->12 [11] 14/-1/-1->13->12 [12] 14/-1/-1->13->12 [13] 14/5/-1->13->-1 [14] -1/-1/-1->13->12 [15] 14/-1/-1->13->12
cnode7-019:254252:255073 [5] NCCL INFO P2P Chunksize set to 131072
cnode7-019:254254:255068 [7] NCCL INFO comm 0x55557261e8f0 rank 15 nRanks 16 nNodes 2 localRanks 8 localRank 7 MNNVL 0
cnode7-019:254254:255068 [7] NCCL INFO Trees [0] -1/-1/-1->15->14 [1] 8/-1/-1->15->14 [2] 8/-1/-1->15->14 [3] 8/-1/-1->15->14 [4] 8/-1/-1->15->14 [5] 8/-1/-1->15->14 [6] 8/-1/-1->15->14 [7] 8/-1/-1->15->7 [8] -1/-1/-1->15->14 [9] 8/-1/-1->15->14 [10] 8/-1/-1->15->14 [11] 8/-1/-1->15->14 [12] 8/-1/-1->15->14 [13] 8/-1/-1->15->14 [14] 8/-1/-1->15->14 [15] 8/7/-1->15->-1
cnode7-019:254254:255068 [7] NCCL INFO P2P Chunksize set to 131072
cnode7-018:4121588:4122560 [2] NCCL INFO comm 0x55558161caa0 rank 2 nRanks 16 nNodes 2 localRanks 8 localRank 2 MNNVL 0
cnode7-018:4121588:4122560 [2] NCCL INFO NVLS Head 0: 0 8
cnode7-018:4121588:4122560 [2] NCCL INFO NVLS Head 1: 1 9
cnode7-018:4121588:4122560 [2] NCCL INFO NVLS Head 2: 2 10
cnode7-018:4121588:4122560 [2] NCCL INFO NVLS Head 3: 3 11
cnode7-018:4121588:4122560 [2] NCCL INFO NVLS Head 4: 4 12
cnode7-018:4121588:4122560 [2] NCCL INFO NVLS Head 5: 5 13
cnode7-018:4121588:4122560 [2] NCCL INFO NVLS Head 6: 6 14
cnode7-018:4121588:4122560 [2] NCCL INFO NVLS Head 7: 7 15
cnode7-018:4121588:4122560 [2] NCCL INFO Trees [0] 3/-1/-1->2->1 [1] 3/-1/-1->2->1 [2] 3/10/-1->2->-1 [3] -1/-1/-1->2->1 [4] 3/-1/-1->2->1 [5] 3/-1/-1->2->1 [6] 3/-1/-1->2->1 [7] 3/-1/-1->2->1 [8] 3/-1/-1->2->1 [9] 3/-1/-1->2->1 [10] 3/-1/-1->2->10 [11] -1/-1/-1->2->1 [12] 3/-1/-1->2->1 [13] 3/-1/-1->2->1 [14] 3/-1/-1->2->1 [15] 3/-1/-1->2->1
cnode7-018:4121588:4122560 [2] NCCL INFO P2P Chunksize set to 131072
cnode7-018:4121589:4122557 [3] NCCL INFO comm 0x55557261c1c0 rank 3 nRanks 16 nNodes 2 localRanks 8 localRank 3 MNNVL 0
cnode7-018:4121589:4122557 [3] NCCL INFO NVLS Head 0: 0 8
cnode7-018:4121589:4122557 [3] NCCL INFO NVLS Head 1: 1 9
cnode7-018:4121589:4122557 [3] NCCL INFO NVLS Head 2: 2 10
cnode7-018:4121589:4122557 [3] NCCL INFO NVLS Head 3: 3 11
cnode7-018:4121589:4122557 [3] NCCL INFO NVLS Head 4: 4 12
cnode7-018:4121589:4122557 [3] NCCL INFO NVLS Head 5: 5 13
cnode7-018:4121589:4122557 [3] NCCL INFO NVLS Head 6: 6 14
cnode7-018:4121589:4122557 [3] NCCL INFO NVLS Head 7: 7 15
cnode7-018:4121589:4122557 [3] NCCL INFO Trees [0] 4/-1/-1->3->2 [1] 4/-1/-1->3->2 [2] 4/-1/-1->3->2 [3] 4/11/-1->3->-1 [4] -1/-1/-1->3->2 [5] 4/-1/-1->3->2 [6] 4/-1/-1->3->2 [7] 4/-1/-1->3->2 [8] 4/-1/-1->3->2 [9] 4/-1/-1->3->2 [10] 4/-1/-1->3->2 [11] 4/-1/-1->3->11 [12] -1/-1/-1->3->2 [13] 4/-1/-1->3->2 [14] 4/-1/-1->3->2 [15] 4/-1/-1->3->2
cnode7-018:4121589:4122557 [3] NCCL INFO P2P Chunksize set to 131072
cnode7-018:4121590:4122556 [4] NCCL INFO comm 0x55558061b4f0 rank 4 nRanks 16 nNodes 2 localRanks 8 localRank 4 MNNVL 0
cnode7-018:4121590:4122556 [4] NCCL INFO NVLS Head 0: 0 8
cnode7-018:4121590:4122556 [4] NCCL INFO NVLS Head 1: 1 9
cnode7-018:4121590:4122556 [4] NCCL INFO NVLS Head 2: 2 10
cnode7-018:4121590:4122556 [4] NCCL INFO NVLS Head 3: 3 11
cnode7-018:4121590:4122556 [4] NCCL INFO NVLS Head 4: 4 12
cnode7-018:4121590:4122556 [4] NCCL INFO NVLS Head 5: 5 13
cnode7-018:4121590:4122556 [4] NCCL INFO NVLS Head 6: 6 14
cnode7-018:4121590:4122556 [4] NCCL INFO NVLS Head 7: 7 15
cnode7-018:4121590:4122556 [4] NCCL INFO Trees [0] 5/-1/-1->4->3 [1] 5/-1/-1->4->3 [2] 5/-1/-1->4->3 [3] 5/-1/-1->4->3 [4] 5/12/-1->4->-1 [5] -1/-1/-1->4->3 [6] 5/-1/-1->4->3 [7] 5/-1/-1->4->3 [8] 5/-1/-1->4->3 [9] 5/-1/-1->4->3 [10] 5/-1/-1->4->3 [11] 5/-1/-1->4->3 [12] 5/-1/-1->4->12 [13] -1/-1/-1->4->3 [14] 5/-1/-1->4->3 [15] 5/-1/-1->4->3
cnode7-018:4121590:4122556 [4] NCCL INFO P2P Chunksize set to 131072
cnode7-019:254248:255136 [1] NCCL INFO Channel 00/0 : 9[1] -> 10[2] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Channel 02/0 : 9[1] -> 10[2] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Channel 04/0 : 9[1] -> 10[2] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Channel 06/0 : 9[1] -> 10[2] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Channel 08/0 : 9[1] -> 10[2] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Channel 10/0 : 9[1] -> 10[2] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Channel 12/0 : 9[1] -> 10[2] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Channel 14/0 : 9[1] -> 10[2] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO comm 0x55557bc18d00 rank 1 nRanks 16 nNodes 2 localRanks 8 localRank 1 MNNVL 0
cnode7-018:4121587:4122554 [1] NCCL INFO NVLS Head 0: 0 8
cnode7-018:4121587:4122554 [1] NCCL INFO NVLS Head 1: 1 9
cnode7-018:4121587:4122554 [1] NCCL INFO NVLS Head 2: 2 10
cnode7-018:4121587:4122554 [1] NCCL INFO NVLS Head 3: 3 11
cnode7-018:4121587:4122554 [1] NCCL INFO NVLS Head 4: 4 12
cnode7-018:4121587:4122554 [1] NCCL INFO NVLS Head 5: 5 13
cnode7-018:4121587:4122554 [1] NCCL INFO NVLS Head 6: 6 14
cnode7-018:4121587:4122554 [1] NCCL INFO NVLS Head 7: 7 15
cnode7-018:4121587:4122554 [1] NCCL INFO Trees [0] 2/-1/-1->1->0 [1] 2/9/-1->1->-1 [2] -1/-1/-1->1->0 [3] 2/-1/-1->1->0 [4] 2/-1/-1->1->0 [5] 2/-1/-1->1->0 [6] 2/-1/-1->1->0 [7] 2/-1/-1->1->0 [8] 2/-1/-1->1->0 [9] 2/-1/-1->1->9 [10] -1/-1/-1->1->0 [11] 2/-1/-1->1->0 [12] 2/-1/-1->1->0 [13] 2/-1/-1->1->0 [14] 2/-1/-1->1->0 [15] 2/-1/-1->1->0
cnode7-018:4121587:4122554 [1] NCCL INFO P2P Chunksize set to 131072
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 01/0 : 6[6] -> 7[7] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 03/0 : 6[6] -> 7[7] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 05/0 : 6[6] -> 7[7] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 09/0 : 6[6] -> 7[7] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 11/0 : 6[6] -> 7[7] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 13/0 : 6[6] -> 7[7] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 06/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 14/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 07/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 15/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 00/0 : 14[6] -> 15[7] via P2P/CUMEM
cnode7-019:254253:255069 [6] NCCL INFO Channel 02/0 : 14[6] -> 15[7] via P2P/CUMEM
cnode7-019:254253:255069 [6] NCCL INFO Channel 04/0 : 14[6] -> 15[7] via P2P/CUMEM
cnode7-019:254253:255069 [6] NCCL INFO Channel 08/0 : 14[6] -> 15[7] via P2P/CUMEM
cnode7-019:254253:255069 [6] NCCL INFO Channel 10/0 : 14[6] -> 15[7] via P2P/CUMEM
cnode7-019:254253:255069 [6] NCCL INFO Channel 12/0 : 14[6] -> 15[7] via P2P/CUMEM
cnode7-019:254253:255069 [6] NCCL INFO Channel 07/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 15/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 06/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 14/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 01/0 : 14[6] -> 13[5] via P2P/CUMEM
cnode7-019:254253:255069 [6] NCCL INFO Channel 03/0 : 14[6] -> 13[5] via P2P/CUMEM
cnode7-019:254253:255069 [6] NCCL INFO Channel 05/0 : 14[6] -> 13[5] via P2P/CUMEM
cnode7-019:254253:255069 [6] NCCL INFO Channel 07/0 : 14[6] -> 13[5] via P2P/CUMEM
cnode7-019:254253:255069 [6] NCCL INFO Channel 09/0 : 14[6] -> 13[5] via P2P/CUMEM
cnode7-019:254253:255069 [6] NCCL INFO Channel 11/0 : 14[6] -> 13[5] via P2P/CUMEM
cnode7-019:254253:255069 [6] NCCL INFO Channel 13/0 : 14[6] -> 13[5] via P2P/CUMEM
cnode7-019:254253:255069 [6] NCCL INFO Channel 15/0 : 14[6] -> 13[5] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 00/0 : 11[3] -> 12[4] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 02/0 : 11[3] -> 12[4] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 04/0 : 11[3] -> 12[4] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 06/0 : 11[3] -> 12[4] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 08/0 : 11[3] -> 12[4] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 10/0 : 11[3] -> 12[4] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 12/0 : 11[3] -> 12[4] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 14/0 : 11[3] -> 12[4] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 02/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 10/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 03/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 11/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 00/0 : 12[4] -> 13[5] via P2P/CUMEM
cnode7-019:254251:255072 [4] NCCL INFO Channel 02/0 : 12[4] -> 13[5] via P2P/CUMEM
cnode7-019:254251:255072 [4] NCCL INFO Channel 06/0 : 12[4] -> 13[5] via P2P/CUMEM
cnode7-019:254251:255072 [4] NCCL INFO Channel 08/0 : 12[4] -> 13[5] via P2P/CUMEM
cnode7-019:254251:255072 [4] NCCL INFO Channel 10/0 : 12[4] -> 13[5] via P2P/CUMEM
cnode7-019:254251:255072 [4] NCCL INFO Channel 14/0 : 12[4] -> 13[5] via P2P/CUMEM
cnode7-019:254251:255072 [4] NCCL INFO Channel 05/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 13/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 04/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 12/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 00/0 : 13[5] -> 14[6] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 02/0 : 13[5] -> 14[6] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 04/0 : 13[5] -> 14[6] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 06/0 : 13[5] -> 14[6] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 08/0 : 13[5] -> 14[6] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 10/0 : 13[5] -> 14[6] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 12/0 : 13[5] -> 14[6] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 14/0 : 13[5] -> 14[6] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 04/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 12/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 05/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 13/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 03/0 : 0[0] -> 1[1] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 05/0 : 0[0] -> 1[1] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 07/0 : 0[0] -> 1[1] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 11/0 : 0[0] -> 1[1] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 13/0 : 0[0] -> 1[1] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 15/0 : 0[0] -> 1[1] via P2P/CUMEM
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 01/0 : 5[5] -> 6[6] via P2P/CUMEM
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 03/0 : 5[5] -> 6[6] via P2P/CUMEM
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 05/0 : 5[5] -> 6[6] via P2P/CUMEM
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 07/0 : 5[5] -> 6[6] via P2P/CUMEM
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 09/0 : 5[5] -> 6[6] via P2P/CUMEM
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 11/0 : 5[5] -> 6[6] via P2P/CUMEM
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 13/0 : 5[5] -> 6[6] via P2P/CUMEM
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 15/0 : 5[5] -> 6[6] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Channel 00/0 : 10[2] -> 11[3] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Channel 04/0 : 10[2] -> 11[3] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Channel 06/0 : 10[2] -> 11[3] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Channel 08/0 : 10[2] -> 11[3] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Channel 12/0 : 10[2] -> 11[3] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Channel 14/0 : 10[2] -> 11[3] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Channel 03/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 11/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 02/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 10/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 01/0 : 2[2] -> 3[3] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 05/0 : 2[2] -> 3[3] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 07/0 : 2[2] -> 3[3] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 09/0 : 2[2] -> 3[3] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 13/0 : 2[2] -> 3[3] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 15/0 : 2[2] -> 3[3] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 02/0 : 8[0] -> 9[1] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 04/0 : 8[0] -> 9[1] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 06/0 : 8[0] -> 9[1] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 10/0 : 8[0] -> 9[1] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 12/0 : 8[0] -> 9[1] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 14/0 : 8[0] -> 9[1] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 01/0 : 8[0] -> 15[7] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 03/0 : 8[0] -> 15[7] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 05/0 : 8[0] -> 15[7] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 07/0 : 8[0] -> 15[7] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 09/0 : 8[0] -> 15[7] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 11/0 : 8[0] -> 15[7] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 13/0 : 8[0] -> 15[7] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 15/0 : 8[0] -> 15[7] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 01/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 09/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 00/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 08/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 06/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 14/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 07/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 15/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 00/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 08/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 01/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 09/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 01/0 : 3[3] -> 4[4] via P2P/CUMEM
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 03/0 : 3[3] -> 4[4] via P2P/CUMEM
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 05/0 : 3[3] -> 4[4] via P2P/CUMEM
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 07/0 : 3[3] -> 4[4] via P2P/CUMEM
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 09/0 : 3[3] -> 4[4] via P2P/CUMEM
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 11/0 : 3[3] -> 4[4] via P2P/CUMEM
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 13/0 : 3[3] -> 4[4] via P2P/CUMEM
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 15/0 : 3[3] -> 4[4] via P2P/CUMEM
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 03/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 11/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 02/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 10/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 01/0 : 1[1] -> 2[2] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 00/0 : 6[6] -> 5[5] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 02/0 : 6[6] -> 5[5] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 04/0 : 6[6] -> 5[5] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 06/0 : 6[6] -> 5[5] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 08/0 : 6[6] -> 5[5] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 10/0 : 6[6] -> 5[5] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 12/0 : 6[6] -> 5[5] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 14/0 : 6[6] -> 5[5] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 01/0 : 11[3] -> 10[2] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 05/0 : 11[3] -> 10[2] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 07/0 : 11[3] -> 10[2] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 09/0 : 11[3] -> 10[2] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 13/0 : 11[3] -> 10[2] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 15/0 : 11[3] -> 10[2] via P2P/CUMEM
cnode7-019:254251:255072 [4] NCCL INFO Channel 01/0 : 12[4] -> 11[3] via P2P/CUMEM
cnode7-019:254251:255072 [4] NCCL INFO Channel 03/0 : 12[4] -> 11[3] via P2P/CUMEM
cnode7-019:254251:255072 [4] NCCL INFO Channel 05/0 : 12[4] -> 11[3] via P2P/CUMEM
cnode7-019:254251:255072 [4] NCCL INFO Channel 07/0 : 12[4] -> 11[3] via P2P/CUMEM
cnode7-019:254251:255072 [4] NCCL INFO Channel 09/0 : 12[4] -> 11[3] via P2P/CUMEM
cnode7-019:254251:255072 [4] NCCL INFO Channel 11/0 : 12[4] -> 11[3] via P2P/CUMEM
cnode7-019:254251:255072 [4] NCCL INFO Channel 13/0 : 12[4] -> 11[3] via P2P/CUMEM
cnode7-019:254251:255072 [4] NCCL INFO Channel 15/0 : 12[4] -> 11[3] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 01/0 : 13[5] -> 12[4] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 03/0 : 13[5] -> 12[4] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 07/0 : 13[5] -> 12[4] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 09/0 : 13[5] -> 12[4] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 11/0 : 13[5] -> 12[4] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 15/0 : 13[5] -> 12[4] via P2P/CUMEM
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 05/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 13/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 04/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 12/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 00/0 : 5[5] -> 4[4] via P2P/CUMEM
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 02/0 : 5[5] -> 4[4] via P2P/CUMEM
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 06/0 : 5[5] -> 4[4] via P2P/CUMEM
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 08/0 : 5[5] -> 4[4] via P2P/CUMEM
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 10/0 : 5[5] -> 4[4] via P2P/CUMEM
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 14/0 : 5[5] -> 4[4] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 01/0 : 4[4] -> 5[5] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 03/0 : 4[4] -> 5[5] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 07/0 : 4[4] -> 5[5] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 09/0 : 4[4] -> 5[5] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 11/0 : 4[4] -> 5[5] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 15/0 : 4[4] -> 5[5] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 04/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 12/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 05/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 13/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 00/0 : 4[4] -> 3[3] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 02/0 : 4[4] -> 3[3] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 04/0 : 4[4] -> 3[3] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 06/0 : 4[4] -> 3[3] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 08/0 : 4[4] -> 3[3] via P2P/CUMEM
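Editor's note: in the channel lines above, intra-node hops (e.g. 9[1] -> 10[2]) travel over GPU peer-to-peer (P2P/CUMEM), while the cross-node hops are the paired [send]/[receive] records over NET/IB/<dev>/GDRDMA. The "rank N ... localRank r" records follow a block layout over the two 8-GPU nodes; a tiny hypothetical helper (not from ESPnet or NCCL) that reproduces that mapping:

    # Hypothetical helper: global rank -> (node index, local rank) for this
    # job's 2-node x 8-GPU layout.
    def node_and_local_rank(rank: int, gpus_per_node: int = 8) -> tuple[int, int]:
        return divmod(rank, gpus_per_node)

    assert node_and_local_rank(14) == (1, 6)  # "rank 14 ... localRank 6" on cnode7-019
    assert node_and_local_rank(5) == (0, 5)   # "rank 5 ... localRank 5" on cnode7-018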
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 10/0 : 4[4] -> 3[3] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 12/0 : 4[4] -> 3[3] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 14/0 : 4[4] -> 3[3] via P2P/CUMEM
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 00/0 : 3[3] -> 2[2] via P2P/CUMEM
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 04/0 : 3[3] -> 2[2] via P2P/CUMEM
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 06/0 : 3[3] -> 2[2] via P2P/CUMEM
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 08/0 : 3[3] -> 2[2] via P2P/CUMEM
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 12/0 : 3[3] -> 2[2] via P2P/CUMEM
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 14/0 : 3[3] -> 2[2] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 03/0 : 1[1] -> 2[2] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 05/0 : 1[1] -> 2[2] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 07/0 : 1[1] -> 2[2] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 09/0 : 1[1] -> 2[2] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 11/0 : 1[1] -> 2[2] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 13/0 : 1[1] -> 2[2] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 15/0 : 1[1] -> 2[2] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 01/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 09/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 00/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 08/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 00/0 : 0[0] -> 7[7] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 02/0 : 0[0] -> 7[7] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 04/0 : 0[0] -> 7[7] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 06/0 : 0[0] -> 7[7] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 08/0 : 0[0] -> 7[7] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 10/0 : 0[0] -> 7[7] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 12/0 : 0[0] -> 7[7] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 14/0 : 0[0] -> 7[7] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 00/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 08/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 01/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 09/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 01/0 : 10[2] -> 9[1] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Channel 03/0 : 10[2] -> 9[1] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Channel 05/0 : 10[2] -> 9[1] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Channel 07/0 : 10[2] -> 9[1] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Channel 09/0 : 10[2] -> 9[1] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Channel 11/0 : 10[2] -> 9[1] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Channel 13/0 : 10[2] -> 9[1] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Channel 15/0 : 10[2] -> 9[1] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 02/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 10/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 03/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 11/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 00/0 : 2[2] -> 1[1] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 02/0 : 2[2] -> 1[1] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 04/0 : 2[2] -> 1[1] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 06/0 : 2[2] -> 1[1] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 08/0 : 2[2] -> 1[1] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 10/0 : 2[2] -> 1[1] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 12/0 : 2[2] -> 1[1] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 14/0 : 2[2] -> 1[1] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 00/0 : 15[7] -> 8[0] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 02/0 : 15[7] -> 8[0] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 04/0 : 15[7] -> 8[0] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 06/0 : 15[7] -> 8[0] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 08/0 : 15[7] -> 8[0] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 10/0 : 15[7] -> 8[0] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Channel 03/0 : 9[1] -> 8[0] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Channel 05/0 : 9[1] -> 8[0] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Channel 07/0 : 9[1] -> 8[0] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Channel 11/0 : 9[1] -> 8[0] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Channel 13/0 : 9[1] -> 8[0] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Channel 15/0 : 9[1] -> 8[0] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 02/0 : 1[1] -> 0[0] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 04/0 : 1[1] -> 0[0] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 06/0 : 1[1] -> 0[0] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 10/0 : 1[1] -> 0[0] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 12/0 : 1[1] -> 0[0] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 14/0 : 1[1] -> 0[0] via P2P/CUMEM
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 07/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 15/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 06/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 14/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 01/0 : 7[7] -> 0[0] via P2P/CUMEM
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 03/0 : 7[7] -> 0[0] via P2P/CUMEM
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 05/0 : 7[7] -> 0[0] via P2P/CUMEM
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 07/0 : 7[7] -> 0[0] via P2P/CUMEM
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 09/0 : 7[7] -> 0[0] via P2P/CUMEM
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 11/0 : 7[7] -> 0[0] via P2P/CUMEM
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 13/0 : 7[7] -> 0[0] via P2P/CUMEM
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 15/0 : 7[7] -> 0[0] via P2P/CUMEM
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 00/0 : 7[7] -> 6[6] via P2P/CUMEM
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 02/0 : 7[7] -> 6[6] via P2P/CUMEM
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 04/0 : 7[7] -> 6[6] via P2P/CUMEM
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 08/0 : 7[7] -> 6[6] via P2P/CUMEM
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 10/0 : 7[7] -> 6[6] via P2P/CUMEM
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 12/0 : 7[7] -> 6[6] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 12/0 : 15[7] -> 8[0] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 14/0 : 15[7] -> 8[0] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 01/0 : 15[7] -> 14[6] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 03/0 : 15[7] -> 14[6] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 05/0 : 15[7] -> 14[6] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 09/0 : 15[7] -> 14[6] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 11/0 : 15[7] -> 14[6] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 13/0 : 15[7] -> 14[6] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Connected all rings
cnode7-018:4121589:4122557 [3] NCCL INFO Connected all rings
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 00/0 : 3[3] -> 4[4] via P2P/CUMEM
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 02/0 : 3[3] -> 4[4] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Connected all rings
cnode7-019:254250:255074 [3] NCCL INFO Channel 01/0 : 11[3] -> 12[4] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 03/0 : 11[3] -> 12[4] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 05/0 : 11[3] -> 12[4] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 07/0 : 11[3] -> 12[4] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 09/0 : 11[3] -> 12[4] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 11/0 : 11[3] -> 12[4] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 13/0 : 11[3] -> 12[4] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 15/0 : 11[3] -> 12[4] via P2P/CUMEM
cnode7-019:254251:255072 [4] NCCL INFO Connected all rings
cnode7-019:254251:255072 [4] NCCL INFO Channel 01/0 : 12[4] -> 13[5] via P2P/CUMEM
cnode7-019:254251:255072 [4] NCCL INFO Channel 03/0 : 12[4] -> 13[5] via P2P/CUMEM
cnode7-019:254251:255072 [4] NCCL INFO Channel 04/0 : 12[4] -> 13[5] via P2P/CUMEM
cnode7-019:254251:255072 [4] NCCL INFO Channel 07/0 : 12[4] -> 13[5] via P2P/CUMEM
cnode7-019:254251:255072 [4] NCCL INFO Channel 09/0 : 12[4] -> 13[5] via P2P/CUMEM
cnode7-019:254251:255072 [4] NCCL INFO Channel 11/0 : 12[4] -> 13[5] via P2P/CUMEM
cnode7-019:254251:255072 [4] NCCL INFO Channel 12/0 : 12[4] -> 13[5] via P2P/CUMEM
cnode7-019:254251:255072 [4] NCCL INFO Channel 15/0 : 12[4] -> 13[5] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Connected all rings
cnode7-019:254252:255073 [5] NCCL INFO Channel 01/0 : 13[5] -> 14[6] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 03/0 : 13[5] -> 14[6] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 05/0 : 13[5] -> 14[6] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 07/0 : 13[5] -> 14[6] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 09/0 : 13[5] -> 14[6] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 11/0 : 13[5] -> 14[6] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 13/0 : 13[5] -> 14[6] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 15/0 : 13[5] -> 14[6] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Connected all rings
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 00/0 : 0[0] -> 1[1] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 02/0 : 0[0] -> 1[1] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 04/0 : 0[0] -> 1[1] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 06/0 : 0[0] -> 1[1] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 08/0 : 0[0] -> 1[1] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 10/0 : 0[0] -> 1[1] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 12/0 : 0[0] -> 1[1] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 14/0 : 0[0] -> 1[1] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 01/0 : 0[0] -> 7[7] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 03/0 : 0[0] -> 7[7] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 05/0 : 0[0] -> 7[7] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 07/0 : 0[0] -> 7[7] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 09/0 : 0[0] -> 7[7] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 11/0 : 0[0] -> 7[7] via P2P/CUMEM
cnode7-018:4121591:4122555 [5] NCCL INFO Connected all rings
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 00/0 : 5[5] -> 6[6] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Connected all rings
cnode7-019:254249:255071 [2] NCCL INFO Channel 01/0 : 10[2] -> 11[3] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Channel 02/0 : 10[2] -> 11[3] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Channel 05/0 : 10[2] -> 11[3] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Channel 07/0 : 10[2] -> 11[3] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Channel 09/0 : 10[2] -> 11[3] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Channel 10/0 : 10[2] -> 11[3] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Channel 13/0 : 10[2] -> 11[3] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Channel 15/0 : 10[2] -> 11[3] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Channel 02/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 10/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Connected all rings
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 00/0 : 2[2] -> 3[3] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 02/0 : 2[2] -> 3[3] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 04/0 : 2[2] -> 3[3] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 06/0 : 2[2] -> 3[3] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 08/0 : 2[2] -> 3[3] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 10/0 : 2[2] -> 3[3] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 12/0 : 2[2] -> 3[3] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 14/0 : 2[2] -> 3[3] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 02/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 10/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Connected all rings
cnode7-019:254247:255137 [0] NCCL INFO Channel 00/0 : 8[0] -> 9[1] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 03/0 : 8[0] -> 9[1] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 05/0 : 8[0] -> 9[1] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 07/0 : 8[0] -> 9[1] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 08/0 : 8[0] -> 9[1] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 11/0 : 8[0] -> 9[1] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 13/0 : 8[0] -> 9[1] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 15/0 : 8[0] -> 9[1] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 02/0 : 8[0] -> 15[7] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 04/0 : 8[0] -> 15[7] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 06/0 : 8[0] -> 15[7] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 10/0 : 8[0] -> 15[7] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 12/0 : 8[0] -> 15[7] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 14/0 : 8[0] -> 15[7] via P2P/CUMEM
cnode7-019:254247:255137 [0] NCCL INFO Channel 00/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 08/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Connected all rings
cnode7-019:254254:255068 [7] NCCL INFO Channel 07/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 15/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 00/0 : 4[4] -> 5[5] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 02/0 : 4[4] -> 5[5] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 04/0 : 4[4] -> 5[5] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 06/0 : 4[4] -> 5[5] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 08/0 : 4[4] -> 5[5] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 10/0 : 4[4] -> 5[5] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 12/0 : 4[4] -> 5[5] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 14/0 : 4[4] -> 5[5] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 04/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 12/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 01/0 : 4[4] -> 3[3] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Connected all rings
cnode7-019:254248:255136 [1] NCCL INFO Channel 01/0 : 9[1] -> 10[2] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Channel 03/0 : 9[1] -> 10[2] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Channel 05/0 : 9[1] -> 10[2] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Channel 07/0 : 9[1] -> 10[2] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Channel 09/0 : 9[1] -> 10[2] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Channel 11/0 : 9[1] -> 10[2] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Channel 13/0 : 9[1] -> 10[2] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Channel 15/0 : 9[1] -> 10[2] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Channel 01/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 09/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 00/0 : 9[1] -> 8[0] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Channel 02/0 : 9[1] -> 8[0] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Channel 04/0 : 9[1] -> 8[0] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Channel 06/0 : 9[1] -> 8[0] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Channel 08/0 : 9[1] -> 8[0] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Channel 10/0 : 9[1] -> 8[0] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Channel 12/0 : 9[1] -> 8[0] via P2P/CUMEM
cnode7-019:254248:255136 [1] NCCL INFO Channel 14/0 : 9[1] -> 8[0] via P2P/CUMEM
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 06/0 : 3[3] -> 4[4] via P2P/CUMEM
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 08/0 : 3[3] -> 4[4] via P2P/CUMEM
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 10/0 : 3[3] -> 4[4] via P2P/CUMEM
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 14/0 : 3[3] -> 4[4] via P2P/CUMEM
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 03/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 11/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 01/0 : 3[3] -> 2[2] via P2P/CUMEM
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 02/0 : 3[3] -> 2[2] via P2P/CUMEM
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 05/0 : 3[3] -> 2[2] via P2P/CUMEM
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 07/0 : 3[3] -> 2[2] via P2P/CUMEM
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 09/0 : 3[3] -> 2[2] via P2P/CUMEM
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 10/0 : 3[3] -> 2[2] via P2P/CUMEM
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 13/0 : 3[3] -> 2[2] via P2P/CUMEM
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 15/0 : 3[3] -> 2[2] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Connected all rings
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 00/0 : 6[6] -> 7[7] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 02/0 : 6[6] -> 7[7] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 04/0 : 6[6] -> 7[7] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 06/0 : 6[6] -> 7[7] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 08/0 : 6[6] -> 7[7] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 10/0 : 6[6] -> 7[7] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 12/0 : 6[6] -> 7[7] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 14/0 : 6[6] -> 7[7] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 06/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 14/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 01/0 : 6[6] -> 5[5] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 03/0 : 6[6] -> 5[5] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 05/0 : 6[6] -> 5[5] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 07/0 : 6[6] -> 5[5] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 09/0 : 6[6] -> 5[5] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 11/0 : 6[6] -> 5[5] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 13/0 : 6[6] -> 5[5] via P2P/CUMEM
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 15/0 : 6[6] -> 5[5] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO Connected all rings
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 00/0 : 1[1] -> 2[2] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 04/0 : 1[1] -> 2[2] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 06/0 : 1[1] -> 2[2] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 08/0 : 1[1] -> 2[2] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 12/0 : 1[1] -> 2[2] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 14/0 : 1[1] -> 2[2] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 01/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 09/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 00/0 : 1[1] -> 0[0] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 03/0 : 1[1] -> 0[0] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 05/0 : 1[1] -> 0[0] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 07/0 : 1[1] -> 0[0] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 08/0 : 1[1] -> 0[0] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 11/0 : 1[1] -> 0[0] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 13/0 : 1[1] -> 0[0] via P2P/CUMEM
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 15/0 : 1[1] -> 0[0] via P2P/CUMEM
cnode7-018:4121593:4122551 [7] NCCL INFO Connected all rings
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 07/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 15/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 02/0 : 7[7] -> 0[0] via P2P/CUMEM
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 04/0 : 7[7] -> 0[0] via P2P/CUMEM
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 06/0 : 7[7] -> 0[0] via P2P/CUMEM
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 10/0 : 7[7] -> 0[0] via P2P/CUMEM
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 12/0 : 7[7] -> 0[0] via P2P/CUMEM
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 14/0 : 7[7] -> 0[0] via P2P/CUMEM
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 01/0 : 7[7] -> 6[6] via P2P/CUMEM
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 03/0 : 7[7] -> 6[6] via P2P/CUMEM
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 05/0 : 7[7] -> 6[6] via P2P/CUMEM
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 06/0 : 7[7] -> 6[6] via P2P/CUMEM
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 09/0 : 7[7] -> 6[6] via P2P/CUMEM
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 11/0 : 7[7] -> 6[6] via P2P/CUMEM
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 13/0 : 7[7] -> 6[6] via P2P/CUMEM
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 14/0 : 7[7] -> 6[6] via P2P/CUMEM
cnode7-019:254253:255069 [6] NCCL INFO Connected all rings
cnode7-019:254253:255069 [6] NCCL INFO Channel 01/0 : 14[6] -> 15[7] via P2P/CUMEM
cnode7-019:254253:255069 [6] NCCL INFO Channel 03/0 : 14[6] -> 15[7] via P2P/CUMEM
cnode7-019:254253:255069 [6] NCCL INFO Channel 05/0 : 14[6] -> 15[7] via P2P/CUMEM
cnode7-019:254253:255069 [6] NCCL INFO Channel 06/0 : 14[6] -> 15[7] via P2P/CUMEM
cnode7-019:254253:255069 [6] NCCL INFO Channel 09/0 : 14[6] -> 15[7] via P2P/CUMEM
cnode7-019:254253:255069 [6] NCCL INFO Channel 11/0 : 14[6] -> 15[7] via P2P/CUMEM
cnode7-019:254253:255069 [6] NCCL INFO Channel 13/0 : 14[6] -> 15[7] via P2P/CUMEM
cnode7-019:254253:255069 [6] NCCL INFO Channel 14/0 : 14[6] -> 15[7] via P2P/CUMEM
cnode7-019:254253:255069 [6] NCCL INFO Channel 06/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 14/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 00/0 : 14[6] -> 13[5] via P2P/CUMEM
cnode7-019:254253:255069 [6] NCCL INFO Channel 02/0 : 14[6] -> 13[5] via P2P/CUMEM
cnode7-019:254253:255069 [6] NCCL INFO Channel 04/0 : 14[6] -> 13[5] via P2P/CUMEM
cnode7-019:254253:255069 [6] NCCL INFO Channel 08/0 : 14[6] -> 13[5] via P2P/CUMEM
cnode7-019:254253:255069 [6] NCCL INFO Channel 10/0 : 14[6] -> 13[5] via P2P/CUMEM
cnode7-019:254253:255069 [6] NCCL INFO Channel 12/0 : 14[6] -> 13[5] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 03/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 11/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 00/0 : 11[3] -> 10[2] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 02/0 : 11[3] -> 10[2] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 04/0 : 11[3] -> 10[2] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 06/0 : 11[3] -> 10[2] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 08/0 : 11[3] -> 10[2] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 10/0 : 11[3] -> 10[2] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 12/0 : 11[3] -> 10[2] via P2P/CUMEM
cnode7-019:254250:255074 [3] NCCL INFO Channel 14/0 : 11[3] -> 10[2] via P2P/CUMEM
cnode7-019:254251:255072 [4] NCCL INFO Channel 04/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 12/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 00/0 : 12[4] -> 11[3] via P2P/CUMEM
cnode7-019:254251:255072 [4] NCCL INFO Channel 02/0 : 12[4] -> 11[3] via P2P/CUMEM
cnode7-019:254251:255072 [4] NCCL INFO Channel 06/0 : 12[4] -> 11[3] via P2P/CUMEM
cnode7-019:254251:255072 [4] NCCL INFO Channel 08/0 : 12[4] -> 11[3] via P2P/CUMEM
cnode7-019:254251:255072 [4] NCCL INFO Channel 10/0 : 12[4] -> 11[3] via P2P/CUMEM
cnode7-019:254251:255072 [4] NCCL INFO Channel 14/0 : 12[4] -> 11[3] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 05/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 13/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 00/0 : 13[5] -> 12[4] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 02/0 : 13[5] -> 12[4] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 04/0 : 13[5] -> 12[4] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 06/0 : 13[5] -> 12[4] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 08/0 : 13[5] -> 12[4] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 10/0 : 13[5] -> 12[4] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 12/0 : 13[5] -> 12[4] via P2P/CUMEM
cnode7-019:254252:255073 [5] NCCL INFO Channel 14/0 : 13[5] -> 12[4] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 13/0 : 0[0] -> 7[7] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 15/0 : 0[0] -> 7[7] via P2P/CUMEM
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 00/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 08/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 02/0 : 5[5] -> 6[6] via P2P/CUMEM
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 04/0 : 5[5] -> 6[6] via P2P/CUMEM
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 08/0 : 5[5] -> 6[6] via P2P/CUMEM
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 10/0 : 5[5] -> 6[6] via P2P/CUMEM
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 12/0 : 5[5] -> 6[6] via P2P/CUMEM
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 05/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 13/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 01/0 : 5[5] -> 4[4] via P2P/CUMEM
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 03/0 : 5[5] -> 4[4] via P2P/CUMEM
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 04/0 : 5[5] -> 4[4] via P2P/CUMEM
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 07/0 : 5[5] -> 4[4] via P2P/CUMEM
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 09/0 : 5[5] -> 4[4] via P2P/CUMEM
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 11/0 : 5[5] -> 4[4] via P2P/CUMEM
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 12/0 : 5[5] -> 4[4] via P2P/CUMEM
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 15/0 : 5[5] -> 4[4] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Channel 00/0 : 10[2] -> 9[1] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Channel 04/0 : 10[2] -> 9[1] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Channel 06/0 : 10[2] -> 9[1] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Channel 08/0 : 10[2] -> 9[1] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Channel 12/0 : 10[2] -> 9[1] via P2P/CUMEM
cnode7-019:254249:255071 [2] NCCL INFO Channel 14/0 : 10[2] -> 9[1] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 01/0 : 2[2] -> 1[1] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 03/0 : 2[2] -> 1[1] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 05/0 : 2[2] -> 1[1] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 07/0 : 2[2] -> 1[1] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 09/0 : 2[2] -> 1[1] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 11/0 : 2[2] -> 1[1] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 13/0 : 2[2] -> 1[1] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 15/0 : 2[2] -> 1[1] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 01/0 : 15[7] -> 8[0] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 03/0 : 15[7] -> 8[0] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 05/0 : 15[7] -> 8[0] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 07/0 : 15[7] -> 8[0] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 09/0 : 15[7] -> 8[0] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 11/0 : 15[7] -> 8[0] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 13/0 : 15[7] -> 8[0] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 15/0 : 15[7] -> 8[0] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 00/0 : 15[7] -> 14[6] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 02/0 : 15[7] -> 14[6] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 04/0 : 15[7] -> 14[6] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 06/0 : 15[7] -> 14[6] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 08/0 : 15[7] -> 14[6] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 10/0 : 15[7] -> 14[6] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 12/0 : 15[7] -> 14[6] via P2P/CUMEM
cnode7-019:254254:255068 [7] NCCL INFO Channel 14/0 : 15[7] -> 14[6] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 03/0 : 4[4] -> 3[3] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 05/0 : 4[4] -> 3[3] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 07/0 : 4[4] -> 3[3] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 09/0 : 4[4] -> 3[3] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 11/0 : 4[4] -> 3[3] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 13/0 : 4[4] -> 3[3] via P2P/CUMEM
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 15/0 : 4[4] -> 3[3] via P2P/CUMEM
cnode7-018:4121588:4122560 [2] NCCL INFO Connected all trees
cnode7-019:254249:255071 [2] NCCL INFO Connected all trees
cnode7-019:254250:255074 [3] NCCL INFO Connected all trees
cnode7-018:4121589:4122557 [3] NCCL INFO Connected all trees
cnode7-019:254251:255072 [4] NCCL INFO Connected all trees
cnode7-018:4121590:4122556 [4] NCCL INFO Connected all trees
cnode7-018:4121591:4122555 [5] NCCL INFO Connected all trees
cnode7-019:254254:255068 [7] NCCL INFO Connected all trees
cnode7-019:254247:255137 [0] NCCL INFO Connected all trees
cnode7-019:254248:255136 [1] NCCL INFO Connected all trees
cnode7-019:254253:255069 [6] NCCL INFO Connected all trees
cnode7-018:4121592:4122553 [6] NCCL INFO Connected all trees
cnode7-018:4121593:4122551 [7] NCCL INFO Connected all trees
cnode7-018:4121586:4122550 [0] NCCL INFO Connected all trees
cnode7-018:4121587:4122554 [1] NCCL INFO Connected all trees
cnode7-019:254252:255073 [5] NCCL INFO Connected all trees
cnode7-018:4121589:4122557 [3] NCCL INFO NVLS comm 0x55557261c1c0 headRank 3 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736
cnode7-018:4121592:4122553 [6] NCCL INFO NVLS comm 0x55558461d120 headRank 6 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736
cnode7-018:4121593:4122551 [7] NCCL INFO NVLS comm 0x55556181aa40 headRank 7 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736
cnode7-018:4121587:4122554 [1] NCCL INFO NVLS comm 0x55557bc18d00 headRank 1 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736
cnode7-019:254253:255069 [6] NCCL INFO NVLS comm 0x55556ca1a610 headRank 6 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736
cnode7-019:254250:255074 [3] NCCL INFO NVLS comm 0x55557c420300 headRank 3 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736
cnode7-019:254251:255072 [4] NCCL INFO NVLS comm 0x555560c1a420 headRank 4 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736
cnode7-019:254252:255073 [5] NCCL INFO NVLS comm 0x555580e21130 headRank 5 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736
cnode7-018:4121586:4122550 [0] NCCL INFO NVLS comm 0x55556136dea0 headRank 0 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736
cnode7-018:4121591:4122555 [5] NCCL INFO NVLS comm 0x55558061ab60 headRank 5 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736
cnode7-019:254249:255071 [2] NCCL INFO NVLS comm 0x555569a23730 headRank 2 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736
cnode7-018:4121588:4122560 [2] NCCL INFO NVLS comm 0x55558161caa0 headRank 2 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736
cnode7-019:254247:255137 [0] NCCL INFO NVLS comm 0x555564d9eae0 headRank 0 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736
cnode7-019:254254:255068 [7] NCCL INFO NVLS comm 0x55557261e8f0 headRank 7 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736
cnode7-018:4121590:4122556 [4] NCCL INFO NVLS comm 0x55558061b4f0 headRank 4 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736
cnode7-019:254248:255136 [1] NCCL INFO NVLS comm 0x55557261e430 headRank 1 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 00/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 01/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 02/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 04/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 05/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 06/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 07/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 08/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 00/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 01/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 02/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 03/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 04/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 05/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 07/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 08/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 09/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 10/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 11/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 12/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 13/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 15/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 00/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 01/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 02/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 03/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 04/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 05/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 08/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 09/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 10/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 11/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 12/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Channel 13/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 00/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 02/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 03/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 04/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 05/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 06/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 07/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 08/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 10/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 11/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 12/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 13/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 14/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 15/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 02/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 03/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 04/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 05/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 06/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 07/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 10/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 11/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 12/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 13/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 14/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA
cnode7-018:4121587:4122554 [1] NCCL INFO Channel 15/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 00/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 01/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 02/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 03/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 04/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 05/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 06/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 08/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 09/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 10/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 11/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 12/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 13/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 14/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 00/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 01/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 02/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 03/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 04/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 05/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 08/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 09/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 10/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 11/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 12/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA
cnode7-018:4121593:4122551 [7] NCCL INFO Channel 13/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 00/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 01/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 02/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 03/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 04/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 05/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 08/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 00/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 01/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 04/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 05/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 06/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 07/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 08/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 09/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 00/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 01/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 02/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 03/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 06/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 07/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 08/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 00/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 01/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 02/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 03/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 06/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 07/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 08/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 01/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 02/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 03/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 04/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 05/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 06/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 07/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 09/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 10/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 11/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 12/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 13/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 14/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 15/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 02/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 03/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 04/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 05/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 06/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 07/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 10/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 11/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 12/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 13/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 14/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA
cnode7-018:4121586:4122550 [0] NCCL INFO Channel 15/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 00/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 01/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 02/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 03/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 04/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 06/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 07/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 08/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 09/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 10/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 11/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 12/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 14/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 15/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 00/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 01/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 02/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 03/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 06/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 07/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 08/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 09/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 10/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 11/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 14/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA
cnode7-018:4121591:4122555 [5] NCCL INFO Channel 15/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 00/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 01/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 03/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 04/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 05/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 06/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 07/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 08/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 09/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 11/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 12/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 13/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 14/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 15/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 00/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 01/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 04/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 05/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 06/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 07/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 08/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 09/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 12/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 13/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 14/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA
cnode7-018:4121588:4122560 [2] NCCL INFO Channel 15/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 00/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 01/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 02/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 03/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 05/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 06/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 07/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 08/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 09/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 10/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 11/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 13/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 14/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 15/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 00/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 01/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 02/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 03/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 06/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 07/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 08/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 09/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 10/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 11/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 14/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA
cnode7-018:4121590:4122556 [4] NCCL INFO Channel 15/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 00/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 01/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 04/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 05/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 06/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 07/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 08/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 09/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 12/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 13/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 14/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 15/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 00/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 01/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 03/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 04/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 05/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 06/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 07/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 08/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 09/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 11/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 12/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 13/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 14/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA
cnode7-019:254249:255071 [2] NCCL INFO Channel 15/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 02/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 03/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 04/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 05/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 06/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 07/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 10/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 11/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 12/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 13/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 14/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 15/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 01/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 02/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 03/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 04/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 05/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 06/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 07/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 09/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 10/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 11/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 12/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 13/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 14/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA
cnode7-019:254247:255137 [0] NCCL INFO Channel 15/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 00/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 01/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 02/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 03/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 04/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 05/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 08/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 09/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 10/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 11/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 12/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 13/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 00/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 01/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 02/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 03/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 04/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 05/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 06/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 08/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 09/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 10/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 11/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 12/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 13/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA
cnode7-019:254254:255068 [7] NCCL INFO Channel 14/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 02/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 03/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 04/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 05/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 06/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 07/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 10/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 11/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 12/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 13/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 14/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 15/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 00/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 02/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 03/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 04/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 05/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 06/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 07/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 08/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 10/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 11/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 12/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 13/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 14/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA
cnode7-019:254248:255136 [1] NCCL INFO Channel 15/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 09/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 10/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 12/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 13/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 14/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 15/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 00/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 01/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 04/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 05/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 06/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 07/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 08/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 09/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 12/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 13/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 14/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA
cnode7-018:4121589:4122557 [3] NCCL INFO Channel 15/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 09/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 10/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 11/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 12/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 13/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 00/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 01/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 02/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 03/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 04/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 05/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 07/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 08/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 09/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 10/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 11/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 12/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 13/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA
cnode7-019:254253:255069 [6] NCCL INFO Channel 15/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 12/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 13/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 14/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 15/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 00/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 01/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 02/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 04/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 05/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 06/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 07/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 08/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 09/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 10/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 12/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 13/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 14/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA
cnode7-019:254250:255074 [3] NCCL INFO Channel 15/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 09/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 10/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 11/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 14/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 15/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 00/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 01/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 02/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 03/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 05/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 06/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 07/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 08/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 09/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 10/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 11/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 13/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 14/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA
cnode7-019:254251:255072 [4] NCCL INFO Channel 15/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 09/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 10/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 11/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 14/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 15/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 00/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 01/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 02/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 03/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 04/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 06/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 07/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 08/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 09/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 10/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 11/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 12/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 14/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA
cnode7-019:254252:255073 [5] NCCL INFO Channel 15/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA
cnode7-018:4121592:4122553 [6] NCCL INFO Connected NVLS tree
cnode7-018:4121592:4122553 [6] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512
cnode7-018:4121592:4122553 [6] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer
cnode7-018:4121593:4122551 [7] NCCL INFO Connected NVLS tree
cnode7-018:4121593:4122551 [7] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512
cnode7-018:4121593:4122551 [7] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer
cnode7-018:4121587:4122554 [1] NCCL INFO Connected NVLS tree
cnode7-018:4121587:4122554 [1] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512
cnode7-018:4121587:4122554 [1] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer
cnode7-019:254253:255069 [6] NCCL INFO Connected NVLS tree
cnode7-019:254253:255069 [6] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512
cnode7-019:254253:255069 [6] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer
cnode7-019:254250:255074 [3] NCCL INFO Connected NVLS tree
cnode7-019:254250:255074 [3] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512
cnode7-019:254250:255074 [3] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer
cnode7-018:4121586:4122550 [0] NCCL INFO Connected NVLS tree
cnode7-018:4121586:4122550 [0] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512
cnode7-018:4121586:4122550 [0] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer
cnode7-019:254251:255072 [4] NCCL INFO Connected NVLS tree
cnode7-019:254251:255072 [4] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512
cnode7-019:254251:255072
[4] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer cnode7-019:254248:255136 [1] NCCL INFO Connected NVLS tree cnode7-019:254248:255136 [1] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512 cnode7-019:254248:255136 [1] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer cnode7-019:254252:255073 [5] NCCL INFO Connected NVLS tree cnode7-019:254252:255073 [5] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512 cnode7-019:254252:255073 [5] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer cnode7-018:4121590:4122556 [4] NCCL INFO Connected NVLS tree cnode7-018:4121590:4122556 [4] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512 cnode7-018:4121590:4122556 [4] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer cnode7-018:4121591:4122555 [5] NCCL INFO Connected NVLS tree cnode7-018:4121591:4122555 [5] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512 cnode7-018:4121591:4122555 [5] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer cnode7-019:254247:255137 [0] NCCL INFO Connected NVLS tree cnode7-019:254247:255137 [0] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512 cnode7-019:254247:255137 [0] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer cnode7-018:4121589:4122557 [3] NCCL INFO Connected NVLS tree cnode7-018:4121589:4122557 [3] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512 cnode7-018:4121589:4122557 [3] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer cnode7-018:4121588:4122560 [2] NCCL INFO Connected NVLS tree cnode7-018:4121588:4122560 [2] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512 cnode7-018:4121588:4122560 [2] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer cnode7-019:254249:255071 [2] NCCL INFO Connected NVLS tree cnode7-019:254249:255071 [2] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512 cnode7-019:254249:255071 [2] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer cnode7-018:4121588:4122560 [2] NCCL INFO comm 0x55558161caa0 rank 2 nranks 16 cudaDev 2 nvmlDev 2 busId 52000 commId 0x41095aaa98a46250 - Init COMPLETE cnode7-019:254254:255068 [7] NCCL INFO Connected NVLS tree cnode7-019:254254:255068 [7] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512 cnode7-019:254254:255068 [7] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer cnode7-018:4121590:4122556 [4] NCCL INFO comm 0x55558061b4f0 rank 4 nranks 16 cudaDev 4 nvmlDev 4 busId 9d000 commId 0x41095aaa98a46250 - Init COMPLETE cnode7-018:4121586:4122550 [0] NCCL INFO comm 0x55556136dea0 rank 0 nranks 16 cudaDev 0 nvmlDev 0 busId 1b000 commId 0x41095aaa98a46250 - Init COMPLETE cnode7-018:4121591:4122555 [5] NCCL INFO comm 0x55558061ab60 rank 5 nranks 16 cudaDev 5 nvmlDev 5 busId c3000 commId 0x41095aaa98a46250 - Init COMPLETE cnode7-018:4121587:4122554 [1] NCCL INFO comm 0x55557bc18d00 rank 1 nranks 16 cudaDev 1 nvmlDev 1 busId 43000 commId 0x41095aaa98a46250 - Init COMPLETE cnode7-018:4121592:4122553 [6] NCCL INFO comm 0x55558461d120 rank 6 nranks 16 cudaDev 6 nvmlDev 6 busId d1000 commId 
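The NCCL records above are the initialization trace for the 16-rank communicator: per-channel point-to-point routes over InfiniBand with GPUDirect RDMA (NET/IB/*/GDRDMA), NVLS tree setup, and one "Init COMPLETE" line per rank. How verbose this trace is can be controlled with standard NCCL environment variables; a minimal sketch (the variables must be set before the communicator is created, e.g. before torch.distributed.init_process_group):

    import os

    # NCCL_DEBUG=INFO produces traces like the ones above; WARN keeps only problems.
    os.environ["NCCL_DEBUG"] = "INFO"
    # Optionally narrow the trace to initialization and transport selection.
    os.environ["NCCL_DEBUG_SUBSYS"] = "INIT,NET"

    import torch.distributed as dist
    dist.init_process_group(backend="nccl")  # assumes the usual env:// rendezvous variables are set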
cnode7-018:4121593:4122551 [7] NCCL INFO comm 0x55556181aa40 rank 7 nranks 16 cudaDev 7 nvmlDev 7 busId df000 commId 0x41095aaa98a46250 - Init COMPLETE
cnode7-018:4121589:4122557 [3] NCCL INFO comm 0x55557261c1c0 rank 3 nranks 16 cudaDev 3 nvmlDev 3 busId 61000 commId 0x41095aaa98a46250 - Init COMPLETE
cnode7-019:254249:255071 [2] NCCL INFO comm 0x555569a23730 rank 10 nranks 16 cudaDev 2 nvmlDev 2 busId 52000 commId 0x41095aaa98a46250 - Init COMPLETE
cnode7-019:254253:255069 [6] NCCL INFO comm 0x55556ca1a610 rank 14 nranks 16 cudaDev 6 nvmlDev 6 busId d1000 commId 0x41095aaa98a46250 - Init COMPLETE
cnode7-019:254248:255136 [1] NCCL INFO comm 0x55557261e430 rank 9 nranks 16 cudaDev 1 nvmlDev 1 busId 43000 commId 0x41095aaa98a46250 - Init COMPLETE
cnode7-019:254251:255072 [4] NCCL INFO comm 0x555560c1a420 rank 12 nranks 16 cudaDev 4 nvmlDev 4 busId 9d000 commId 0x41095aaa98a46250 - Init COMPLETE
cnode7-019:254247:255137 [0] NCCL INFO comm 0x555564d9eae0 rank 8 nranks 16 cudaDev 0 nvmlDev 0 busId 1b000 commId 0x41095aaa98a46250 - Init COMPLETE
cnode7-019:254254:255068 [7] NCCL INFO comm 0x55557261e8f0 rank 15 nranks 16 cudaDev 7 nvmlDev 7 busId df000 commId 0x41095aaa98a46250 - Init COMPLETE
cnode7-019:254250:255074 [3] NCCL INFO comm 0x55557c420300 rank 11 nranks 16 cudaDev 3 nvmlDev 3 busId 61000 commId 0x41095aaa98a46250 - Init COMPLETE
cnode7-019:254252:255073 [5] NCCL INFO comm 0x555580e21130 rank 13 nranks 16 cudaDev 5 nvmlDev 5 busId c3000 commId 0x41095aaa98a46250 - Init COMPLETE
[2024-12-09 21:39:51,330] [INFO] [logging.py:129:log_dist] [Rank 0] DeepSpeed Flops Profiler Enabled: False
[2024-12-09 21:39:51,334] [INFO] [logging.py:129:log_dist] [Rank 0] Using DeepSpeed Optimizer param name adam as basic optimizer
[2024-12-09 21:39:51,334] [INFO] [logging.py:129:log_dist] [Rank 0] Removing param_group that has no 'params' in the basic Optimizer
[2024-12-09 21:39:51,366] [INFO] [logging.py:129:log_dist] [Rank 0] DeepSpeed Basic Optimizer = FusedAdam
[2024-12-09 21:39:51,366] [INFO] [utils.py:59:is_zero_supported_optimizer] Checking ZeRO support for optimizer=FusedAdam type=
[2024-12-09 21:39:51,367] [INFO] [logging.py:129:log_dist] [Rank 0] Creating torch.bfloat16 ZeRO stage 2 optimizer
[2024-12-09 21:39:51,367] [INFO] [stage_1_and_2.py:149:__init__] Reduce bucket size 500000000
[2024-12-09 21:39:51,367] [INFO] [stage_1_and_2.py:150:__init__] Allgather bucket size 500000000
[2024-12-09 21:39:51,367] [INFO] [stage_1_and_2.py:151:__init__] CPU Offload: False
[2024-12-09 21:39:51,367] [INFO] [stage_1_and_2.py:152:__init__] Round robin gradient partitioning: False
[2024-12-09 21:39:52,888] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt...
/mnt/home/williamchen/espnet/tools/miniconda/envs/espnet/lib/python3.10/site-packages/deepspeed/runtime/checkpoint_engine/torch_checkpoint_engine.py:28: FutureWarning: You are using `torch.load` with `weights_only=False` (the current default value), which uses the default pickle module implicitly. It is possible to construct malicious pickle data which will execute arbitrary code during unpickling (See https://github.com/pytorch/pytorch/blob/main/SECURITY.md#untrusted-models for more details). In a future release, the default value for `weights_only` will be flipped to `True`. This limits the functions that could be executed during unpickling. Arbitrary objects will no longer be allowed to be loaded via this mode unless they are explicitly allowlisted by the user via `torch.serialization.add_safe_globals`. We recommend you start setting `weights_only=True` for any use case where you don't have full control of the loaded file. Please open an issue on GitHub for any issues related to this experimental feature.
  partition = torch.load(path, map_location=map_location)
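The FutureWarning above is emitted by DeepSpeed's torch checkpoint engine because it calls torch.load without weights_only. A minimal sketch of the safer pattern the warning recommends (assumes PyTorch >= 2.4 for torch.serialization.add_safe_globals; the path is the checkpoint from this log):

    import torch

    path = "exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt"
    # weights_only=True restricts unpickling to tensors and allowlisted types,
    # which closes the arbitrary-code-execution hole the warning describes.
    state = torch.load(path, map_location="cpu", weights_only=True)

    # If the checkpoint pickles custom objects (DeepSpeed model_states files can),
    # allowlist those classes explicitly instead of reverting to weights_only=False.
    # The class name below is a hypothetical placeholder:
    # torch.serialization.add_safe_globals([MyTrainerState])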
[2024-12-09 21:39:52,936] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt...
[2024-12-09 21:39:52,955] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt...
[2024-12-09 21:39:52,967] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt...
[2024-12-09 21:39:54,149] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt...
[2024-12-09 21:39:54,150] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt...
[2024-12-09 21:39:54,159] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt...
[2024-12-09 21:39:54,173] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt...
[2024-12-09 21:39:54,179] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt...
[2024-12-09 21:39:54,191] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt...
[2024-12-09 21:39:53,042] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt...
[2024-12-09 21:39:54,221] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt...
[2024-12-09 21:39:53,057] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt...
[2024-12-09 21:39:53,194] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt...
[2024-12-09 21:39:53,367] [INFO] [utils.py:781:see_memory_usage] Before initializing optimizer states
[2024-12-09 21:39:53,368] [INFO] [utils.py:782:see_memory_usage] MA 1.37 GB Max_MA 1.45 GB CA 1.45 GB Max_CA 1 GB
[2024-12-09 21:39:53,368] [INFO] [utils.py:789:see_memory_usage] CPU Virtual Memory: used = 63.02 GB, percent = 3.1%
[2024-12-09 21:39:53,531] [INFO] [utils.py:781:see_memory_usage] After initializing optimizer states
[2024-12-09 21:39:53,531] [INFO] [utils.py:782:see_memory_usage] MA 1.37 GB Max_MA 1.52 GB CA 1.6 GB Max_CA 2 GB
[2024-12-09 21:39:53,531] [INFO] [utils.py:789:see_memory_usage] CPU Virtual Memory: used = 63.95 GB, percent = 3.2%
[2024-12-09 21:39:53,532] [INFO] [stage_1_and_2.py:544:__init__] optimizer state initialized
[2024-12-09 21:39:53,652] [INFO] [utils.py:781:see_memory_usage] After initializing ZeRO optimizer
[2024-12-09 21:39:53,653] [INFO] [utils.py:782:see_memory_usage] MA 1.37 GB Max_MA 1.37 GB CA 1.6 GB Max_CA 2 GB
[2024-12-09 21:39:53,653] [INFO] [utils.py:789:see_memory_usage] CPU Virtual Memory: used = 64.5 GB, percent = 3.2%
[2024-12-09 21:39:53,654] [INFO] [logging.py:129:log_dist] [Rank 0] DeepSpeed Final Optimizer = DeepSpeedZeroOptimizer
[2024-12-09 21:39:53,654] [INFO] [logging.py:129:log_dist] [Rank 0] DeepSpeed using client callable to create LR scheduler
[2024-12-09 21:39:53,655] [INFO] [logging.py:129:log_dist] [Rank 0] DeepSpeed LR Scheduler = PiecewiseLinearWarmupLR(warmup_steps_list=[0, 30000, 60000], warmup_lr_list=[0.0, 5e-05, 0.0002])
[2024-12-09 21:39:53,655] [INFO] [logging.py:129:log_dist] [Rank 0] step=0, skipped=0, lr=[np.float64(1.6666666666666667e-09)], mom=[[0.9, 0.98]]
[2024-12-09 21:39:53,656] [INFO] [config.py:999:print] DeepSpeedEngine configuration:
[2024-12-09 21:39:53,656] [INFO] [config.py:1003:print] activation_checkpointing_config { "partition_activations": false, "contiguous_memory_optimization": false, "cpu_checkpointing": false, "number_checkpoints": null, "synchronize_checkpoint_boundary": false, "profile": false }
[2024-12-09 21:39:53,656] [INFO] [config.py:1003:print] aio_config ................... {'block_size': 1048576, 'queue_depth': 8, 'thread_count': 1, 'single_submit': False, 'overlap_events': True, 'use_gds': False}
[2024-12-09 21:39:53,656] [INFO] [config.py:1003:print] amp_enabled .................. False
[2024-12-09 21:39:53,656] [INFO] [config.py:1003:print] amp_params ................... False
[2024-12-09 21:39:53,656] [INFO] [config.py:1003:print] autotuning_config ............ { "enabled": false, "start_step": null, "end_step": null, "metric_path": null, "arg_mappings": null, "metric": "throughput", "model_info": null, "results_dir": "autotuning_results", "exps_dir": "autotuning_exps", "overwrite": true, "fast": true, "start_profile_step": 3, "end_profile_step": 5, "tuner_type": "gridsearch", "tuner_early_stopping": 5, "tuner_num_trials": 50, "model_info_path": null, "mp_size": 1, "max_train_batch_size": null, "min_train_batch_size": 1, "max_train_micro_batch_size_per_gpu": 1.024000e+03, "min_train_micro_batch_size_per_gpu": 1, "num_tuning_micro_batch_sizes": 3 }
[2024-12-09 21:39:53,656] [INFO] [config.py:1003:print] bfloat16_enabled ............. True
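PiecewiseLinearWarmupLR is ESPnet's warmup scheduler; it interpolates the learning rate linearly between the (step, lr) anchor points logged above, and the lr of about 1.67e-09 reported at startup matches one step of interpolation between (0, 0.0) and (30000, 5e-05). An illustrative sketch of the schedule (a reimplementation for clarity, not ESPnet's actual code):

    import numpy as np

    warmup_steps = [0, 30000, 60000]
    warmup_lrs = [0.0, 5e-05, 0.0002]

    def piecewise_linear_lr(step: int) -> float:
        # Linear interpolation between anchors; clamped to the last value afterwards.
        return float(np.interp(step, warmup_steps, warmup_lrs))

    print(piecewise_linear_lr(1))      # ~1.6667e-09, the value in the log above
    print(piecewise_linear_lr(30000))  # 5e-05
    print(piecewise_linear_lr(60000))  # 0.0002, the peak lr from the optimizer config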
[2024-12-09 21:39:53,656] [INFO] [config.py:1003:print] bfloat16_immediate_grad_update False
[2024-12-09 21:39:53,656] [INFO] [config.py:1003:print] checkpoint_parallel_write_pipeline False
[2024-12-09 21:39:53,656] [INFO] [config.py:1003:print] checkpoint_tag_validation_enabled True
[2024-12-09 21:39:53,656] [INFO] [config.py:1003:print] checkpoint_tag_validation_fail False
[2024-12-09 21:39:53,656] [INFO] [config.py:1003:print] comms_config ................. 
[2024-12-09 21:39:53,656] [INFO] [config.py:1003:print] communication_data_type ...... None
[2024-12-09 21:39:53,656] [INFO] [config.py:1003:print] compression_config ........... {'weight_quantization': {'shared_parameters': {'enabled': False, 'quantizer_kernel': False, 'schedule_offset': 0, 'quantize_groups': 1, 'quantize_verbose': False, 'quantization_type': 'symmetric', 'quantize_weight_in_forward': False, 'rounding': 'nearest', 'fp16_mixed_quantize': False, 'quantize_change_ratio': 0.001}, 'different_groups': {}}, 'activation_quantization': {'shared_parameters': {'enabled': False, 'quantization_type': 'symmetric', 'range_calibration': 'dynamic', 'schedule_offset': 1000}, 'different_groups': {}}, 'sparse_pruning': {'shared_parameters': {'enabled': False, 'method': 'l1', 'schedule_offset': 1000}, 'different_groups': {}}, 'row_pruning': {'shared_parameters': {'enabled': False, 'method': 'l1', 'schedule_offset': 1000}, 'different_groups': {}}, 'head_pruning': {'shared_parameters': {'enabled': False, 'method': 'topk', 'schedule_offset': 1000}, 'different_groups': {}}, 'channel_pruning': {'shared_parameters': {'enabled': False, 'method': 'l1', 'schedule_offset': 1000}, 'different_groups': {}}, 'layer_reduction': {'enabled': False}}
[2024-12-09 21:39:53,656] [INFO] [config.py:1003:print] curriculum_enabled_legacy .... False
[2024-12-09 21:39:53,656] [INFO] [config.py:1003:print] curriculum_params_legacy ..... False
[2024-12-09 21:39:53,656] [INFO] [config.py:1003:print] data_efficiency_config ....... {'enabled': False, 'seed': 1234, 'data_sampling': {'enabled': False, 'num_epochs': 1000, 'num_workers': 0, 'curriculum_learning': {'enabled': False}}, 'data_routing': {'enabled': False, 'random_ltd': {'enabled': False, 'layer_token_lr_schedule': {'enabled': False}}}}
[2024-12-09 21:39:53,656] [INFO] [config.py:1003:print] data_efficiency_enabled ...... False
[2024-12-09 21:39:53,656] [INFO] [config.py:1003:print] dataloader_drop_last ......... False
[2024-12-09 21:39:53,656] [INFO] [config.py:1003:print] disable_allgather ............ False
[2024-12-09 21:39:53,656] [INFO] [config.py:1003:print] dump_state ................... False
[2024-12-09 21:39:53,656] [INFO] [config.py:1003:print] dynamic_loss_scale_args ...... None
[2024-12-09 21:39:53,656] [INFO] [config.py:1003:print] eigenvalue_enabled ........... False
[2024-12-09 21:39:53,656] [INFO] [config.py:1003:print] eigenvalue_gas_boundary_resolution 1
[2024-12-09 21:39:53,656] [INFO] [config.py:1003:print] eigenvalue_layer_name ........ bert.encoder.layer
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] eigenvalue_layer_num ......... 0
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] eigenvalue_max_iter .......... 100
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] eigenvalue_stability ......... 1e-06
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] eigenvalue_tol ............... 0.01
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] eigenvalue_verbose ........... False
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] elasticity_enabled ........... False
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] flops_profiler_config ........ { "enabled": false, "recompute_fwd_factor": 0.0, "profile_step": 1, "module_depth": -1, "top_modules": 1, "detailed": true, "output_file": null }
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] fp16_auto_cast ............... None
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] fp16_enabled ................. False
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] fp16_master_weights_and_gradients False
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] global_rank .................. 0
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] grad_accum_dtype ............. None
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] gradient_accumulation_steps .. 1
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] gradient_clipping ............ 5.0
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] gradient_predivide_factor .... 1.0
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] graph_harvesting ............. False
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] hybrid_engine ................ enabled=False max_out_tokens=512 inference_tp_size=1 release_inference_cache=False pin_parameters=True tp_gather_partition_size=8
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] initial_dynamic_scale ........ 1
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] load_universal_checkpoint .... False
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] loss_scale ................... 1.0
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] memory_breakdown ............. False
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] mics_hierarchial_params_gather False
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] mics_shard_size .............. -1
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] monitor_config ............... tensorboard=TensorBoardConfig(enabled=False, output_path='', job_name='DeepSpeedJobName') comet=CometConfig(enabled=False, samples_log_interval=100, project=None, workspace=None, api_key=None, experiment_name=None, experiment_key=None, online=None, mode=None) wandb=WandbConfig(enabled=False, group=None, team=None, project='deepspeed') csv_monitor=CSVConfig(enabled=False, output_path='', job_name='DeepSpeedJobName')
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] nebula_config ................ { "enabled": false, "persistent_storage_path": null, "persistent_time_interval": 100, "num_of_version_in_retention": 2, "enable_nebula_load": true, "load_path": null }
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] optimizer_legacy_fusion ...... False
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] optimizer_name ............... adam
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] optimizer_params ............. {'lr': 0.0002, 'betas': [0.9, 0.98], 'eps': 1e-06, 'weight_decay': 0.0}
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] pipeline ..................... {'stages': 'auto', 'partition': 'best', 'seed_layers': False, 'activation_checkpoint_interval': 0, 'pipe_partitioned': True, 'grad_partitioned': True}
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] pld_enabled .................. False
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] pld_params ................... False
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] prescale_gradients ........... False
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] scheduler_name ............... None
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] scheduler_params ............. None
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] seq_parallel_communication_data_type torch.float32
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] sparse_attention ............. None
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] sparse_gradients_enabled ..... False
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] steps_per_print .............. 1000
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] timers_config ................ enabled=True synchronized=True
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] train_batch_size ............. 16
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] train_micro_batch_size_per_gpu 1
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] use_data_before_expert_parallel_ False
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] use_node_local_storage ....... False
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] wall_clock_breakdown ......... False
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] weight_quantization_config ... None
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] world_size ................... 16
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] zero_allow_untested_optimizer False
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] zero_config .................. stage=2 contiguous_gradients=True reduce_scatter=True reduce_bucket_size=500000000 use_multi_rank_bucket_allreduce=True allgather_partitions=True allgather_bucket_size=500000000 overlap_comm=True load_from_fp32_weights=True elastic_checkpoint=False offload_param=None offload_optimizer=None sub_group_size=1000000000 cpu_offload_param=None cpu_offload_use_pin_memory=None cpu_offload=None prefetch_bucket_size=50000000 param_persistence_threshold=100000 model_persistence_threshold=9223372036854775807 max_live_parameters=1000000000 max_reuse_distance=1000000000 gather_16bit_weights_on_model_save=False use_all_reduce_for_fetch_params=False stage3_gather_fp16_weights_on_model_save=False ignore_unused_parameters=True legacy_stage1=False round_robin_gradients=False zero_hpz_partition_size=1 zero_quantized_weights=False zero_quantized_nontrainable_weights=False zero_quantized_gradients=False mics_shard_size=-1 mics_hierarchical_params_gather=False memory_efficient_linear=True pipeline_loading_checkpoint=False override_module_apply=True
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] zero_enabled ................. True
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] zero_force_ds_cpu_optimizer .. True
[2024-12-09 21:39:53,657] [INFO] [config.py:1003:print] zero_optimization_stage ...... 2
[2024-12-09 21:39:53,657] [INFO] [config.py:989:print_user_config] json = {
    "train_micro_batch_size_per_gpu": 1,
    "gradient_accumulation_steps": 1,
    "gradient_clipping": 5.0,
    "bf16": {
        "enabled": true
    },
    "zero_optimization": {
        "stage": 2,
        "contiguous_gradients": true,
        "overlap_comm": true,
        "reduce_scatter": true,
        "reduce_bucket_size": 5.000000e+08,
        "allgather_bucket_size": 5.000000e+08
    },
    "optimizer": {
        "type": "Adam",
        "params": {
            "lr": 0.0002,
            "betas": [0.9, 0.98],
            "eps": 1e-06,
            "weight_decay": 0.0
        }
    },
    "wall_clock_breakdown": false,
    "steps_per_print": 1000
}
[cnode7-018:0/16] 2024-12-09 21:39:53,666 (deepspeed_trainer:75) INFO: Resume training from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40
[2024-12-09 21:39:53,682] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt...
[2024-12-09 21:39:54,886] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt...
[2024-12-09 21:39:54,138] [INFO] [torch_checkpoint_engine.py:29:load] [Torch] Loaded checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt.
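The print_user_config block above is the ds_config this run hands to DeepSpeed. A minimal sketch of how such a config is typically wired up (the model below is a stand-in, not the ESPnet S2T model; deepspeed.initialize returns the engine whose checkpoint logs follow):

    import deepspeed
    import torch

    ds_config = {
        "train_micro_batch_size_per_gpu": 1,
        "gradient_accumulation_steps": 1,
        "gradient_clipping": 5.0,
        "bf16": {"enabled": True},
        "zero_optimization": {
            "stage": 2,
            "contiguous_gradients": True,
            "overlap_comm": True,
            "reduce_scatter": True,
            "reduce_bucket_size": 500_000_000,
            "allgather_bucket_size": 500_000_000,
        },
        "optimizer": {
            "type": "Adam",
            "params": {"lr": 0.0002, "betas": [0.9, 0.98], "eps": 1e-06, "weight_decay": 0.0},
        },
        "wall_clock_breakdown": False,
        "steps_per_print": 1000,
    }

    model = torch.nn.Linear(8, 8)  # placeholder for the actual S2T model
    engine, optimizer, _, lr_scheduler = deepspeed.initialize(
        model=model,
        model_parameters=model.parameters(),
        config=ds_config,
    )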
[2024-12-09 21:39:54,138] [INFO] [torch_checkpoint_engine.py:29:load] [Torch] Loaded checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt. [2024-12-09 21:39:54,138] [INFO] [torch_checkpoint_engine.py:29:load] [Torch] Loaded checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt. [2024-12-09 21:39:54,138] [INFO] [torch_checkpoint_engine.py:29:load] [Torch] Loaded checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt. [2024-12-09 21:39:54,139] [INFO] [torch_checkpoint_engine.py:29:load] [Torch] Loaded checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt. [2024-12-09 21:39:54,140] [INFO] [torch_checkpoint_engine.py:29:load] [Torch] Loaded checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt. [2024-12-09 21:39:54,144] [INFO] [torch_checkpoint_engine.py:29:load] [Torch] Loaded checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt. [2024-12-09 21:39:55,343] [INFO] [torch_checkpoint_engine.py:29:load] [Torch] Loaded checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt. [2024-12-09 21:39:55,343] [INFO] [torch_checkpoint_engine.py:29:load] [Torch] Loaded checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt. [2024-12-09 21:39:55,343] [INFO] [torch_checkpoint_engine.py:29:load] [Torch] Loaded checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt. [2024-12-09 21:39:55,345] [INFO] [torch_checkpoint_engine.py:29:load] [Torch] Loaded checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt. [2024-12-09 21:39:55,345] [INFO] [torch_checkpoint_engine.py:29:load] [Torch] Loaded checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt. [2024-12-09 21:39:55,346] [INFO] [torch_checkpoint_engine.py:29:load] [Torch] Loaded checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt. [2024-12-09 21:39:55,346] [INFO] [torch_checkpoint_engine.py:29:load] [Torch] Loaded checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt. [2024-12-09 21:39:54,210] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt... [2024-12-09 21:39:54,215] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt... [2024-12-09 21:39:54,215] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt... [2024-12-09 21:39:54,216] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt... [2024-12-09 21:39:54,217] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt... [2024-12-09 21:39:54,218] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt... 
[2024-12-09 21:39:54,221] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt... [2024-12-09 21:39:55,426] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt... [2024-12-09 21:39:55,427] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt... [2024-12-09 21:39:55,427] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt... [2024-12-09 21:39:55,428] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt... [2024-12-09 21:39:55,433] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt... [2024-12-09 21:39:55,434] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt... [2024-12-09 21:39:55,434] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt... [2024-12-09 21:39:54,453] [INFO] [torch_checkpoint_engine.py:29:load] [Torch] Loaded checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt. [2024-12-09 21:39:55,626] [INFO] [torch_checkpoint_engine.py:29:load] [Torch] Loaded checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt. [2024-12-09 21:39:55,682] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt... [2024-12-09 21:39:54,521] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt... [2024-12-09 21:39:54,856] [INFO] [torch_checkpoint_engine.py:29:load] [Torch] Loaded checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt. [2024-12-09 21:39:54,857] [INFO] [torch_checkpoint_engine.py:29:load] [Torch] Loaded checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt. [2024-12-09 21:39:54,858] [INFO] [torch_checkpoint_engine.py:29:load] [Torch] Loaded checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt. [2024-12-09 21:39:54,858] [INFO] [torch_checkpoint_engine.py:29:load] [Torch] Loaded checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt. [2024-12-09 21:39:54,858] [INFO] [torch_checkpoint_engine.py:29:load] [Torch] Loaded checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt. [2024-12-09 21:39:54,858] [INFO] [torch_checkpoint_engine.py:29:load] [Torch] Loaded checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt. [2024-12-09 21:39:54,874] [INFO] [torch_checkpoint_engine.py:29:load] [Torch] Loaded checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt. 
[2024-12-09 21:39:55-56] [INFO] [torch_checkpoint_engine.py:29:load] [Torch] Loaded checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/mp_rank_00_model_states.pt. (one such line per rank; duplicates collapsed)
[2024-12-09 21:39:55-56] [INFO] [torch_checkpoint_engine.py:27:load] [Torch] Loading checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/bf16_zero_pp_rank_{0..15}_mp_rank_00_optim_states.pt... (one line per ZeRO shard)
[2024-12-09 21:39:55-57] [INFO] [torch_checkpoint_engine.py:29:load] [Torch] Loaded checkpoint from exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40/40/bf16_zero_pp_rank_{0..15}_mp_rank_00_optim_states.pt.
[2024-12-09 21:39:55-57] [INFO] [engine.py:3076:_get_all_zero_checkpoint_state_dicts] successfully read 16 ZeRO state_dicts for rank {0..15}
[2024-12-09 21:39:55-57] [INFO] [engine.py:3026:_load_zero_checkpoint] loading 16 zero partition checkpoints for rank {0..15}
[cnode7-018:0/16] 2024-12-09 21:39:56,263 (multiple_iter_factory:32) INFO: Building 0th iter-factory...
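For context, these messages are DeepSpeed's checkpoint engine resuming the run from checkpoint tag 40: each rank loads the shared module weights once and then gathers all 16 bf16 ZeRO optimizer shards for its own partition. A minimal sketch of that resume path, assuming the standard deepspeed API (the toy model, optimizer settings, and tag below are placeholders inferred from the logged paths, not the actual ESPnet setup):

import torch
import deepspeed

# Placeholder model/config; the real run wraps the ESPnet S2T model with
# bf16 + ZeRO, which is why per-rank bf16_zero_pp_rank_*_optim_states.pt
# shards appear in the log. Launch under srun/torchrun as in this job.
model = torch.nn.Linear(80, 4)
ds_config = {
    "train_micro_batch_size_per_gpu": 1,
    "optimizer": {"type": "Adam", "params": {"lr": 1e-4}},
    "bf16": {"enabled": True},
    "zero_optimization": {"stage": 1},
}

engine, _, _, _ = deepspeed.initialize(
    model=model,
    model_parameters=model.parameters(),
    config=ds_config,
)

# Mirrors the log: load mp_rank_00_model_states.pt plus the 16
# bf16_zero_pp_rank_*_optim_states.pt shards ("successfully read 16
# ZeRO state_dicts for rank N" above).
load_path, client_state = engine.load_checkpoint(
    "exp_owsm/s2t_train_05b_ds_raw_bpe50000/checkpoint_40",  # load_dir from the log
    tag="40",  # checkpoint tag inferred from the logged paths
)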
[cnode7-018:0/16] 2024-12-09 21:40:22,360 (s2t:444) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
[cnode7-018:0/16] 2024-12-09 21:40:38,764 (abs_task:1807) INFO: [train] dataset: ESPnetDataset(
  speech: {"path": "exp_owsm/s2t_stats_raw_bpe50000/splits8/wav.scp/split.3", "type": "kaldi_ark"}
  text_prev: {"path": "exp_owsm/s2t_stats_raw_bpe50000/splits8/text.prev/split.3", "type": "text"}
  text_ctc: {"path": "exp_owsm/s2t_stats_raw_bpe50000/splits8/text.ctc/split.3", "type": "text"}
  text: {"path": "exp_owsm/s2t_stats_raw_bpe50000/splits8/text/split.3", "type": "text"}
  preprocess: )
[cnode7-018:0/16] 2024-12-09 21:40:38,764 (abs_task:1808) INFO: [train] Batch sampler: SortedBatchSampler(N-batch=28521, batch_size=256, shape_file=exp_owsm/s2t_stats_raw_bpe50000/splits8/speech_shape/split.3, sort_in_batch=descending, sort_batch=descending)
[cnode7-018:0/16] 2024-12-09 21:40:38,767 (abs_task:1809) INFO: [train] mini-batch sizes summary: N-batch=28521, mean=256.0, min=256, max=257
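The SortedBatchSampler summary above reflects the usual length-bucketing idea: read per-utterance lengths from the shape file and group utterances of similar length so padding inside each mini-batch of 256 is minimal. A rough, framework-agnostic sketch of that idea in plain Python (function names here are illustrative, not ESPnet's internal API):

from typing import Dict, List

def read_shape_file(path: str) -> Dict[str, int]:
    """Parse shape-file lines like 'utt_id 80000' (or 'utt_id 80000,80') into {utt_id: length}."""
    lengths = {}
    with open(path) as f:
        for line in f:
            utt, shape = line.split(maxsplit=1)
            lengths[utt] = int(shape.split(",")[0])  # first dim = number of frames
    return lengths

def sorted_batches(lengths: Dict[str, int], batch_size: int) -> List[List[str]]:
    """Group utterances of similar length, longest first (sort_in_batch=descending)."""
    utts = sorted(lengths, key=lengths.get, reverse=True)
    return [utts[i:i + batch_size] for i in range(0, len(utts), batch_size)]

# e.g. batches = sorted_batches(
#     read_shape_file("exp_owsm/s2t_stats_raw_bpe50000/splits8/speech_shape/split.3"), 256)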
[2024-12-09 21:41:02 - 21:43:56] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) (~64 identical lines from newly spawned processes, collapsed)
/mnt/home/williamchen/espnet/espnet2/s2t/espnet_model.py:279: FutureWarning: `torch.cuda.amp.autocast(args...)` is deprecated. Please use `torch.amp.autocast('cuda', args...)` instead.
  with autocast(False):
(the same FutureWarning is emitted once by each of the 16 training processes; duplicates collapsed)
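The FutureWarning above is PyTorch's deprecation of the CUDA-specific autocast entry point; the drop-in replacement the warning itself suggests is:

import torch

# Old (deprecated, as flagged at espnet_model.py:279):
#     with torch.cuda.amp.autocast(False): ...
# New form suggested by the warning:
with torch.amp.autocast("cuda", enabled=False):
    pass  # region that must run in full precision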
cnode7-018:4121586:4130280 [0] NCCL INFO Using non-device net plugin version 0
cnode7-018:4121586:4130280 [0] NCCL INFO Using network IB
(the same pair of lines is printed by all 16 ranks across cnode7-018 and cnode7-019; duplicates collapsed)
cnode7-018:4121586:4130280 [0] NCCL INFO comm 0x5555b6eecd50 rank 0 nranks 16 cudaDev 0 nvmlDev 0 busId 1b000 commId 0x5c7edeaa1a974a64 - Init START
(analogous Init START lines for ranks 1-7 on cnode7-018 and ranks 8-15 on cnode7-019 follow, all with nranks 16 and commId 0x5c7edeaa1a974a64; collapsed)
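The NCCL INFO lines here appear because NCCL debug logging is enabled for this job; the standard NCCL environment variables control this verbosity. A small sketch, assuming a torch.distributed launch like this one (rank and world size supplied by srun/torchrun):

import os
import torch
import torch.distributed as dist

# NCCL_DEBUG controls the log level seen in this log; set "WARN" to quiet it.
os.environ["NCCL_DEBUG"] = "INFO"
os.environ["NCCL_DEBUG_SUBSYS"] = "INIT,NET"  # narrow to init/topology messages

dist.init_process_group(backend="nccl")  # MASTER_ADDR etc. come from the launcher
torch.cuda.set_device(int(os.environ.get("LOCAL_RANK", "0")))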
cnode7-018:4121586:4130280 [0] NCCL INFO Setting affinity for GPU 0 to ff,ffffffff,ffff0000,00000000,00ffffff,ffffffff
cnode7-018:4121586:4130280 [0] NCCL INFO NVLS multicast support is available on dev 0
(each of the 16 ranks pins one of two NUMA affinity masks for its GPU, GPUs 0-3 versus GPUs 4-7, and reports NVLS multicast support; duplicate pairs collapsed)
cnode7-018:4121586:4130280 [0] NCCL INFO comm 0x5555b6eecd50 rank 0 nRanks 16 nNodes 2 localRanks 8 localRank 0 MNNVL 0
cnode7-018:4121586:4130280 [0] NCCL INFO NVLS Head 0: 0 8
cnode7-018:4121586:4130280 [0] NCCL INFO NVLS Head 1: 1 9
cnode7-018:4121586:4130280 [0] NCCL INFO NVLS Head 2: 2 10
cnode7-018:4121586:4130280 [0] NCCL INFO NVLS Head 3: 3 11
cnode7-018:4121586:4130280 [0] NCCL INFO NVLS Head 4: 4 12
cnode7-018:4121586:4130280 [0] NCCL INFO NVLS Head 5: 5 13
cnode7-018:4121586:4130280 [0] NCCL INFO NVLS Head 6: 6 14
cnode7-018:4121586:4130280 [0] NCCL INFO NVLS Head 7: 7 15
cnode7-018:4121586:4130280 [0] NCCL INFO Channel 00/16 : 0 7 6 5 4 3 2 1 9 10 11 12 13 14 15 8
cnode7-018:4121586:4130280 [0] NCCL INFO Channel 01/16 : 0 8 15 14 13 12 11 10 9 1 2 3 4 5 6 7
cnode7-018:4121586:4130280 [0] NCCL INFO Channel 02/16 : 0 7 6 5 4 3 11 12 13 14 15 8 9 10 2 1
cnode7-018:4121586:4130280 [0] NCCL INFO Channel 03/16 : 0 1 2 10 9 8 15 14 13 12 11 3 4 5 6 7
cnode7-018:4121586:4130280 [0] NCCL INFO Channel 04/16 : 0 7 6 5 13 14 15 8 9 10 11 12 4 3 2 1
cnode7-018:4121586:4130280 [0] NCCL INFO Channel 05/16 : 0 1 2 3 4 12 11 10 9 8 15 14 13 5 6 7
cnode7-018:4121586:4130280 [0] NCCL INFO Channel 06/16 : 0 7 15 8 9 10 11 12 13 14 6 5 4 3 2 1
cnode7-018:4121586:4130280 [0] NCCL INFO Channel 07/16 : 0 1 2 3 4 5 6 14 13 12 11 10 9 8 15 7
(channels 08-15 repeat the same eight rings; duplicates collapsed)
cnode7-018:4121586:4130280 [0] NCCL INFO Trees [0] 1/8/-1->0->-1 [1] -1/-1/-1->0->7 [2] 1/-1/-1->0->7 [3] 1/-1/-1->0->7 [4] 1/-1/-1->0->7 [5] 1/-1/-1->0->7 [6] 1/-1/-1->0->7 [7] 1/-1/-1->0->7 [8] 1/-1/-1->0->8 [9] -1/-1/-1->0->7 [10] 1/-1/-1->0->7 [11] 1/-1/-1->0->7 [12] 1/-1/-1->0->7 [13] 1/-1/-1->0->7 [14] 1/-1/-1->0->7 [15] 1/-1/-1->0->7
cnode7-018:4121586:4130280 [0] NCCL INFO P2P Chunksize set to 131072
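Each Trees entry encodes, per channel, this rank's position in NCCL's reduction tree in the form child0/child1/child2->rank->parent, with -1 meaning no link. A small parser for that notation, written against the format visible in these lines (an illustration, not an official NCCL tool):

import re
from typing import List, Tuple

def parse_nccl_trees(line: str) -> List[Tuple[List[int], int, int]]:
    """Turn 'Trees [0] 1/8/-1->0->-1 [1] ...' into (children, rank, parent) tuples,
    one per channel in order of appearance."""
    out = []
    for kids, rank, parent in re.findall(r"(-?\d+/-?\d+/-?\d+)->(-?\d+)->(-?\d+)", line):
        children = [int(c) for c in kids.split("/") if int(c) != -1]
        out.append((children, int(rank), int(parent)))
    return out

# From rank 0 above: channel 0 makes rank 0 the root with children 1 and 8.
print(parse_nccl_trees("Trees [0] 1/8/-1->0->-1 [1] -1/-1/-1->0->7"))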
(cnode7-018 ranks 1-7 each print the same NVLS Head table plus their own comm summary, Trees, and "P2P Chunksize set to 131072" lines; cnode7-019 ranks 8-15 print the same minus the NVLS Head table; per-rank near-duplicates collapsed)
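NVLS in these lines is NCCL's NVLink SHARP multicast path, and the rings/trees just negotiated are what every collective in training actually runs over. A minimal sketch exercising them, assuming NCCL >= 2.17 for the NVLS toggle and a standard distributed launch:

import os
import torch
import torch.distributed as dist

# NVLS multicast can be disabled via this variable if it misbehaves on a fabric.
os.environ.setdefault("NCCL_NVLS_ENABLE", "1")

dist.init_process_group(backend="nccl")  # rank/world size from the launcher
torch.cuda.set_device(int(os.environ.get("LOCAL_RANK", "0")))
t = torch.ones(1, device="cuda")
dist.all_reduce(t)  # runs over the rings/trees/NVLS paths negotiated above
assert t.item() == dist.get_world_size()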
cnode7-019:254248:263787 [1] NCCL INFO Channel 00/0 : 9[1] -> 10[2] via P2P/CUMEM
cnode7-019:254247:263051 [0] NCCL INFO Channel 01/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA
cnode7-019:254247:263051 [0] NCCL INFO Channel 00/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA
(channel setup continues for every rank: intra-node hops go via P2P/CUMEM, while each cross-node hop pairs GPU i on cnode7-018 with GPU i on cnode7-019 as matching [send]/[receive] NET/IB/<n>/GDRDMA channels; several hundred near-identical lines collapsed)
cnode7-019:254248:263787 [1] NCCL INFO Connected all rings
cnode7-019:254247:263051 [0] NCCL INFO Connected all rings
cnode7-018:4121587:4131179 [1] NCCL INFO Connected all rings
(the remaining ranks likewise report "Connected all rings" as their channel setup completes)
P2P/CUMEM cnode7-018:4121587:4131179 [1] NCCL INFO Channel 01/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO Channel 09/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Connected all rings cnode7-018:4121592:4131283 [6] NCCL INFO Connected all rings cnode7-018:4121592:4131283 [6] NCCL INFO Channel 00/0 : 6[6] -> 7[7] via P2P/CUMEM cnode7-018:4121592:4131283 [6] NCCL INFO Channel 02/0 : 6[6] -> 7[7] via P2P/CUMEM cnode7-018:4121592:4131283 [6] NCCL INFO Channel 04/0 : 6[6] -> 7[7] via P2P/CUMEM cnode7-018:4121592:4131283 [6] NCCL INFO Channel 06/0 : 6[6] -> 7[7] via P2P/CUMEM cnode7-018:4121592:4131283 [6] NCCL INFO Channel 08/0 : 6[6] -> 7[7] via P2P/CUMEM cnode7-018:4121592:4131283 [6] NCCL INFO Channel 10/0 : 6[6] -> 7[7] via P2P/CUMEM cnode7-018:4121592:4131283 [6] NCCL INFO Channel 12/0 : 6[6] -> 7[7] via P2P/CUMEM cnode7-018:4121592:4131283 [6] NCCL INFO Channel 14/0 : 6[6] -> 7[7] via P2P/CUMEM cnode7-019:254249:264136 [2] NCCL INFO Connected all rings cnode7-019:254249:264136 [2] NCCL INFO Channel 01/0 : 10[2] -> 11[3] via P2P/CUMEM cnode7-019:254249:264136 [2] NCCL INFO Channel 02/0 : 10[2] -> 11[3] via P2P/CUMEM cnode7-019:254249:264136 [2] NCCL INFO Channel 05/0 : 10[2] -> 11[3] via P2P/CUMEM cnode7-019:254249:264136 [2] NCCL INFO Channel 07/0 : 10[2] -> 11[3] via P2P/CUMEM cnode7-019:254249:264136 [2] NCCL INFO Channel 09/0 : 10[2] -> 11[3] via P2P/CUMEM cnode7-019:254249:264136 [2] NCCL INFO Channel 10/0 : 10[2] -> 11[3] via P2P/CUMEM cnode7-019:254249:264136 [2] NCCL INFO Channel 13/0 : 10[2] -> 11[3] via P2P/CUMEM cnode7-019:254249:264136 [2] NCCL INFO Channel 15/0 : 10[2] -> 11[3] via P2P/CUMEM cnode7-019:254249:264136 [2] NCCL INFO Channel 02/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA cnode7-019:254249:264136 [2] NCCL INFO Channel 10/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO Connected all rings cnode7-018:4121586:4130280 [0] NCCL INFO Channel 00/0 : 0[0] -> 1[1] via P2P/CUMEM cnode7-018:4121586:4130280 [0] NCCL INFO Channel 02/0 : 0[0] -> 1[1] via P2P/CUMEM cnode7-018:4121586:4130280 [0] NCCL INFO Channel 04/0 : 0[0] -> 1[1] via P2P/CUMEM cnode7-018:4121586:4130280 [0] NCCL INFO Channel 06/0 : 0[0] -> 1[1] via P2P/CUMEM cnode7-018:4121586:4130280 [0] NCCL INFO Channel 08/0 : 0[0] -> 1[1] via P2P/CUMEM cnode7-018:4121586:4130280 [0] NCCL INFO Channel 10/0 : 0[0] -> 1[1] via P2P/CUMEM cnode7-018:4121586:4130280 [0] NCCL INFO Channel 12/0 : 0[0] -> 1[1] via P2P/CUMEM cnode7-018:4121586:4130280 [0] NCCL INFO Channel 14/0 : 0[0] -> 1[1] via P2P/CUMEM cnode7-018:4121586:4130280 [0] NCCL INFO Channel 01/0 : 0[0] -> 7[7] via P2P/CUMEM cnode7-018:4121586:4130280 [0] NCCL INFO Channel 03/0 : 0[0] -> 7[7] via P2P/CUMEM cnode7-018:4121586:4130280 [0] NCCL INFO Channel 05/0 : 0[0] -> 7[7] via P2P/CUMEM cnode7-018:4121586:4130280 [0] NCCL INFO Channel 07/0 : 0[0] -> 7[7] via P2P/CUMEM cnode7-018:4121586:4130280 [0] NCCL INFO Channel 09/0 : 0[0] -> 7[7] via P2P/CUMEM cnode7-018:4121586:4130280 [0] NCCL INFO Channel 11/0 : 0[0] -> 7[7] via P2P/CUMEM cnode7-018:4121586:4130280 [0] NCCL INFO Channel 13/0 : 0[0] -> 7[7] via P2P/CUMEM cnode7-018:4121586:4130280 [0] NCCL INFO Channel 15/0 : 0[0] -> 7[7] via P2P/CUMEM cnode7-018:4121586:4130280 [0] NCCL INFO Channel 00/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO Channel 08/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Connected all rings 
cnode7-018:4121588:4130390 [2] NCCL INFO Channel 00/0 : 2[2] -> 3[3] via P2P/CUMEM cnode7-018:4121588:4130390 [2] NCCL INFO Channel 02/0 : 2[2] -> 3[3] via P2P/CUMEM cnode7-018:4121588:4130390 [2] NCCL INFO Channel 04/0 : 2[2] -> 3[3] via P2P/CUMEM cnode7-018:4121588:4130390 [2] NCCL INFO Channel 06/0 : 2[2] -> 3[3] via P2P/CUMEM cnode7-018:4121588:4130390 [2] NCCL INFO Channel 08/0 : 2[2] -> 3[3] via P2P/CUMEM cnode7-018:4121588:4130390 [2] NCCL INFO Channel 10/0 : 2[2] -> 3[3] via P2P/CUMEM cnode7-018:4121588:4130390 [2] NCCL INFO Channel 12/0 : 2[2] -> 3[3] via P2P/CUMEM cnode7-018:4121588:4130390 [2] NCCL INFO Channel 14/0 : 2[2] -> 3[3] via P2P/CUMEM cnode7-018:4121588:4130390 [2] NCCL INFO Channel 02/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Channel 10/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Channel 01/0 : 2[2] -> 1[1] via P2P/CUMEM cnode7-018:4121588:4130390 [2] NCCL INFO Channel 03/0 : 2[2] -> 1[1] via P2P/CUMEM cnode7-018:4121588:4130390 [2] NCCL INFO Channel 05/0 : 2[2] -> 1[1] via P2P/CUMEM cnode7-018:4121588:4130390 [2] NCCL INFO Channel 07/0 : 2[2] -> 1[1] via P2P/CUMEM cnode7-018:4121588:4130390 [2] NCCL INFO Channel 09/0 : 2[2] -> 1[1] via P2P/CUMEM cnode7-018:4121588:4130390 [2] NCCL INFO Channel 11/0 : 2[2] -> 1[1] via P2P/CUMEM cnode7-018:4121588:4130390 [2] NCCL INFO Channel 13/0 : 2[2] -> 1[1] via P2P/CUMEM cnode7-018:4121588:4130390 [2] NCCL INFO Channel 15/0 : 2[2] -> 1[1] via P2P/CUMEM cnode7-018:4121593:4131920 [7] NCCL INFO Connected all rings cnode7-018:4121593:4131920 [7] NCCL INFO Channel 07/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA cnode7-018:4121593:4131920 [7] NCCL INFO Channel 15/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA cnode7-018:4121593:4131920 [7] NCCL INFO Channel 02/0 : 7[7] -> 0[0] via P2P/CUMEM cnode7-018:4121593:4131920 [7] NCCL INFO Channel 04/0 : 7[7] -> 0[0] via P2P/CUMEM cnode7-018:4121593:4131920 [7] NCCL INFO Channel 06/0 : 7[7] -> 0[0] via P2P/CUMEM cnode7-018:4121593:4131920 [7] NCCL INFO Channel 10/0 : 7[7] -> 0[0] via P2P/CUMEM cnode7-018:4121593:4131920 [7] NCCL INFO Channel 12/0 : 7[7] -> 0[0] via P2P/CUMEM cnode7-018:4121593:4131920 [7] NCCL INFO Channel 14/0 : 7[7] -> 0[0] via P2P/CUMEM cnode7-018:4121593:4131920 [7] NCCL INFO Channel 01/0 : 7[7] -> 6[6] via P2P/CUMEM cnode7-018:4121593:4131920 [7] NCCL INFO Channel 03/0 : 7[7] -> 6[6] via P2P/CUMEM cnode7-018:4121593:4131920 [7] NCCL INFO Channel 05/0 : 7[7] -> 6[6] via P2P/CUMEM cnode7-018:4121593:4131920 [7] NCCL INFO Channel 06/0 : 7[7] -> 6[6] via P2P/CUMEM cnode7-018:4121593:4131920 [7] NCCL INFO Channel 09/0 : 7[7] -> 6[6] via P2P/CUMEM cnode7-018:4121593:4131920 [7] NCCL INFO Channel 11/0 : 7[7] -> 6[6] via P2P/CUMEM cnode7-018:4121593:4131920 [7] NCCL INFO Channel 13/0 : 7[7] -> 6[6] via P2P/CUMEM cnode7-019:254250:263786 [3] NCCL INFO Connected all rings cnode7-019:254250:263786 [3] NCCL INFO Channel 01/0 : 11[3] -> 12[4] via P2P/CUMEM cnode7-019:254250:263786 [3] NCCL INFO Channel 03/0 : 11[3] -> 12[4] via P2P/CUMEM cnode7-019:254250:263786 [3] NCCL INFO Channel 05/0 : 11[3] -> 12[4] via P2P/CUMEM cnode7-019:254250:263786 [3] NCCL INFO Channel 07/0 : 11[3] -> 12[4] via P2P/CUMEM cnode7-019:254250:263786 [3] NCCL INFO Channel 09/0 : 11[3] -> 12[4] via P2P/CUMEM cnode7-019:254250:263786 [3] NCCL INFO Channel 11/0 : 11[3] -> 12[4] via P2P/CUMEM cnode7-019:254250:263786 [3] NCCL INFO Channel 13/0 : 11[3] -> 12[4] via P2P/CUMEM cnode7-019:254250:263786 [3] NCCL INFO Channel 15/0 : 
11[3] -> 12[4] via P2P/CUMEM cnode7-019:254250:263786 [3] NCCL INFO Channel 03/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA cnode7-019:254250:263786 [3] NCCL INFO Channel 11/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA cnode7-019:254250:263786 [3] NCCL INFO Channel 00/0 : 11[3] -> 10[2] via P2P/CUMEM cnode7-019:254250:263786 [3] NCCL INFO Channel 02/0 : 11[3] -> 10[2] via P2P/CUMEM cnode7-019:254250:263786 [3] NCCL INFO Channel 04/0 : 11[3] -> 10[2] via P2P/CUMEM cnode7-019:254250:263786 [3] NCCL INFO Channel 06/0 : 11[3] -> 10[2] via P2P/CUMEM cnode7-019:254250:263786 [3] NCCL INFO Channel 08/0 : 11[3] -> 10[2] via P2P/CUMEM cnode7-019:254250:263786 [3] NCCL INFO Channel 10/0 : 11[3] -> 10[2] via P2P/CUMEM cnode7-019:254250:263786 [3] NCCL INFO Channel 12/0 : 11[3] -> 10[2] via P2P/CUMEM cnode7-019:254250:263786 [3] NCCL INFO Channel 14/0 : 11[3] -> 10[2] via P2P/CUMEM cnode7-019:254252:262603 [5] NCCL INFO Connected all rings cnode7-019:254252:262603 [5] NCCL INFO Channel 01/0 : 13[5] -> 14[6] via P2P/CUMEM cnode7-019:254252:262603 [5] NCCL INFO Channel 03/0 : 13[5] -> 14[6] via P2P/CUMEM cnode7-019:254252:262603 [5] NCCL INFO Channel 05/0 : 13[5] -> 14[6] via P2P/CUMEM cnode7-019:254252:262603 [5] NCCL INFO Channel 07/0 : 13[5] -> 14[6] via P2P/CUMEM cnode7-019:254252:262603 [5] NCCL INFO Channel 09/0 : 13[5] -> 14[6] via P2P/CUMEM cnode7-019:254252:262603 [5] NCCL INFO Channel 11/0 : 13[5] -> 14[6] via P2P/CUMEM cnode7-019:254252:262603 [5] NCCL INFO Channel 13/0 : 13[5] -> 14[6] via P2P/CUMEM cnode7-019:254252:262603 [5] NCCL INFO Channel 15/0 : 13[5] -> 14[6] via P2P/CUMEM cnode7-019:254252:262603 [5] NCCL INFO Channel 05/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA cnode7-019:254252:262603 [5] NCCL INFO Channel 13/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA cnode7-019:254252:262603 [5] NCCL INFO Channel 00/0 : 13[5] -> 12[4] via P2P/CUMEM cnode7-019:254252:262603 [5] NCCL INFO Channel 02/0 : 13[5] -> 12[4] via P2P/CUMEM cnode7-019:254252:262603 [5] NCCL INFO Channel 04/0 : 13[5] -> 12[4] via P2P/CUMEM cnode7-019:254252:262603 [5] NCCL INFO Channel 06/0 : 13[5] -> 12[4] via P2P/CUMEM cnode7-019:254252:262603 [5] NCCL INFO Channel 08/0 : 13[5] -> 12[4] via P2P/CUMEM cnode7-019:254252:262603 [5] NCCL INFO Channel 10/0 : 13[5] -> 12[4] via P2P/CUMEM cnode7-019:254252:262603 [5] NCCL INFO Channel 12/0 : 13[5] -> 12[4] via P2P/CUMEM cnode7-019:254252:262603 [5] NCCL INFO Channel 14/0 : 13[5] -> 12[4] via P2P/CUMEM cnode7-018:4121591:4130855 [5] NCCL INFO Connected all rings cnode7-018:4121591:4130855 [5] NCCL INFO Channel 00/0 : 5[5] -> 6[6] via P2P/CUMEM cnode7-018:4121591:4130855 [5] NCCL INFO Channel 02/0 : 5[5] -> 6[6] via P2P/CUMEM cnode7-018:4121591:4130855 [5] NCCL INFO Channel 04/0 : 5[5] -> 6[6] via P2P/CUMEM cnode7-018:4121591:4130855 [5] NCCL INFO Channel 08/0 : 5[5] -> 6[6] via P2P/CUMEM cnode7-018:4121591:4130855 [5] NCCL INFO Channel 10/0 : 5[5] -> 6[6] via P2P/CUMEM cnode7-018:4121591:4130855 [5] NCCL INFO Channel 12/0 : 5[5] -> 6[6] via P2P/CUMEM cnode7-018:4121591:4130855 [5] NCCL INFO Channel 05/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA cnode7-018:4121591:4130855 [5] NCCL INFO Channel 13/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA cnode7-018:4121591:4130855 [5] NCCL INFO Channel 01/0 : 5[5] -> 4[4] via P2P/CUMEM cnode7-018:4121591:4130855 [5] NCCL INFO Channel 03/0 : 5[5] -> 4[4] via P2P/CUMEM cnode7-018:4121591:4130855 [5] NCCL INFO Channel 04/0 : 5[5] -> 4[4] via P2P/CUMEM cnode7-018:4121591:4130855 [5] NCCL INFO Channel 07/0 : 5[5] -> 4[4] via 
P2P/CUMEM cnode7-018:4121591:4130855 [5] NCCL INFO Channel 09/0 : 5[5] -> 4[4] via P2P/CUMEM cnode7-018:4121591:4130855 [5] NCCL INFO Channel 11/0 : 5[5] -> 4[4] via P2P/CUMEM cnode7-018:4121591:4130855 [5] NCCL INFO Channel 12/0 : 5[5] -> 4[4] via P2P/CUMEM cnode7-018:4121591:4130855 [5] NCCL INFO Channel 15/0 : 5[5] -> 4[4] via P2P/CUMEM cnode7-019:254253:263150 [6] NCCL INFO Connected all rings cnode7-019:254253:263150 [6] NCCL INFO Channel 01/0 : 14[6] -> 15[7] via P2P/CUMEM cnode7-019:254253:263150 [6] NCCL INFO Channel 03/0 : 14[6] -> 15[7] via P2P/CUMEM cnode7-019:254253:263150 [6] NCCL INFO Channel 05/0 : 14[6] -> 15[7] via P2P/CUMEM cnode7-019:254253:263150 [6] NCCL INFO Channel 06/0 : 14[6] -> 15[7] via P2P/CUMEM cnode7-019:254253:263150 [6] NCCL INFO Channel 09/0 : 14[6] -> 15[7] via P2P/CUMEM cnode7-019:254253:263150 [6] NCCL INFO Channel 11/0 : 14[6] -> 15[7] via P2P/CUMEM cnode7-019:254253:263150 [6] NCCL INFO Channel 13/0 : 14[6] -> 15[7] via P2P/CUMEM cnode7-019:254253:263150 [6] NCCL INFO Channel 14/0 : 14[6] -> 15[7] via P2P/CUMEM cnode7-019:254253:263150 [6] NCCL INFO Channel 06/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Channel 14/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Channel 00/0 : 14[6] -> 13[5] via P2P/CUMEM cnode7-019:254253:263150 [6] NCCL INFO Channel 02/0 : 14[6] -> 13[5] via P2P/CUMEM cnode7-019:254253:263150 [6] NCCL INFO Channel 04/0 : 14[6] -> 13[5] via P2P/CUMEM cnode7-019:254253:263150 [6] NCCL INFO Channel 08/0 : 14[6] -> 13[5] via P2P/CUMEM cnode7-019:254253:263150 [6] NCCL INFO Channel 10/0 : 14[6] -> 13[5] via P2P/CUMEM cnode7-019:254253:263150 [6] NCCL INFO Channel 12/0 : 14[6] -> 13[5] via P2P/CUMEM cnode7-019:254251:262689 [4] NCCL INFO Connected all rings cnode7-019:254251:262689 [4] NCCL INFO Channel 01/0 : 12[4] -> 13[5] via P2P/CUMEM cnode7-019:254251:262689 [4] NCCL INFO Channel 03/0 : 12[4] -> 13[5] via P2P/CUMEM cnode7-019:254251:262689 [4] NCCL INFO Channel 04/0 : 12[4] -> 13[5] via P2P/CUMEM cnode7-019:254251:262689 [4] NCCL INFO Channel 07/0 : 12[4] -> 13[5] via P2P/CUMEM cnode7-019:254251:262689 [4] NCCL INFO Channel 09/0 : 12[4] -> 13[5] via P2P/CUMEM cnode7-019:254251:262689 [4] NCCL INFO Channel 11/0 : 12[4] -> 13[5] via P2P/CUMEM cnode7-019:254251:262689 [4] NCCL INFO Channel 12/0 : 12[4] -> 13[5] via P2P/CUMEM cnode7-019:254251:262689 [4] NCCL INFO Channel 15/0 : 12[4] -> 13[5] via P2P/CUMEM cnode7-019:254251:262689 [4] NCCL INFO Channel 04/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA cnode7-019:254251:262689 [4] NCCL INFO Channel 12/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA cnode7-019:254251:262689 [4] NCCL INFO Channel 00/0 : 12[4] -> 11[3] via P2P/CUMEM cnode7-019:254251:262689 [4] NCCL INFO Channel 02/0 : 12[4] -> 11[3] via P2P/CUMEM cnode7-019:254251:262689 [4] NCCL INFO Channel 06/0 : 12[4] -> 11[3] via P2P/CUMEM cnode7-019:254251:262689 [4] NCCL INFO Channel 08/0 : 12[4] -> 11[3] via P2P/CUMEM cnode7-019:254251:262689 [4] NCCL INFO Channel 10/0 : 12[4] -> 11[3] via P2P/CUMEM cnode7-019:254251:262689 [4] NCCL INFO Channel 14/0 : 12[4] -> 11[3] via P2P/CUMEM cnode7-019:254254:262979 [7] NCCL INFO Connected all rings cnode7-019:254254:262979 [7] NCCL INFO Channel 07/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA cnode7-019:254254:262979 [7] NCCL INFO Channel 15/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA cnode7-019:254254:262979 [7] NCCL INFO Channel 01/0 : 15[7] -> 8[0] via P2P/CUMEM cnode7-019:254254:262979 [7] NCCL INFO 
Channel 03/0 : 15[7] -> 8[0] via P2P/CUMEM cnode7-019:254254:262979 [7] NCCL INFO Channel 05/0 : 15[7] -> 8[0] via P2P/CUMEM cnode7-019:254254:262979 [7] NCCL INFO Channel 07/0 : 15[7] -> 8[0] via P2P/CUMEM cnode7-019:254254:262979 [7] NCCL INFO Channel 09/0 : 15[7] -> 8[0] via P2P/CUMEM cnode7-019:254254:262979 [7] NCCL INFO Channel 11/0 : 15[7] -> 8[0] via P2P/CUMEM cnode7-019:254254:262979 [7] NCCL INFO Channel 13/0 : 15[7] -> 8[0] via P2P/CUMEM cnode7-019:254254:262979 [7] NCCL INFO Channel 15/0 : 15[7] -> 8[0] via P2P/CUMEM cnode7-019:254254:262979 [7] NCCL INFO Channel 00/0 : 15[7] -> 14[6] via P2P/CUMEM cnode7-019:254254:262979 [7] NCCL INFO Channel 02/0 : 15[7] -> 14[6] via P2P/CUMEM cnode7-019:254254:262979 [7] NCCL INFO Channel 04/0 : 15[7] -> 14[6] via P2P/CUMEM cnode7-019:254254:262979 [7] NCCL INFO Channel 06/0 : 15[7] -> 14[6] via P2P/CUMEM cnode7-019:254254:262979 [7] NCCL INFO Channel 08/0 : 15[7] -> 14[6] via P2P/CUMEM cnode7-019:254254:262979 [7] NCCL INFO Channel 10/0 : 15[7] -> 14[6] via P2P/CUMEM cnode7-019:254254:262979 [7] NCCL INFO Channel 12/0 : 15[7] -> 14[6] via P2P/CUMEM cnode7-019:254254:262979 [7] NCCL INFO Channel 14/0 : 15[7] -> 14[6] via P2P/CUMEM cnode7-018:4121590:4130770 [4] NCCL INFO Connected all rings cnode7-018:4121590:4130770 [4] NCCL INFO Channel 00/0 : 4[4] -> 5[5] via P2P/CUMEM cnode7-018:4121590:4130770 [4] NCCL INFO Channel 02/0 : 4[4] -> 5[5] via P2P/CUMEM cnode7-018:4121590:4130770 [4] NCCL INFO Channel 04/0 : 4[4] -> 5[5] via P2P/CUMEM cnode7-018:4121590:4130770 [4] NCCL INFO Channel 06/0 : 4[4] -> 5[5] via P2P/CUMEM cnode7-018:4121590:4130770 [4] NCCL INFO Channel 08/0 : 4[4] -> 5[5] via P2P/CUMEM cnode7-018:4121590:4130770 [4] NCCL INFO Channel 10/0 : 4[4] -> 5[5] via P2P/CUMEM cnode7-018:4121590:4130770 [4] NCCL INFO Channel 12/0 : 4[4] -> 5[5] via P2P/CUMEM cnode7-018:4121590:4130770 [4] NCCL INFO Channel 14/0 : 4[4] -> 5[5] via P2P/CUMEM cnode7-018:4121590:4130770 [4] NCCL INFO Channel 04/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA cnode7-018:4121590:4130770 [4] NCCL INFO Channel 12/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA cnode7-018:4121590:4130770 [4] NCCL INFO Channel 01/0 : 4[4] -> 3[3] via P2P/CUMEM cnode7-018:4121590:4130770 [4] NCCL INFO Channel 03/0 : 4[4] -> 3[3] via P2P/CUMEM cnode7-018:4121590:4130770 [4] NCCL INFO Channel 05/0 : 4[4] -> 3[3] via P2P/CUMEM cnode7-018:4121590:4130770 [4] NCCL INFO Channel 07/0 : 4[4] -> 3[3] via P2P/CUMEM cnode7-018:4121590:4130770 [4] NCCL INFO Channel 09/0 : 4[4] -> 3[3] via P2P/CUMEM cnode7-018:4121590:4130770 [4] NCCL INFO Channel 11/0 : 4[4] -> 3[3] via P2P/CUMEM cnode7-018:4121590:4130770 [4] NCCL INFO Channel 13/0 : 4[4] -> 3[3] via P2P/CUMEM cnode7-018:4121590:4130770 [4] NCCL INFO Channel 15/0 : 4[4] -> 3[3] via P2P/CUMEM cnode7-019:254248:263787 [1] NCCL INFO Channel 01/0 : 9[1] -> 10[2] via P2P/CUMEM cnode7-019:254248:263787 [1] NCCL INFO Channel 03/0 : 9[1] -> 10[2] via P2P/CUMEM cnode7-019:254248:263787 [1] NCCL INFO Channel 05/0 : 9[1] -> 10[2] via P2P/CUMEM cnode7-019:254248:263787 [1] NCCL INFO Channel 07/0 : 9[1] -> 10[2] via P2P/CUMEM cnode7-019:254248:263787 [1] NCCL INFO Channel 09/0 : 9[1] -> 10[2] via P2P/CUMEM cnode7-019:254248:263787 [1] NCCL INFO Channel 11/0 : 9[1] -> 10[2] via P2P/CUMEM cnode7-019:254248:263787 [1] NCCL INFO Channel 13/0 : 9[1] -> 10[2] via P2P/CUMEM cnode7-019:254248:263787 [1] NCCL INFO Channel 15/0 : 9[1] -> 10[2] via P2P/CUMEM cnode7-019:254248:263787 [1] NCCL INFO Channel 01/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA 
cnode7-019:254248:263787 [1] NCCL INFO Channel 09/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA cnode7-019:254248:263787 [1] NCCL INFO Channel 00/0 : 9[1] -> 8[0] via P2P/CUMEM cnode7-019:254248:263787 [1] NCCL INFO Channel 02/0 : 9[1] -> 8[0] via P2P/CUMEM cnode7-019:254248:263787 [1] NCCL INFO Channel 04/0 : 9[1] -> 8[0] via P2P/CUMEM cnode7-019:254248:263787 [1] NCCL INFO Channel 06/0 : 9[1] -> 8[0] via P2P/CUMEM cnode7-019:254248:263787 [1] NCCL INFO Channel 08/0 : 9[1] -> 8[0] via P2P/CUMEM cnode7-019:254248:263787 [1] NCCL INFO Channel 10/0 : 9[1] -> 8[0] via P2P/CUMEM cnode7-019:254248:263787 [1] NCCL INFO Channel 12/0 : 9[1] -> 8[0] via P2P/CUMEM cnode7-019:254248:263787 [1] NCCL INFO Channel 14/0 : 9[1] -> 8[0] via P2P/CUMEM cnode7-019:254247:263051 [0] NCCL INFO Channel 13/0 : 8[0] -> 9[1] via P2P/CUMEM cnode7-019:254247:263051 [0] NCCL INFO Channel 15/0 : 8[0] -> 9[1] via P2P/CUMEM cnode7-019:254247:263051 [0] NCCL INFO Channel 02/0 : 8[0] -> 15[7] via P2P/CUMEM cnode7-019:254247:263051 [0] NCCL INFO Channel 04/0 : 8[0] -> 15[7] via P2P/CUMEM cnode7-019:254247:263051 [0] NCCL INFO Channel 06/0 : 8[0] -> 15[7] via P2P/CUMEM cnode7-019:254247:263051 [0] NCCL INFO Channel 10/0 : 8[0] -> 15[7] via P2P/CUMEM cnode7-019:254247:263051 [0] NCCL INFO Channel 12/0 : 8[0] -> 15[7] via P2P/CUMEM cnode7-019:254247:263051 [0] NCCL INFO Channel 14/0 : 8[0] -> 15[7] via P2P/CUMEM cnode7-019:254247:263051 [0] NCCL INFO Channel 00/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA cnode7-019:254247:263051 [0] NCCL INFO Channel 08/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO Channel 00/0 : 1[1] -> 0[0] via P2P/CUMEM cnode7-018:4121587:4131179 [1] NCCL INFO Channel 03/0 : 1[1] -> 0[0] via P2P/CUMEM cnode7-018:4121587:4131179 [1] NCCL INFO Channel 05/0 : 1[1] -> 0[0] via P2P/CUMEM cnode7-018:4121587:4131179 [1] NCCL INFO Channel 07/0 : 1[1] -> 0[0] via P2P/CUMEM cnode7-018:4121587:4131179 [1] NCCL INFO Channel 08/0 : 1[1] -> 0[0] via P2P/CUMEM cnode7-018:4121587:4131179 [1] NCCL INFO Channel 11/0 : 1[1] -> 0[0] via P2P/CUMEM cnode7-018:4121587:4131179 [1] NCCL INFO Channel 13/0 : 1[1] -> 0[0] via P2P/CUMEM cnode7-018:4121587:4131179 [1] NCCL INFO Channel 15/0 : 1[1] -> 0[0] via P2P/CUMEM cnode7-018:4121589:4131265 [3] NCCL INFO Channel 00/0 : 3[3] -> 4[4] via P2P/CUMEM cnode7-018:4121589:4131265 [3] NCCL INFO Channel 02/0 : 3[3] -> 4[4] via P2P/CUMEM cnode7-018:4121589:4131265 [3] NCCL INFO Channel 06/0 : 3[3] -> 4[4] via P2P/CUMEM cnode7-018:4121589:4131265 [3] NCCL INFO Channel 08/0 : 3[3] -> 4[4] via P2P/CUMEM cnode7-018:4121589:4131265 [3] NCCL INFO Channel 10/0 : 3[3] -> 4[4] via P2P/CUMEM cnode7-018:4121589:4131265 [3] NCCL INFO Channel 14/0 : 3[3] -> 4[4] via P2P/CUMEM cnode7-018:4121589:4131265 [3] NCCL INFO Channel 03/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Channel 11/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Channel 01/0 : 3[3] -> 2[2] via P2P/CUMEM cnode7-018:4121589:4131265 [3] NCCL INFO Channel 02/0 : 3[3] -> 2[2] via P2P/CUMEM cnode7-018:4121589:4131265 [3] NCCL INFO Channel 05/0 : 3[3] -> 2[2] via P2P/CUMEM cnode7-018:4121589:4131265 [3] NCCL INFO Channel 07/0 : 3[3] -> 2[2] via P2P/CUMEM cnode7-018:4121589:4131265 [3] NCCL INFO Channel 09/0 : 3[3] -> 2[2] via P2P/CUMEM cnode7-018:4121589:4131265 [3] NCCL INFO Channel 10/0 : 3[3] -> 2[2] via P2P/CUMEM cnode7-018:4121589:4131265 [3] NCCL INFO Channel 13/0 : 3[3] -> 2[2] via P2P/CUMEM 
cnode7-018:4121589:4131265 [3] NCCL INFO Channel 15/0 : 3[3] -> 2[2] via P2P/CUMEM cnode7-018:4121592:4131283 [6] NCCL INFO Channel 06/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA cnode7-018:4121592:4131283 [6] NCCL INFO Channel 14/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA cnode7-018:4121592:4131283 [6] NCCL INFO Channel 01/0 : 6[6] -> 5[5] via P2P/CUMEM cnode7-018:4121592:4131283 [6] NCCL INFO Channel 03/0 : 6[6] -> 5[5] via P2P/CUMEM cnode7-018:4121592:4131283 [6] NCCL INFO Channel 05/0 : 6[6] -> 5[5] via P2P/CUMEM cnode7-018:4121592:4131283 [6] NCCL INFO Channel 07/0 : 6[6] -> 5[5] via P2P/CUMEM cnode7-018:4121592:4131283 [6] NCCL INFO Channel 09/0 : 6[6] -> 5[5] via P2P/CUMEM cnode7-018:4121592:4131283 [6] NCCL INFO Channel 11/0 : 6[6] -> 5[5] via P2P/CUMEM cnode7-018:4121592:4131283 [6] NCCL INFO Channel 13/0 : 6[6] -> 5[5] via P2P/CUMEM cnode7-018:4121592:4131283 [6] NCCL INFO Channel 15/0 : 6[6] -> 5[5] via P2P/CUMEM cnode7-019:254249:264136 [2] NCCL INFO Channel 00/0 : 10[2] -> 9[1] via P2P/CUMEM cnode7-019:254249:264136 [2] NCCL INFO Channel 04/0 : 10[2] -> 9[1] via P2P/CUMEM cnode7-019:254249:264136 [2] NCCL INFO Channel 06/0 : 10[2] -> 9[1] via P2P/CUMEM cnode7-019:254249:264136 [2] NCCL INFO Channel 08/0 : 10[2] -> 9[1] via P2P/CUMEM cnode7-019:254249:264136 [2] NCCL INFO Channel 12/0 : 10[2] -> 9[1] via P2P/CUMEM cnode7-019:254249:264136 [2] NCCL INFO Channel 14/0 : 10[2] -> 9[1] via P2P/CUMEM cnode7-018:4121593:4131920 [7] NCCL INFO Channel 14/0 : 7[7] -> 6[6] via P2P/CUMEM cnode7-018:4121589:4131265 [3] NCCL INFO Connected all trees cnode7-019:254250:263786 [3] NCCL INFO Connected all trees cnode7-019:254249:264136 [2] NCCL INFO Connected all trees cnode7-018:4121588:4130390 [2] NCCL INFO Connected all trees cnode7-019:254251:262689 [4] NCCL INFO Connected all trees cnode7-018:4121590:4130770 [4] NCCL INFO Connected all trees cnode7-019:254252:262603 [5] NCCL INFO Connected all trees cnode7-018:4121591:4130855 [5] NCCL INFO Connected all trees cnode7-018:4121586:4130280 [0] NCCL INFO Connected all trees cnode7-018:4121587:4131179 [1] NCCL INFO Connected all trees cnode7-018:4121593:4131920 [7] NCCL INFO Connected all trees cnode7-019:254253:263150 [6] NCCL INFO Connected all trees cnode7-019:254254:262979 [7] NCCL INFO Connected all trees cnode7-018:4121592:4131283 [6] NCCL INFO Connected all trees cnode7-019:254247:263051 [0] NCCL INFO Connected all trees cnode7-018:4121589:4131265 [3] NCCL INFO NVLS comm 0x5555c0ef4b50 headRank 3 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736 cnode7-019:254248:263787 [1] NCCL INFO Connected all trees cnode7-019:254249:264136 [2] NCCL INFO NVLS comm 0x5555c0f2fd40 headRank 2 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736 cnode7-018:4121588:4130390 [2] NCCL INFO NVLS comm 0x5555c0f13ed0 headRank 2 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736 cnode7-018:4121593:4131920 [7] NCCL INFO NVLS comm 0x5555c0ef0150 headRank 7 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736 cnode7-019:254250:263786 [3] NCCL INFO NVLS comm 0x5555c0f045c0 headRank 3 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736 cnode7-019:254252:262603 [5] NCCL INFO NVLS comm 0x5555be7676e0 headRank 5 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736 cnode7-018:4121591:4130855 [5] NCCL INFO NVLS comm 
0x5555d4f03e10 headRank 5 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736 cnode7-019:254253:263150 [6] NCCL INFO NVLS comm 0x5555c0ef7e60 headRank 6 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736 cnode7-019:254251:262689 [4] NCCL INFO NVLS comm 0x5555d4f0da80 headRank 4 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736 cnode7-019:254254:262979 [7] NCCL INFO NVLS comm 0x5555c0ef04c0 headRank 7 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736 cnode7-018:4121590:4130770 [4] NCCL INFO NVLS comm 0x5555c0eff350 headRank 4 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736 cnode7-019:254248:263787 [1] NCCL INFO NVLS comm 0x5555c0f35e80 headRank 1 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736 cnode7-019:254248:263787 [1] NCCL INFO Channel 02/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA cnode7-019:254248:263787 [1] NCCL INFO Channel 03/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA cnode7-019:254248:263787 [1] NCCL INFO Channel 04/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA cnode7-019:254248:263787 [1] NCCL INFO Channel 05/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA cnode7-019:254248:263787 [1] NCCL INFO Channel 06/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA cnode7-019:254248:263787 [1] NCCL INFO Channel 07/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA cnode7-019:254248:263787 [1] NCCL INFO Channel 10/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA cnode7-019:254248:263787 [1] NCCL INFO Channel 11/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA cnode7-019:254248:263787 [1] NCCL INFO Channel 12/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA cnode7-019:254248:263787 [1] NCCL INFO Channel 13/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA cnode7-019:254248:263787 [1] NCCL INFO Channel 14/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA cnode7-019:254248:263787 [1] NCCL INFO Channel 15/0 : 1[1] -> 9[1] [receive] via NET/IB/1/GDRDMA cnode7-019:254248:263787 [1] NCCL INFO Channel 00/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA cnode7-019:254248:263787 [1] NCCL INFO Channel 02/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA cnode7-019:254248:263787 [1] NCCL INFO Channel 03/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA cnode7-019:254248:263787 [1] NCCL INFO Channel 04/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA cnode7-019:254248:263787 [1] NCCL INFO Channel 05/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA cnode7-019:254248:263787 [1] NCCL INFO Channel 06/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA cnode7-019:254247:263051 [0] NCCL INFO NVLS comm 0x5555b6f2c1f0 headRank 0 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736 cnode7-019:254247:263051 [0] NCCL INFO Channel 02/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA cnode7-019:254247:263051 [0] NCCL INFO Channel 03/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA cnode7-019:254247:263051 [0] NCCL INFO Channel 04/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA cnode7-019:254247:263051 [0] NCCL INFO Channel 05/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA cnode7-019:254247:263051 [0] NCCL INFO Channel 06/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA cnode7-019:254247:263051 [0] NCCL INFO Channel 07/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA cnode7-019:254247:263051 [0] NCCL INFO Channel 10/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA 
cnode7-019:254247:263051 [0] NCCL INFO Channel 11/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA cnode7-019:254247:263051 [0] NCCL INFO Channel 12/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA cnode7-019:254247:263051 [0] NCCL INFO Channel 13/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA cnode7-019:254247:263051 [0] NCCL INFO Channel 14/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA cnode7-019:254247:263051 [0] NCCL INFO Channel 15/0 : 0[0] -> 8[0] [receive] via NET/IB/0/GDRDMA cnode7-019:254247:263051 [0] NCCL INFO Channel 01/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA cnode7-019:254247:263051 [0] NCCL INFO Channel 02/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA cnode7-019:254247:263051 [0] NCCL INFO Channel 03/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA cnode7-019:254247:263051 [0] NCCL INFO Channel 04/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA cnode7-019:254247:263051 [0] NCCL INFO Channel 05/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA cnode7-019:254247:263051 [0] NCCL INFO Channel 06/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA cnode7-019:254247:263051 [0] NCCL INFO Channel 07/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA cnode7-019:254247:263051 [0] NCCL INFO Channel 09/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA cnode7-019:254247:263051 [0] NCCL INFO Channel 10/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA cnode7-019:254247:263051 [0] NCCL INFO Channel 11/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA cnode7-019:254247:263051 [0] NCCL INFO Channel 12/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA cnode7-019:254247:263051 [0] NCCL INFO Channel 13/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA cnode7-019:254247:263051 [0] NCCL INFO Channel 14/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA cnode7-019:254247:263051 [0] NCCL INFO Channel 15/0 : 8[0] -> 0[0] [send] via NET/IB/0/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO NVLS comm 0x5555c0f01fe0 headRank 1 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736 cnode7-018:4121592:4131283 [6] NCCL INFO NVLS comm 0x5555c0f26f20 headRank 6 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736 cnode7-019:254249:264136 [2] NCCL INFO Channel 00/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA cnode7-019:254249:264136 [2] NCCL INFO Channel 01/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA cnode7-019:254249:264136 [2] NCCL INFO Channel 04/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA cnode7-019:254249:264136 [2] NCCL INFO Channel 05/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA cnode7-019:254249:264136 [2] NCCL INFO Channel 06/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA cnode7-019:254249:264136 [2] NCCL INFO Channel 07/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA cnode7-019:254249:264136 [2] NCCL INFO Channel 08/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA cnode7-019:254249:264136 [2] NCCL INFO Channel 09/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA cnode7-019:254249:264136 [2] NCCL INFO Channel 12/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA cnode7-019:254249:264136 [2] NCCL INFO Channel 13/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA cnode7-019:254249:264136 [2] NCCL INFO Channel 14/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA cnode7-019:254249:264136 [2] NCCL INFO Channel 15/0 : 2[2] -> 10[2] [receive] via NET/IB/2/GDRDMA cnode7-019:254249:264136 [2] NCCL INFO Channel 00/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA cnode7-019:254249:264136 [2] NCCL INFO Channel 01/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA cnode7-019:254249:264136 [2] NCCL INFO Channel 
03/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA cnode7-019:254249:264136 [2] NCCL INFO Channel 04/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA cnode7-019:254249:264136 [2] NCCL INFO Channel 05/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA cnode7-019:254249:264136 [2] NCCL INFO Channel 06/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA cnode7-019:254249:264136 [2] NCCL INFO Channel 07/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA cnode7-019:254249:264136 [2] NCCL INFO Channel 08/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA cnode7-019:254249:264136 [2] NCCL INFO Channel 09/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA cnode7-019:254249:264136 [2] NCCL INFO Channel 11/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA cnode7-019:254249:264136 [2] NCCL INFO Channel 12/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA cnode7-019:254249:264136 [2] NCCL INFO Channel 13/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA cnode7-019:254249:264136 [2] NCCL INFO Channel 14/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA cnode7-019:254249:264136 [2] NCCL INFO Channel 15/0 : 10[2] -> 2[2] [send] via NET/IB/2/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO NVLS comm 0x5555b6eecd50 headRank 0 nHeads 8 buffSize 4194304 memSize 2097152 nvlsPerRankSize 201326592 nvlsTotalSize 1610612736 cnode7-018:4121593:4131920 [7] NCCL INFO Channel 00/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA cnode7-018:4121593:4131920 [7] NCCL INFO Channel 01/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA cnode7-018:4121593:4131920 [7] NCCL INFO Channel 02/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA cnode7-018:4121593:4131920 [7] NCCL INFO Channel 03/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA cnode7-018:4121593:4131920 [7] NCCL INFO Channel 04/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA cnode7-018:4121593:4131920 [7] NCCL INFO Channel 05/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA cnode7-018:4121593:4131920 [7] NCCL INFO Channel 06/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA cnode7-018:4121593:4131920 [7] NCCL INFO Channel 08/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA cnode7-018:4121593:4131920 [7] NCCL INFO Channel 09/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA cnode7-018:4121593:4131920 [7] NCCL INFO Channel 10/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA cnode7-018:4121593:4131920 [7] NCCL INFO Channel 11/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA cnode7-018:4121593:4131920 [7] NCCL INFO Channel 12/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA cnode7-018:4121593:4131920 [7] NCCL INFO Channel 13/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA cnode7-018:4121593:4131920 [7] NCCL INFO Channel 14/0 : 15[7] -> 7[7] [receive] via NET/IB/8/GDRDMA cnode7-018:4121593:4131920 [7] NCCL INFO Channel 00/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA cnode7-018:4121593:4131920 [7] NCCL INFO Channel 01/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA cnode7-018:4121593:4131920 [7] NCCL INFO Channel 02/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA cnode7-018:4121593:4131920 [7] NCCL INFO Channel 03/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA cnode7-018:4121593:4131920 [7] NCCL INFO Channel 04/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA cnode7-018:4121593:4131920 [7] NCCL INFO Channel 05/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA cnode7-018:4121593:4131920 [7] NCCL INFO Channel 08/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA cnode7-018:4121593:4131920 [7] NCCL INFO Channel 09/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA cnode7-018:4121593:4131920 [7] NCCL INFO Channel 10/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA 
cnode7-018:4121593:4131920 [7] NCCL INFO Channel 11/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA cnode7-018:4121593:4131920 [7] NCCL INFO Channel 12/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA cnode7-018:4121593:4131920 [7] NCCL INFO Channel 13/0 : 7[7] -> 15[7] [send] via NET/IB/8/GDRDMA cnode7-019:254250:263786 [3] NCCL INFO Channel 00/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA cnode7-019:254250:263786 [3] NCCL INFO Channel 01/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA cnode7-019:254250:263786 [3] NCCL INFO Channel 04/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA cnode7-019:254250:263786 [3] NCCL INFO Channel 05/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA cnode7-019:254250:263786 [3] NCCL INFO Channel 06/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA cnode7-019:254250:263786 [3] NCCL INFO Channel 07/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA cnode7-019:254250:263786 [3] NCCL INFO Channel 08/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA cnode7-019:254250:263786 [3] NCCL INFO Channel 09/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA cnode7-019:254250:263786 [3] NCCL INFO Channel 12/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA cnode7-019:254250:263786 [3] NCCL INFO Channel 13/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA cnode7-019:254250:263786 [3] NCCL INFO Channel 14/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA cnode7-019:254250:263786 [3] NCCL INFO Channel 15/0 : 3[3] -> 11[3] [receive] via NET/IB/3/GDRDMA cnode7-019:254250:263786 [3] NCCL INFO Channel 00/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA cnode7-019:254250:263786 [3] NCCL INFO Channel 01/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA cnode7-019:254250:263786 [3] NCCL INFO Channel 02/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA cnode7-019:254250:263786 [3] NCCL INFO Channel 04/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA cnode7-019:254250:263786 [3] NCCL INFO Channel 05/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA cnode7-019:254250:263786 [3] NCCL INFO Channel 06/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA cnode7-019:254250:263786 [3] NCCL INFO Channel 07/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA cnode7-019:254250:263786 [3] NCCL INFO Channel 08/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA cnode7-019:254250:263786 [3] NCCL INFO Channel 09/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA cnode7-019:254250:263786 [3] NCCL INFO Channel 10/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA cnode7-019:254250:263786 [3] NCCL INFO Channel 12/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA cnode7-019:254250:263786 [3] NCCL INFO Channel 13/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA cnode7-019:254250:263786 [3] NCCL INFO Channel 14/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA cnode7-019:254250:263786 [3] NCCL INFO Channel 15/0 : 11[3] -> 3[3] [send] via NET/IB/3/GDRDMA cnode7-019:254252:262603 [5] NCCL INFO Channel 00/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA cnode7-019:254252:262603 [5] NCCL INFO Channel 01/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA cnode7-019:254252:262603 [5] NCCL INFO Channel 02/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA cnode7-019:254252:262603 [5] NCCL INFO Channel 03/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA cnode7-019:254252:262603 [5] NCCL INFO Channel 06/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA cnode7-019:254252:262603 [5] NCCL INFO Channel 07/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA cnode7-019:254252:262603 [5] NCCL INFO Channel 08/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA cnode7-019:254252:262603 [5] NCCL INFO Channel 09/0 : 5[5] -> 13[5] 
[receive] via NET/IB/6/GDRDMA cnode7-019:254252:262603 [5] NCCL INFO Channel 10/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA cnode7-019:254252:262603 [5] NCCL INFO Channel 11/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA cnode7-019:254252:262603 [5] NCCL INFO Channel 14/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA cnode7-019:254252:262603 [5] NCCL INFO Channel 15/0 : 5[5] -> 13[5] [receive] via NET/IB/6/GDRDMA cnode7-019:254252:262603 [5] NCCL INFO Channel 00/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA cnode7-019:254252:262603 [5] NCCL INFO Channel 01/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA cnode7-019:254252:262603 [5] NCCL INFO Channel 02/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA cnode7-019:254252:262603 [5] NCCL INFO Channel 03/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA cnode7-019:254252:262603 [5] NCCL INFO Channel 04/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA cnode7-019:254252:262603 [5] NCCL INFO Channel 06/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA cnode7-019:254252:262603 [5] NCCL INFO Channel 07/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA cnode7-019:254252:262603 [5] NCCL INFO Channel 08/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA cnode7-019:254252:262603 [5] NCCL INFO Channel 09/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA cnode7-019:254252:262603 [5] NCCL INFO Channel 10/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA cnode7-019:254252:262603 [5] NCCL INFO Channel 11/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA cnode7-019:254252:262603 [5] NCCL INFO Channel 12/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA cnode7-019:254252:262603 [5] NCCL INFO Channel 14/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA cnode7-019:254252:262603 [5] NCCL INFO Channel 15/0 : 13[5] -> 5[5] [send] via NET/IB/6/GDRDMA cnode7-018:4121591:4130855 [5] NCCL INFO Channel 00/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA cnode7-018:4121591:4130855 [5] NCCL INFO Channel 01/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA cnode7-018:4121591:4130855 [5] NCCL INFO Channel 02/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA cnode7-018:4121591:4130855 [5] NCCL INFO Channel 03/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA cnode7-018:4121591:4130855 [5] NCCL INFO Channel 04/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA cnode7-018:4121591:4130855 [5] NCCL INFO Channel 06/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA cnode7-018:4121591:4130855 [5] NCCL INFO Channel 07/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA cnode7-018:4121591:4130855 [5] NCCL INFO Channel 08/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA cnode7-018:4121591:4130855 [5] NCCL INFO Channel 09/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA cnode7-018:4121591:4130855 [5] NCCL INFO Channel 10/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA cnode7-018:4121591:4130855 [5] NCCL INFO Channel 11/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA cnode7-018:4121591:4130855 [5] NCCL INFO Channel 12/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA cnode7-018:4121591:4130855 [5] NCCL INFO Channel 14/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA cnode7-018:4121591:4130855 [5] NCCL INFO Channel 15/0 : 13[5] -> 5[5] [receive] via NET/IB/6/GDRDMA cnode7-018:4121591:4130855 [5] NCCL INFO Channel 00/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA cnode7-018:4121591:4130855 [5] NCCL INFO Channel 01/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA cnode7-018:4121591:4130855 [5] NCCL INFO Channel 02/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA cnode7-018:4121591:4130855 [5] NCCL INFO Channel 03/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA 
cnode7-018:4121591:4130855 [5] NCCL INFO Channel 06/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA cnode7-018:4121591:4130855 [5] NCCL INFO Channel 07/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA cnode7-018:4121591:4130855 [5] NCCL INFO Channel 08/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA cnode7-018:4121591:4130855 [5] NCCL INFO Channel 09/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA cnode7-018:4121591:4130855 [5] NCCL INFO Channel 10/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA cnode7-018:4121591:4130855 [5] NCCL INFO Channel 11/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA cnode7-018:4121591:4130855 [5] NCCL INFO Channel 14/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA cnode7-018:4121591:4130855 [5] NCCL INFO Channel 15/0 : 5[5] -> 13[5] [send] via NET/IB/6/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Channel 00/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Channel 01/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Channel 02/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Channel 03/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Channel 04/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Channel 05/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Channel 08/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Channel 09/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Channel 10/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Channel 11/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Channel 12/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Channel 13/0 : 6[6] -> 14[6] [receive] via NET/IB/7/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Channel 00/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Channel 01/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Channel 02/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Channel 03/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Channel 04/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Channel 05/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Channel 07/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Channel 08/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Channel 09/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Channel 10/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Channel 11/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Channel 12/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Channel 13/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Channel 15/0 : 14[6] -> 6[6] [send] via NET/IB/7/GDRDMA cnode7-019:254251:262689 [4] NCCL INFO Channel 00/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA cnode7-019:254251:262689 [4] NCCL INFO Channel 01/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA cnode7-019:254251:262689 [4] NCCL INFO Channel 02/0 : 4[4] -> 12[4] 
[receive] via NET/IB/4/GDRDMA cnode7-019:254251:262689 [4] NCCL INFO Channel 03/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA cnode7-019:254251:262689 [4] NCCL INFO Channel 06/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA cnode7-019:254251:262689 [4] NCCL INFO Channel 07/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA cnode7-019:254251:262689 [4] NCCL INFO Channel 08/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA cnode7-019:254251:262689 [4] NCCL INFO Channel 09/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA cnode7-019:254251:262689 [4] NCCL INFO Channel 10/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA cnode7-019:254251:262689 [4] NCCL INFO Channel 11/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA cnode7-019:254251:262689 [4] NCCL INFO Channel 14/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA cnode7-019:254251:262689 [4] NCCL INFO Channel 15/0 : 4[4] -> 12[4] [receive] via NET/IB/4/GDRDMA cnode7-019:254251:262689 [4] NCCL INFO Channel 00/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA cnode7-019:254251:262689 [4] NCCL INFO Channel 01/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA cnode7-019:254251:262689 [4] NCCL INFO Channel 02/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA cnode7-019:254251:262689 [4] NCCL INFO Channel 03/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA cnode7-019:254251:262689 [4] NCCL INFO Channel 05/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA cnode7-019:254251:262689 [4] NCCL INFO Channel 06/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA cnode7-019:254251:262689 [4] NCCL INFO Channel 07/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA cnode7-019:254251:262689 [4] NCCL INFO Channel 08/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA cnode7-019:254251:262689 [4] NCCL INFO Channel 09/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA cnode7-019:254251:262689 [4] NCCL INFO Channel 10/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA cnode7-019:254251:262689 [4] NCCL INFO Channel 11/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA cnode7-019:254251:262689 [4] NCCL INFO Channel 13/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA cnode7-019:254251:262689 [4] NCCL INFO Channel 14/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA cnode7-019:254251:262689 [4] NCCL INFO Channel 15/0 : 12[4] -> 4[4] [send] via NET/IB/4/GDRDMA cnode7-019:254254:262979 [7] NCCL INFO Channel 00/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA cnode7-019:254254:262979 [7] NCCL INFO Channel 01/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA cnode7-019:254254:262979 [7] NCCL INFO Channel 02/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA cnode7-019:254254:262979 [7] NCCL INFO Channel 03/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA cnode7-019:254254:262979 [7] NCCL INFO Channel 04/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA cnode7-019:254254:262979 [7] NCCL INFO Channel 05/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA cnode7-019:254254:262979 [7] NCCL INFO Channel 08/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA cnode7-019:254254:262979 [7] NCCL INFO Channel 09/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA cnode7-019:254254:262979 [7] NCCL INFO Channel 10/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA cnode7-019:254254:262979 [7] NCCL INFO Channel 11/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA cnode7-019:254254:262979 [7] NCCL INFO Channel 12/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA cnode7-019:254254:262979 [7] NCCL INFO Channel 13/0 : 7[7] -> 15[7] [receive] via NET/IB/8/GDRDMA cnode7-019:254254:262979 [7] NCCL INFO Channel 00/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA cnode7-019:254254:262979 [7] NCCL INFO 
Channel 01/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA cnode7-019:254254:262979 [7] NCCL INFO Channel 02/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA cnode7-019:254254:262979 [7] NCCL INFO Channel 03/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA cnode7-019:254254:262979 [7] NCCL INFO Channel 04/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA cnode7-019:254254:262979 [7] NCCL INFO Channel 05/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA cnode7-019:254254:262979 [7] NCCL INFO Channel 06/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA cnode7-019:254254:262979 [7] NCCL INFO Channel 08/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA cnode7-019:254254:262979 [7] NCCL INFO Channel 09/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA cnode7-019:254254:262979 [7] NCCL INFO Channel 10/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA cnode7-019:254254:262979 [7] NCCL INFO Channel 11/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA cnode7-019:254254:262979 [7] NCCL INFO Channel 12/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA cnode7-019:254254:262979 [7] NCCL INFO Channel 13/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA cnode7-019:254254:262979 [7] NCCL INFO Channel 14/0 : 15[7] -> 7[7] [send] via NET/IB/8/GDRDMA cnode7-018:4121590:4130770 [4] NCCL INFO Channel 00/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA cnode7-018:4121590:4130770 [4] NCCL INFO Channel 01/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA cnode7-018:4121590:4130770 [4] NCCL INFO Channel 02/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA cnode7-018:4121590:4130770 [4] NCCL INFO Channel 03/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA cnode7-018:4121590:4130770 [4] NCCL INFO Channel 05/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA cnode7-018:4121590:4130770 [4] NCCL INFO Channel 06/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA cnode7-018:4121590:4130770 [4] NCCL INFO Channel 07/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA cnode7-018:4121590:4130770 [4] NCCL INFO Channel 08/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA cnode7-018:4121590:4130770 [4] NCCL INFO Channel 09/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA cnode7-018:4121590:4130770 [4] NCCL INFO Channel 10/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA cnode7-018:4121590:4130770 [4] NCCL INFO Channel 11/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA cnode7-018:4121590:4130770 [4] NCCL INFO Channel 13/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA cnode7-018:4121590:4130770 [4] NCCL INFO Channel 14/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA cnode7-018:4121590:4130770 [4] NCCL INFO Channel 15/0 : 12[4] -> 4[4] [receive] via NET/IB/4/GDRDMA cnode7-018:4121590:4130770 [4] NCCL INFO Channel 00/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA cnode7-018:4121590:4130770 [4] NCCL INFO Channel 01/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA cnode7-018:4121590:4130770 [4] NCCL INFO Channel 02/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA cnode7-018:4121590:4130770 [4] NCCL INFO Channel 03/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA cnode7-018:4121590:4130770 [4] NCCL INFO Channel 06/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA cnode7-018:4121590:4130770 [4] NCCL INFO Channel 07/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA cnode7-018:4121590:4130770 [4] NCCL INFO Channel 08/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA cnode7-018:4121590:4130770 [4] NCCL INFO Channel 09/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA cnode7-018:4121590:4130770 [4] NCCL INFO Channel 10/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA cnode7-018:4121590:4130770 [4] NCCL INFO Channel 11/0 : 4[4] -> 12[4] [send] via 
NET/IB/4/GDRDMA cnode7-018:4121590:4130770 [4] NCCL INFO Channel 14/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA cnode7-018:4121590:4130770 [4] NCCL INFO Channel 15/0 : 4[4] -> 12[4] [send] via NET/IB/4/GDRDMA cnode7-019:254248:263787 [1] NCCL INFO Channel 07/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA cnode7-019:254248:263787 [1] NCCL INFO Channel 08/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA cnode7-019:254248:263787 [1] NCCL INFO Channel 10/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA cnode7-019:254248:263787 [1] NCCL INFO Channel 11/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA cnode7-019:254248:263787 [1] NCCL INFO Channel 12/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA cnode7-019:254248:263787 [1] NCCL INFO Channel 13/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA cnode7-019:254248:263787 [1] NCCL INFO Channel 14/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA cnode7-019:254248:263787 [1] NCCL INFO Channel 15/0 : 9[1] -> 1[1] [send] via NET/IB/1/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO Channel 00/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO Channel 02/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO Channel 03/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO Channel 04/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO Channel 05/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO Channel 06/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO Channel 07/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO Channel 08/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO Channel 10/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO Channel 11/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO Channel 12/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO Channel 13/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO Channel 14/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO Channel 15/0 : 9[1] -> 1[1] [receive] via NET/IB/1/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO Channel 02/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO Channel 03/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO Channel 04/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO Channel 05/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO Channel 06/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO Channel 07/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO Channel 10/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO Channel 11/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO Channel 12/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO Channel 13/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO Channel 14/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA cnode7-018:4121587:4131179 [1] NCCL INFO Channel 15/0 : 1[1] -> 9[1] [send] via NET/IB/1/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Channel 00/0 
: 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Channel 01/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Channel 02/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Channel 04/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Channel 05/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Channel 06/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Channel 07/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Channel 08/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Channel 09/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Channel 10/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Channel 12/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Channel 13/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Channel 14/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Channel 15/0 : 11[3] -> 3[3] [receive] via NET/IB/3/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Channel 00/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Channel 01/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Channel 04/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Channel 05/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Channel 06/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Channel 07/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Channel 08/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Channel 09/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Channel 12/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Channel 13/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Channel 14/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA cnode7-018:4121589:4131265 [3] NCCL INFO Channel 15/0 : 3[3] -> 11[3] [send] via NET/IB/3/GDRDMA cnode7-018:4121592:4131283 [6] NCCL INFO Channel 00/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA cnode7-018:4121592:4131283 [6] NCCL INFO Channel 01/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA cnode7-018:4121592:4131283 [6] NCCL INFO Channel 02/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA cnode7-018:4121592:4131283 [6] NCCL INFO Channel 03/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA cnode7-018:4121592:4131283 [6] NCCL INFO Channel 04/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA cnode7-018:4121592:4131283 [6] NCCL INFO Channel 05/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA cnode7-018:4121592:4131283 [6] NCCL INFO Channel 07/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA cnode7-018:4121592:4131283 [6] NCCL INFO Channel 08/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA cnode7-018:4121592:4131283 [6] NCCL INFO Channel 09/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA cnode7-018:4121592:4131283 [6] NCCL INFO Channel 10/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA cnode7-018:4121592:4131283 [6] NCCL INFO 
Channel 11/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA cnode7-018:4121592:4131283 [6] NCCL INFO Channel 12/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA cnode7-018:4121592:4131283 [6] NCCL INFO Channel 13/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA cnode7-018:4121592:4131283 [6] NCCL INFO Channel 15/0 : 14[6] -> 6[6] [receive] via NET/IB/7/GDRDMA cnode7-018:4121592:4131283 [6] NCCL INFO Channel 00/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA cnode7-018:4121592:4131283 [6] NCCL INFO Channel 01/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA cnode7-018:4121592:4131283 [6] NCCL INFO Channel 02/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA cnode7-018:4121592:4131283 [6] NCCL INFO Channel 03/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA cnode7-018:4121592:4131283 [6] NCCL INFO Channel 04/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA cnode7-018:4121592:4131283 [6] NCCL INFO Channel 05/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA cnode7-018:4121592:4131283 [6] NCCL INFO Channel 08/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA cnode7-018:4121592:4131283 [6] NCCL INFO Channel 09/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA cnode7-018:4121592:4131283 [6] NCCL INFO Channel 10/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA cnode7-018:4121592:4131283 [6] NCCL INFO Channel 11/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA cnode7-018:4121592:4131283 [6] NCCL INFO Channel 12/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA cnode7-018:4121592:4131283 [6] NCCL INFO Channel 13/0 : 6[6] -> 14[6] [send] via NET/IB/7/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO Channel 01/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO Channel 02/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO Channel 03/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO Channel 04/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO Channel 05/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO Channel 06/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO Channel 07/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO Channel 09/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO Channel 10/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO Channel 11/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO Channel 12/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO Channel 13/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO Channel 14/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO Channel 15/0 : 8[0] -> 0[0] [receive] via NET/IB/0/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO Channel 02/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO Channel 03/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO Channel 04/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO Channel 05/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO Channel 06/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO Channel 07/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO Channel 10/0 : 0[0] -> 8[0] 
[send] via NET/IB/0/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO Channel 11/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO Channel 12/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO Channel 13/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO Channel 14/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA cnode7-018:4121586:4130280 [0] NCCL INFO Channel 15/0 : 0[0] -> 8[0] [send] via NET/IB/0/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Channel 00/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Channel 01/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Channel 03/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Channel 04/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Channel 05/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Channel 06/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Channel 07/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Channel 08/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Channel 09/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Channel 11/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Channel 12/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Channel 13/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Channel 14/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Channel 15/0 : 10[2] -> 2[2] [receive] via NET/IB/2/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Channel 00/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Channel 01/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Channel 04/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Channel 05/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Channel 06/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Channel 07/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Channel 08/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Channel 09/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Channel 12/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Channel 13/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Channel 14/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA cnode7-018:4121588:4130390 [2] NCCL INFO Channel 15/0 : 2[2] -> 10[2] [send] via NET/IB/2/GDRDMA cnode7-019:254253:263150 [6] NCCL INFO Connected NVLS tree cnode7-019:254253:263150 [6] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512 cnode7-019:254253:263150 [6] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer cnode7-018:4121588:4130390 [2] NCCL INFO Connected NVLS tree cnode7-018:4121588:4130390 [2] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512 cnode7-018:4121588:4130390 [2] NCCL INFO 16 coll 
channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer cnode7-018:4121587:4131179 [1] NCCL INFO Connected NVLS tree cnode7-018:4121587:4131179 [1] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512 cnode7-018:4121587:4131179 [1] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer cnode7-018:4121592:4131283 [6] NCCL INFO Connected NVLS tree cnode7-018:4121592:4131283 [6] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512 cnode7-018:4121592:4131283 [6] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer cnode7-018:4121586:4130280 [0] NCCL INFO Connected NVLS tree cnode7-018:4121586:4130280 [0] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512 cnode7-018:4121586:4130280 [0] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer cnode7-019:254251:262689 [4] NCCL INFO Connected NVLS tree cnode7-019:254251:262689 [4] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512 cnode7-019:254251:262689 [4] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer cnode7-019:254254:262979 [7] NCCL INFO Connected NVLS tree cnode7-019:254254:262979 [7] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512 cnode7-019:254254:262979 [7] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer cnode7-018:4121590:4130770 [4] NCCL INFO Connected NVLS tree cnode7-018:4121590:4130770 [4] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512 cnode7-018:4121590:4130770 [4] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer cnode7-019:254249:264136 [2] NCCL INFO Connected NVLS tree cnode7-019:254249:264136 [2] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512 cnode7-019:254249:264136 [2] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer cnode7-019:254247:263051 [0] NCCL INFO Connected NVLS tree cnode7-019:254247:263051 [0] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512 cnode7-019:254247:263051 [0] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer cnode7-019:254248:263787 [1] NCCL INFO Connected NVLS tree cnode7-019:254248:263787 [1] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512 cnode7-019:254248:263787 [1] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer cnode7-018:4121589:4131265 [3] NCCL INFO Connected NVLS tree cnode7-018:4121589:4131265 [3] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512 cnode7-018:4121589:4131265 [3] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer cnode7-019:254250:263786 [3] NCCL INFO Connected NVLS tree cnode7-019:254250:263786 [3] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512 cnode7-019:254250:263786 [3] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer cnode7-018:4121591:4130855 [5] NCCL INFO Connected NVLS tree cnode7-018:4121591:4130855 [5] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512 cnode7-018:4121591:4130855 [5] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer cnode7-018:4121593:4131920 [7] NCCL INFO Connected NVLS 
tree
cnode7-018:4121593:4131920 [7] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512
cnode7-018:4121593:4131920 [7] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer
cnode7-018:4121589:4131265 [3] NCCL INFO comm 0x5555c0ef4b50 rank 3 nranks 16 cudaDev 3 nvmlDev 3 busId 61000 commId 0x5c7edeaa1a974a64 - Init COMPLETE
cnode7-018:4121592:4131283 [6] NCCL INFO comm 0x5555c0f26f20 rank 6 nranks 16 cudaDev 6 nvmlDev 6 busId d1000 commId 0x5c7edeaa1a974a64 - Init COMPLETE
cnode7-019:254252:262603 [5] NCCL INFO Connected NVLS tree
cnode7-019:254252:262603 [5] NCCL INFO threadThresholds 8/8/64 | 128/8/64 | 512 | 512
cnode7-019:254252:262603 [5] NCCL INFO 16 coll channels, 0 collnet channels, 16 nvls channels, 16 p2p channels, 2 p2p channels per peer
cnode7-019:254249:264136 [2] NCCL INFO comm 0x5555c0f2fd40 rank 10 nranks 16 cudaDev 2 nvmlDev 2 busId 52000 commId 0x5c7edeaa1a974a64 - Init COMPLETE
cnode7-018:4121591:4130855 [5] NCCL INFO comm 0x5555d4f03e10 rank 5 nranks 16 cudaDev 5 nvmlDev 5 busId c3000 commId 0x5c7edeaa1a974a64 - Init COMPLETE
cnode7-018:4121587:4131179 [1] NCCL INFO comm 0x5555c0f01fe0 rank 1 nranks 16 cudaDev 1 nvmlDev 1 busId 43000 commId 0x5c7edeaa1a974a64 - Init COMPLETE
cnode7-018:4121593:4131920 [7] NCCL INFO comm 0x5555c0ef0150 rank 7 nranks 16 cudaDev 7 nvmlDev 7 busId df000 commId 0x5c7edeaa1a974a64 - Init COMPLETE
cnode7-018:4121590:4130770 [4] NCCL INFO comm 0x5555c0eff350 rank 4 nranks 16 cudaDev 4 nvmlDev 4 busId 9d000 commId 0x5c7edeaa1a974a64 - Init COMPLETE
cnode7-018:4121586:4130280 [0] NCCL INFO comm 0x5555b6eecd50 rank 0 nranks 16 cudaDev 0 nvmlDev 0 busId 1b000 commId 0x5c7edeaa1a974a64 - Init COMPLETE
cnode7-018:4121588:4130390 [2] NCCL INFO comm 0x5555c0f13ed0 rank 2 nranks 16 cudaDev 2 nvmlDev 2 busId 52000 commId 0x5c7edeaa1a974a64 - Init COMPLETE
cnode7-019:254247:263051 [0] NCCL INFO comm 0x5555b6f2c1f0 rank 8 nranks 16 cudaDev 0 nvmlDev 0 busId 1b000 commId 0x5c7edeaa1a974a64 - Init COMPLETE
cnode7-019:254248:263787 [1] NCCL INFO comm 0x5555c0f35e80 rank 9 nranks 16 cudaDev 1 nvmlDev 1 busId 43000 commId 0x5c7edeaa1a974a64 - Init COMPLETE
cnode7-019:254251:262689 [4] NCCL INFO comm 0x5555d4f0da80 rank 12 nranks 16 cudaDev 4 nvmlDev 4 busId 9d000 commId 0x5c7edeaa1a974a64 - Init COMPLETE
cnode7-019:254253:263150 [6] NCCL INFO comm 0x5555c0ef7e60 rank 14 nranks 16 cudaDev 6 nvmlDev 6 busId d1000 commId 0x5c7edeaa1a974a64 - Init COMPLETE
cnode7-019:254254:262979 [7] NCCL INFO comm 0x5555c0ef04c0 rank 15 nranks 16 cudaDev 7 nvmlDev 7 busId df000 commId 0x5c7edeaa1a974a64 - Init COMPLETE
cnode7-019:254250:263786 [3] NCCL INFO comm 0x5555c0f045c0 rank 11 nranks 16 cudaDev 3 nvmlDev 3 busId 61000 commId 0x5c7edeaa1a974a64 - Init COMPLETE
cnode7-019:254252:262603 [5] NCCL INFO comm 0x5555be7676e0 rank 13 nranks 16 cudaDev 5 nvmlDev 5 busId c3000 commId 0x5c7edeaa1a974a64 - Init COMPLETE
[cnode7-018:0/16] 2024-12-09 21:45:06,797 (deepspeed_trainer:228) INFO: 41epoch:train:1-100batch: iter_time=2.337, loss_ctc=70.493, loss_att=55.159, acc=0.719, loss=59.755, grad_norm=6.256, loss_scale=1.000, learning_rate=6.324e-05, step_time=0.772
[cnode7-018:0/16] 2024-12-09 21:45:43,837 (deepspeed_trainer:228) INFO: 41epoch:train:101-200batch: iter_time=1.106e-04, loss_ctc=86.779, loss_att=61.706, acc=0.714, loss=69.243, grad_norm=8.270, loss_scale=1.000, learning_rate=6.324e-05, step_time=0.370
[cnode7-018:0/16] 2024-12-09 21:46:19,125 (deepspeed_trainer:228) INFO: 41epoch:train:201-300batch: iter_time=1.136e-04, loss_ctc=72.656, loss_att=53.732, acc=0.715, loss=59.420, grad_norm=5.929, loss_scale=1.000, learning_rate=6.323e-05, step_time=0.366
[cnode7-018:0/16] 2024-12-09 21:46:55,909 (deepspeed_trainer:228) INFO: 41epoch:train:301-400batch: iter_time=1.120e-04, loss_ctc=67.644, loss_att=51.600, acc=0.720, loss=56.409, grad_norm=6.135, loss_scale=1.000, learning_rate=6.323e-05, step_time=0.367
[cnode7-018:0/16] 2024-12-09 21:47:32,947 (deepspeed_trainer:228) INFO: 41epoch:train:401-500batch: iter_time=1.132e-04, loss_ctc=69.188, loss_att=52.025, acc=0.725, loss=57.177, grad_norm=5.768, loss_scale=1.000, learning_rate=6.322e-05, step_time=0.370
[cnode7-018:0/16] 2024-12-09 21:48:09,274 (deepspeed_trainer:228) INFO: 41epoch:train:501-600batch: iter_time=1.142e-04, loss_ctc=64.921, loss_att=46.976, acc=0.722, loss=52.349, grad_norm=6.160, loss_scale=1.000, learning_rate=6.322e-05, step_time=0.363
[cnode7-018:0/16] 2024-12-09 21:48:45,528 (deepspeed_trainer:228) INFO: 41epoch:train:601-700batch: iter_time=1.150e-04, loss_ctc=61.761, loss_att=44.815, acc=0.733, loss=49.898, grad_norm=5.330, loss_scale=1.000, learning_rate=6.321e-05, step_time=0.362
[cnode7-018:0/16] 2024-12-09 21:49:22,112 (deepspeed_trainer:228) INFO: 41epoch:train:701-800batch: iter_time=1.134e-04, loss_ctc=69.635, loss_att=51.684, acc=0.718, loss=57.075, grad_norm=6.047, loss_scale=1.000, learning_rate=6.321e-05, step_time=0.366
[cnode7-018:0/16] 2024-12-09 21:49:58,852 (deepspeed_trainer:228) INFO: 41epoch:train:801-900batch: iter_time=1.145e-04, loss_ctc=74.800, loss_att=51.061, acc=0.714, loss=58.160, grad_norm=5.810, loss_scale=1.000, learning_rate=6.320e-05, step_time=0.367
[2024-12-09 21:50:35,482] [INFO] [logging.py:129:log_dist] [Rank 0] step=601000, skipped=0, lr=[np.float64(6.319286179204039e-05)], mom=[[0.9, 0.98]]
[2024-12-09 21:50:35,483] [INFO] [timer.py:264:stop] epoch=0/micro_step=1000/global_step=1000, RunningAvgSamplesPerSec=44.60887797370504, CurrSamplesPerSec=42.84431859400918, MemAllocated=1.77GB, MaxMemAllocated=15.01GB
[cnode7-018:0/16] 2024-12-09 21:50:35,484 (deepspeed_trainer:228) INFO: 41epoch:train:901-1000batch: iter_time=1.147e-04, loss_ctc=78.754, loss_att=58.789, acc=0.708, loss=64.790, grad_norm=6.749, loss_scale=1.000, learning_rate=6.320e-05, step_time=0.366
[cnode7-018:0/16] 2024-12-09 21:51:12,100 (deepspeed_trainer:228) INFO: 41epoch:train:1001-1100batch: iter_time=1.129e-04, loss_ctc=62.257, loss_att=49.632, acc=0.736, loss=53.427, grad_norm=5.667, loss_scale=1.000, learning_rate=6.319e-05, step_time=0.366
[cnode7-018:0/16] 2024-12-09 21:51:48,403 (deepspeed_trainer:228) INFO: 41epoch:train:1101-1200batch: iter_time=1.094e-04, loss_ctc=70.981, loss_att=52.946, acc=0.714, loss=58.352, grad_norm=6.415, loss_scale=1.000, learning_rate=6.318e-05, step_time=0.363
[cnode7-018:0/16] 2024-12-09 21:52:24,596 (deepspeed_trainer:228) INFO: 41epoch:train:1201-1300batch: iter_time=1.125e-04, loss_ctc=62.998, loss_att=46.200, acc=0.725, loss=51.252, grad_norm=5.491, loss_scale=1.000, learning_rate=6.318e-05, step_time=0.362
[cnode7-018:0/16] 2024-12-09 21:53:01,304 (deepspeed_trainer:228) INFO: 41epoch:train:1301-1400batch: iter_time=1.090e-04, loss_ctc=75.104, loss_att=54.370, acc=0.711, loss=60.593, grad_norm=6.481, loss_scale=1.000, learning_rate=6.317e-05, step_time=0.367
[cnode7-018:0/16] 2024-12-09 21:53:37,556 (deepspeed_trainer:228) INFO: 41epoch:train:1401-1500batch: iter_time=1.096e-04, loss_ctc=66.628, loss_att=47.409, acc=0.732, loss=53.177, grad_norm=5.429, loss_scale=1.000, learning_rate=6.317e-05, step_time=0.362
[cnode7-018:0/16] 2024-12-09 21:54:13,971 (deepspeed_trainer:228) INFO: 41epoch:train:1501-1600batch: iter_time=1.084e-04, loss_ctc=70.369, loss_att=47.816, acc=0.724, loss=54.585, grad_norm=5.783, loss_scale=1.000, learning_rate=6.316e-05, step_time=0.364
[cnode7-018:0/16] 2024-12-09 21:54:50,249 (deepspeed_trainer:228) INFO: 41epoch:train:1601-1700batch: iter_time=1.076e-04, loss_ctc=56.674, loss_att=48.263, acc=0.710, loss=50.779, grad_norm=6.006, loss_scale=1.000, learning_rate=6.316e-05, step_time=0.362
[cnode7-018:0/16] 2024-12-09 21:55:26,715 (deepspeed_trainer:228) INFO: 41epoch:train:1701-1800batch: iter_time=1.091e-04, loss_ctc=72.733, loss_att=56.419, acc=0.712, loss=61.304, grad_norm=6.422, loss_scale=1.000, learning_rate=6.315e-05, step_time=0.365
[cnode7-018:0/16] 2024-12-09 21:55:57,118 (multiple_iter_factory:32) INFO: Building 1th iter-factory...
[cnode7-018:0/16] 2024-12-09 21:56:22,910 (s2t:444) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
[cnode7-018:0/16] 2024-12-09 21:56:37,952 (abs_task:1807) INFO: [train] dataset: ESPnetDataset(
  speech: {"path": "exp_owsm/s2t_stats_raw_bpe50000/splits8/wav.scp/split.7", "type": "kaldi_ark"}
  text_prev: {"path": "exp_owsm/s2t_stats_raw_bpe50000/splits8/text.prev/split.7", "type": "text"}
  text_ctc: {"path": "exp_owsm/s2t_stats_raw_bpe50000/splits8/text.ctc/split.7", "type": "text"}
  text: {"path": "exp_owsm/s2t_stats_raw_bpe50000/splits8/text/split.7", "type": "text"}
  preprocess: )
[cnode7-018:0/16] 2024-12-09 21:56:37,952 (abs_task:1808) INFO: [train] Batch sampler: SortedBatchSampler(N-batch=28521, batch_size=256, shape_file=exp_owsm/s2t_stats_raw_bpe50000/splits8/speech_shape/split.7, sort_in_batch=descending, sort_batch=descending)
[cnode7-018:0/16] 2024-12-09 21:56:37,954 (abs_task:1809) INFO: [train] mini-batch sizes summary: N-batch=28521, mean=256.0, min=256, max=257
[2024-12-09 21:57:05,610] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:57:05,632] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:57:06,591] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:57:07,147] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:57:09,677] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:57:09,961] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:57:10,336] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:57:10,455] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:57:08,027] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:57:10,626] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:57:08,493] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:57:11,236] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:57:08,775] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:57:08,985] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect)
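A note on the deepspeed_trainer lines above: loss_ctc and loss_att are the two branches of ESPnet's hybrid CTC/attention objective, and the logged combined loss is consistent with a 0.3/0.7 interpolation of the two (0.3*70.493 + 0.7*55.159 = 59.76 vs. the logged 59.755). A minimal sketch to re-check this against any logged line; the 0.3 weight is inferred from the numbers, not read from conf/train_05b_ds.yaml:

import re

# Re-derive the combined loss from one trainer line. ctc_weight=0.3 is an
# assumption inferred from the logged values; small residuals are expected
# from per-batch averaging and rounding.
line = ("41epoch:train:1-100batch: iter_time=2.337, loss_ctc=70.493, "
        "loss_att=55.159, acc=0.719, loss=59.755, grad_norm=6.256")

fields = dict(re.findall(r"([a-z_]+)=([0-9.e+-]+)", line))
ctc_weight = 0.3
recombined = (ctc_weight * float(fields["loss_ctc"])
              + (1 - ctc_weight) * float(fields["loss_att"]))
print(f"{recombined:.3f} vs. logged {fields['loss']}")  # 59.759 vs. 59.755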
[2024-12-09 21:57:13,429] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:57:16,202] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:57:53,098] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:57:54,009] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:57:54,457] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:57:58,181] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:57:55,912] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:57:58,456] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:57:56,151] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:57:56,595] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:57:59,364] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:57:59,410] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:57:57,188] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:58:00,010] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:58:00,047] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:57:57,653] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:58:01,781] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:58:04,001] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:58:40,760] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:58:41,587] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:58:42,554] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:58:43,385] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:58:46,145] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:58:44,855] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:58:47,610] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:58:45,160] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:58:47,962] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:58:48,288] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:58:45,876] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect)
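The bursts of repeated "Setting ds_accelerator to cuda (auto detect)" messages are not errors; they cluster around each "Building Nth iter-factory..." event. A plausible reading (an inference, not something the log states) is one message per freshly spawned dataloader worker process across the 16 ranks. A small tally sketch over this job's train.log:

import re
from collections import Counter

# Count the accelerator-detection messages per minute to see that they
# cluster around iter-factory rebuilds rather than appearing steadily.
# The log path is this job's output file; adjust as needed.
log_path = "exp_owsm/s2t_train_05b_ds_raw_bpe50000/train.log"

per_minute = Counter()
with open(log_path, errors="replace") as f:
    for line in f:
        for ts in re.findall(
            r"\[(\d{4}-\d{2}-\d{2} \d{2}:\d{2}):\d{2},\d{3}\] \[INFO\] \[real_accelerator",
            line,
        ):
            per_minute[ts] += 1

for minute, count in sorted(per_minute.items()):
    print(minute, count)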
[2024-12-09 21:58:48,690] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:58:48,737] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:58:46,487] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:58:50,201] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:58:51,328] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:59:28,291] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:59:28,973] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:59:29,937] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:59:31,598] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:59:34,245] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:59:32,114] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:59:34,931] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:59:33,707] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:59:36,566] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:59:34,101] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:59:34,359] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:59:37,111] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:59:37,624] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:59:37,746] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:59:38,085] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 21:59:39,778] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect)
[cnode7-018:0/16] 2024-12-09 22:00:13,144 (deepspeed_trainer:228) INFO: 41epoch:train:1801-1900batch: iter_time=2.429, loss_ctc=69.484, loss_att=49.533, acc=0.726, loss=55.498, grad_norm=5.905, loss_scale=1.000, learning_rate=6.315e-05, step_time=0.435
[2024-12-09 22:00:50,183] [INFO] [logging.py:129:log_dist] [Rank 0] step=602000, skipped=0, lr=[np.float64(6.314035429915395e-05)], mom=[[0.9, 0.98]]
[2024-12-09 22:00:50,183] [INFO] [timer.py:264:stop] epoch=0/micro_step=2000/global_step=2000, RunningAvgSamplesPerSec=44.23631721607979, CurrSamplesPerSec=41.278858159312136, MemAllocated=1.77GB, MaxMemAllocated=15.68GB
[cnode7-018:0/16] 2024-12-09 22:00:50,185 (deepspeed_trainer:228) INFO: 41epoch:train:1901-2000batch: iter_time=1.066e-04, loss_ctc=73.124, loss_att=57.263, acc=0.722, loss=62.005, grad_norm=6.513, loss_scale=1.000, learning_rate=6.314e-05, step_time=0.370
[cnode7-018:0/16] 2024-12-09 22:01:27,187 (deepspeed_trainer:228) INFO: 41epoch:train:2001-2100batch: iter_time=1.086e-04, loss_ctc=82.354, loss_att=56.686, acc=0.722, loss=64.379, grad_norm=7.643, loss_scale=1.000, learning_rate=6.314e-05, step_time=0.370
[cnode7-018:0/16] 2024-12-09 22:02:03,863 (deepspeed_trainer:228) INFO: 41epoch:train:2101-2200batch: iter_time=1.094e-04, loss_ctc=69.712, loss_att=56.190, acc=0.717, loss=60.270, grad_norm=6.724, loss_scale=1.000, learning_rate=6.313e-05, step_time=0.366
[cnode7-018:0/16] 2024-12-09 22:02:40,813 (deepspeed_trainer:228) INFO: 41epoch:train:2201-2300batch: iter_time=1.091e-04, loss_ctc=68.808, loss_att=50.114, acc=0.721, loss=55.718, grad_norm=5.604, loss_scale=1.000, learning_rate=6.313e-05, step_time=0.369
[cnode7-018:0/16] 2024-12-09 22:03:18,231 (deepspeed_trainer:228) INFO: 41epoch:train:2301-2400batch: iter_time=1.098e-04, loss_ctc=67.248, loss_att=49.087, acc=0.731, loss=54.538, grad_norm=5.616, loss_scale=1.000, learning_rate=6.312e-05, step_time=0.374
[cnode7-018:0/16] 2024-12-09 22:03:55,236 (deepspeed_trainer:228) INFO: 41epoch:train:2401-2500batch: iter_time=1.130e-04, loss_ctc=64.899, loss_att=46.301, acc=0.729, loss=51.871, grad_norm=5.414, loss_scale=1.000, learning_rate=6.312e-05, step_time=0.370
[cnode7-018:0/16] 2024-12-09 22:04:32,061 (deepspeed_trainer:228) INFO: 41epoch:train:2501-2600batch: iter_time=1.092e-04, loss_ctc=61.916, loss_att=43.334, acc=0.739, loss=48.892, grad_norm=5.644, loss_scale=1.000, learning_rate=6.311e-05, step_time=0.368
[cnode7-018:0/16] 2024-12-09 22:05:09,006 (deepspeed_trainer:228) INFO: 41epoch:train:2601-2700batch: iter_time=1.096e-04, loss_ctc=74.425, loss_att=52.172, acc=0.714, loss=58.845, grad_norm=6.151, loss_scale=1.000, learning_rate=6.311e-05, step_time=0.369
[cnode7-018:0/16] 2024-12-09 22:05:45,509 (deepspeed_trainer:228) INFO: 41epoch:train:2701-2800batch: iter_time=1.092e-04, loss_ctc=70.818, loss_att=51.437, acc=0.711, loss=57.264, grad_norm=6.371, loss_scale=1.000, learning_rate=6.310e-05, step_time=0.365
[cnode7-018:0/16] 2024-12-09 22:06:21,684 (deepspeed_trainer:228) INFO: 41epoch:train:2801-2900batch: iter_time=1.112e-04, loss_ctc=73.594, loss_att=53.991, acc=0.724, loss=59.855, grad_norm=6.923, loss_scale=1.000, learning_rate=6.310e-05, step_time=0.361
[2024-12-09 22:06:58,192] [INFO] [logging.py:129:log_dist] [Rank 0] step=603000, skipped=0, lr=[np.float64(6.308797747591709e-05)], mom=[[0.9, 0.98]]
[2024-12-09 22:06:58,193] [INFO] [timer.py:264:stop] epoch=0/micro_step=3000/global_step=3000, RunningAvgSamplesPerSec=44.28735577034643, CurrSamplesPerSec=46.39815633814928, MemAllocated=1.77GB, MaxMemAllocated=15.68GB
[cnode7-018:0/16] 2024-12-09 22:06:58,194 (deepspeed_trainer:228) INFO: 41epoch:train:2901-3000batch: iter_time=1.100e-04, loss_ctc=62.174, loss_att=52.105, acc=0.725, loss=55.111, grad_norm=5.991, loss_scale=1.000, learning_rate=6.309e-05, step_time=0.365
[cnode7-018:0/16] 2024-12-09 22:07:34,637 (deepspeed_trainer:228) INFO: 41epoch:train:3001-3100batch: iter_time=1.104e-04, loss_ctc=69.737, loss_att=50.776, acc=0.720, loss=56.479, grad_norm=6.261, loss_scale=1.000, learning_rate=6.309e-05, step_time=0.364
[cnode7-018:0/16] 2024-12-09 22:08:10,837 (deepspeed_trainer:228) INFO: 41epoch:train:3101-3200batch: iter_time=1.099e-04, loss_ctc=66.846, loss_att=46.998, acc=0.732, loss=52.943, grad_norm=5.168, loss_scale=1.000, learning_rate=6.308e-05, step_time=0.362
[cnode7-018:0/16] 2024-12-09 22:08:47,428 (deepspeed_trainer:228) INFO: 41epoch:train:3201-3300batch: iter_time=1.101e-04, loss_ctc=69.536, loss_att=51.494, acc=0.721, loss=56.907, grad_norm=6.868, loss_scale=1.000, learning_rate=6.307e-05, step_time=0.365
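The timer.py throughput lines can be cross-checked against the trainer's step_time. A tiny sanity-check sketch, assuming the logged batch_size=256 is a global batch split evenly over the 16 ranks (so 16 samples per rank per step); that interpretation is an assumption, not stated by the log:

# Per-rank throughput implied by the trainer lines above.
world_size = 16
global_batch_size = 256     # from the SortedBatchSampler line above
step_time = 0.364           # seconds, a typical value from the trainer lines

samples_per_rank_per_step = global_batch_size / world_size
print(samples_per_rank_per_step / step_time)
# ~44 samples/s, matching RunningAvgSamplesPerSec=44.287 at global_step=3000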
[cnode7-018:0/16] 2024-12-09 22:09:24,061 (deepspeed_trainer:228) INFO: 41epoch:train:3301-3400batch: iter_time=1.098e-04, loss_ctc=72.909, loss_att=52.612, acc=0.722, loss=58.693, grad_norm=5.480, loss_scale=1.000, learning_rate=6.307e-05, step_time=0.366
[cnode7-018:0/16] 2024-12-09 22:10:01,274 (deepspeed_trainer:228) INFO: 41epoch:train:3401-3500batch: iter_time=1.113e-04, loss_ctc=62.770, loss_att=45.294, acc=0.720, loss=50.547, grad_norm=5.618, loss_scale=1.000, learning_rate=6.306e-05, step_time=0.372
[cnode7-018:0/16] 2024-12-09 22:10:37,550 (deepspeed_trainer:228) INFO: 41epoch:train:3501-3600batch: iter_time=1.088e-04, loss_ctc=60.837, loss_att=49.553, acc=0.715, loss=52.946, grad_norm=5.862, loss_scale=1.000, learning_rate=6.306e-05, step_time=0.362
[cnode7-018:0/16] 2024-12-09 22:11:14,351 (deepspeed_trainer:228) INFO: 41epoch:train:3601-3700batch: iter_time=1.106e-04, loss_ctc=66.537, loss_att=50.029, acc=0.726, loss=54.992, grad_norm=6.098, loss_scale=1.000, learning_rate=6.305e-05, step_time=0.367
[cnode7-018:0/16] 2024-12-09 22:11:35,166 (multiple_iter_factory:32) INFO: Building 2th iter-factory...
[cnode7-018:0/16] 2024-12-09 22:12:01,317 (s2t:444) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
[cnode7-018:0/16] 2024-12-09 22:12:16,830 (abs_task:1807) INFO: [train] dataset: ESPnetDataset(
  speech: {"path": "exp_owsm/s2t_stats_raw_bpe50000/splits8/wav.scp/split.4", "type": "kaldi_ark"}
  text_prev: {"path": "exp_owsm/s2t_stats_raw_bpe50000/splits8/text.prev/split.4", "type": "text"}
  text_ctc: {"path": "exp_owsm/s2t_stats_raw_bpe50000/splits8/text.ctc/split.4", "type": "text"}
  text: {"path": "exp_owsm/s2t_stats_raw_bpe50000/splits8/text/split.4", "type": "text"}
  preprocess: )
[cnode7-018:0/16] 2024-12-09 22:12:16,830 (abs_task:1808) INFO: [train] Batch sampler: SortedBatchSampler(N-batch=28521, batch_size=256, shape_file=exp_owsm/s2t_stats_raw_bpe50000/splits8/speech_shape/split.4, sort_in_batch=descending, sort_batch=descending)
[cnode7-018:0/16] 2024-12-09 22:12:16,833 (abs_task:1809) INFO: [train] mini-batch sizes summary: N-batch=28521, mean=256.0, min=256, max=257
[2024-12-09 22:12:44,090] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:12:44,636] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:12:45,062] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:12:48,062] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:12:48,418] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:12:46,763] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:12:49,560] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:12:47,147] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:12:49,637] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:12:49,813] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:12:47,838] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:12:47,902] [INFO]
[real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:12:47,967] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:12:50,461] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:12:50,546] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:12:50,548] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:13:32,108] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:13:32,846] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:13:33,122] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:13:36,589] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:13:36,800] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:13:34,906] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:13:38,123] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:13:35,653] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:13:38,342] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:13:38,588] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:13:36,148] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:13:36,376] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:13:39,084] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:13:36,755] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:13:39,507] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:13:43,352] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:14:20,415] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:14:21,705] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:14:22,777] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:14:24,909] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:14:23,455] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:14:24,339] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:14:24,539] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:14:26,514] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:14:24,928] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda 
(auto detect) [2024-12-09 22:14:27,258] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:14:27,750] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:14:28,142] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:14:29,923] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:14:30,140] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:14:31,353] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:14:33,558] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:15:08,611] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:15:10,337] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:15:13,078] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:15:11,532] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:15:14,865] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:15:12,994] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:15:15,185] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:15:13,610] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:15:13,747] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:15:16,309] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:15:15,163] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:15:19,525] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:15:21,134] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:15:22,509] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:15:22,960] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:15:26,007] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect)
[cnode7-018:0/16] 2024-12-09 22:16:09,646 (deepspeed_trainer:228) INFO: 41epoch:train:3701-3800batch: iter_time=2.437, loss_ctc=71.748, loss_att=53.254, acc=0.727, loss=58.812, grad_norm=6.242, loss_scale=1.000, learning_rate=6.305e-05, step_time=0.510
[cnode7-018:0/16] 2024-12-09 22:16:47,207 (deepspeed_trainer:228) INFO: 41epoch:train:3801-3900batch: iter_time=1.075e-04, loss_ctc=73.640, loss_att=59.705, acc=0.718, loss=63.940, grad_norm=6.318, loss_scale=1.000, learning_rate=6.304e-05, step_time=0.375
[2024-12-09 22:17:24,292] [INFO] [logging.py:129:log_dist] [Rank 0] step=604000, skipped=0, lr=[np.float64(6.303573078125529e-05)], mom=[[0.9, 0.98]]
[2024-12-09 22:17:24,293] [INFO] [timer.py:264:stop] epoch=0/micro_step=4000/global_step=4000, RunningAvgSamplesPerSec=43.89092513782602, CurrSamplesPerSec=45.76073844983995, MemAllocated=1.77GB, MaxMemAllocated=15.69GB
[cnode7-018:0/16] 2024-12-09 22:17:24,295 (deepspeed_trainer:228) INFO: 41epoch:train:3901-4000batch: iter_time=1.077e-04, loss_ctc=78.089, loss_att=54.532, acc=0.726, loss=61.608, grad_norm=7.131, loss_scale=1.000, learning_rate=6.304e-05, step_time=0.371
[cnode7-018:0/16] 2024-12-09 22:18:01,090 (deepspeed_trainer:228) INFO: 41epoch:train:4001-4100batch: iter_time=1.088e-04, loss_ctc=73.924, loss_att=58.759, acc=0.720, loss=63.309, grad_norm=5.967, loss_scale=1.000, learning_rate=6.303e-05, step_time=0.367
[cnode7-018:0/16] 2024-12-09 22:18:37,550 (deepspeed_trainer:228) INFO: 41epoch:train:4101-4200batch: iter_time=1.085e-04, loss_ctc=61.759, loss_att=44.836, acc=0.734, loss=49.901, grad_norm=5.070, loss_scale=1.000, learning_rate=6.303e-05, step_time=0.364
[cnode7-018:0/16] 2024-12-09 22:19:14,270 (deepspeed_trainer:228) INFO: 41epoch:train:4201-4300batch: iter_time=1.076e-04, loss_ctc=69.964, loss_att=50.612, acc=0.730, loss=56.415, grad_norm=5.638, loss_scale=1.000, learning_rate=6.302e-05, step_time=0.367
[cnode7-018:0/16] 2024-12-09 22:19:51,106 (deepspeed_trainer:228) INFO: 41epoch:train:4301-4400batch: iter_time=1.080e-04, loss_ctc=62.356, loss_att=42.549, acc=0.748, loss=48.499, grad_norm=5.811, loss_scale=1.000, learning_rate=6.302e-05, step_time=0.368
[cnode7-018:0/16] 2024-12-09 22:20:27,875 (deepspeed_trainer:228) INFO: 41epoch:train:4401-4500batch: iter_time=1.139e-04, loss_ctc=63.594, loss_att=47.933, acc=0.732, loss=52.611, grad_norm=5.423, loss_scale=1.000, learning_rate=6.301e-05, step_time=0.367
[cnode7-018:0/16] 2024-12-09 22:21:04,735 (deepspeed_trainer:228) INFO: 41epoch:train:4501-4600batch: iter_time=1.095e-04, loss_ctc=74.052, loss_att=50.537, acc=0.734, loss=57.593, grad_norm=6.061, loss_scale=1.000, learning_rate=6.301e-05, step_time=0.368
[cnode7-018:0/16] 2024-12-09 22:21:41,468 (deepspeed_trainer:228) INFO: 41epoch:train:4601-4700batch: iter_time=1.120e-04, loss_ctc=70.787, loss_att=53.955, acc=0.711, loss=59.014, grad_norm=6.127, loss_scale=1.000, learning_rate=6.300e-05, step_time=0.367
[cnode7-018:0/16] 2024-12-09 22:22:18,051 (deepspeed_trainer:228) INFO: 41epoch:train:4701-4800batch: iter_time=1.105e-04, loss_ctc=72.519, loss_att=54.186, acc=0.734, loss=59.711, grad_norm=6.139, loss_scale=1.000, learning_rate=6.300e-05, step_time=0.365
[cnode7-018:0/16] 2024-12-09 22:22:54,245 (deepspeed_trainer:228) INFO: 41epoch:train:4801-4900batch: iter_time=1.096e-04, loss_ctc=65.105, loss_att=51.575, acc=0.730, loss=55.651, grad_norm=5.367, loss_scale=1.000, learning_rate=6.299e-05, step_time=0.361
[2024-12-09 22:23:30,417] [INFO] [logging.py:129:log_dist] [Rank 0] step=605000, skipped=0, lr=[np.float64(6.298361367722558e-05)], mom=[[0.9, 0.98]]
[2024-12-09 22:23:30,417] [INFO] [timer.py:264:stop] epoch=0/micro_step=5000/global_step=5000, RunningAvgSamplesPerSec=44.03546450725595, CurrSamplesPerSec=46.433405899518846, MemAllocated=1.77GB, MaxMemAllocated=15.69GB
[cnode7-018:0/16] 2024-12-09 22:23:30,419 (deepspeed_trainer:228) INFO: 41epoch:train:4901-5000batch: iter_time=1.110e-04, loss_ctc=66.606, loss_att=47.738, acc=0.728, loss=53.383, grad_norm=5.826, loss_scale=1.000, learning_rate=6.299e-05, step_time=0.362
[cnode7-018:0/16] 2024-12-09 22:24:07,100 (deepspeed_trainer:228) INFO: 41epoch:train:5001-5100batch: iter_time=1.097e-04, loss_ctc=69.533, loss_att=52.105, acc=0.730, loss=57.329, grad_norm=5.526, loss_scale=1.000, learning_rate=6.298e-05, step_time=0.366
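The lr values in the log_dist lines above fall on an inverse-square-root curve in the global step: lr * sqrt(step) is constant at about 0.049. A short sketch that fits that constant on two logged points and reproduces a third; the functional form is inferred from the values, not taken from conf/train_05b_ds.yaml:

import math

# Two (step, lr) pairs from the [log_dist] lines above.
pts = {601000: 6.319286179204039e-05, 605000: 6.298361367722558e-05}

# If lr = k / sqrt(step), k should be the same for both points.
ks = [lr * math.sqrt(step) for step, lr in pts.items()]
print(ks)  # both ~0.048990, consistent with inverse-sqrt decay

k = sum(ks) / len(ks)
print(k / math.sqrt(603000))
# ~6.3088e-05, vs. the logged 6.308797747591709e-05 at step=603000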
[cnode7-018:0/16] 2024-12-09 22:24:43,863 (deepspeed_trainer:228) INFO: 41epoch:train:5101-5200batch: iter_time=1.107e-04, loss_ctc=65.987, loss_att=48.018, acc=0.730, loss=53.413, grad_norm=5.807, loss_scale=1.000, learning_rate=6.298e-05, step_time=0.367
[cnode7-018:0/16] 2024-12-09 22:25:20,736 (deepspeed_trainer:228) INFO: 41epoch:train:5201-5300batch: iter_time=1.097e-04, loss_ctc=72.507, loss_att=53.276, acc=0.730, loss=59.063, grad_norm=5.743, loss_scale=1.000, learning_rate=6.297e-05, step_time=0.369
[cnode7-018:0/16] 2024-12-09 22:25:57,250 (deepspeed_trainer:228) INFO: 41epoch:train:5301-5400batch: iter_time=1.109e-04, loss_ctc=60.823, loss_att=47.950, acc=0.719, loss=51.814, grad_norm=5.344, loss_scale=1.000, learning_rate=6.297e-05, step_time=0.364
[cnode7-018:0/16] 2024-12-09 22:26:34,132 (deepspeed_trainer:228) INFO: 41epoch:train:5401-5500batch: iter_time=1.066e-04, loss_ctc=60.639, loss_att=48.550, acc=0.722, loss=52.164, grad_norm=5.698, loss_scale=1.000, learning_rate=6.296e-05, step_time=0.368
[cnode7-018:0/16] 2024-12-09 22:27:10,900 (deepspeed_trainer:228) INFO: 41epoch:train:5501-5600batch: iter_time=1.102e-04, loss_ctc=71.631, loss_att=51.824, acc=0.733, loss=57.778, grad_norm=5.907, loss_scale=1.000, learning_rate=6.295e-05, step_time=0.368
[cnode7-018:0/16] 2024-12-09 22:27:23,518 (multiple_iter_factory:32) INFO: Building 3th iter-factory...
[cnode7-018:0/16] 2024-12-09 22:27:50,179 (s2t:444) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
[cnode7-018:0/16] 2024-12-09 22:28:06,598 (abs_task:1807) INFO: [train] dataset: ESPnetDataset(
  speech: {"path": "exp_owsm/s2t_stats_raw_bpe50000/splits8/wav.scp/split.5", "type": "kaldi_ark"}
  text_prev: {"path": "exp_owsm/s2t_stats_raw_bpe50000/splits8/text.prev/split.5", "type": "text"}
  text_ctc: {"path": "exp_owsm/s2t_stats_raw_bpe50000/splits8/text.ctc/split.5", "type": "text"}
  text: {"path": "exp_owsm/s2t_stats_raw_bpe50000/splits8/text/split.5", "type": "text"}
  preprocess: )
[cnode7-018:0/16] 2024-12-09 22:28:06,598 (abs_task:1808) INFO: [train] Batch sampler: SortedBatchSampler(N-batch=28521, batch_size=256, shape_file=exp_owsm/s2t_stats_raw_bpe50000/splits8/speech_shape/split.5, sort_in_batch=descending, sort_batch=descending)
[cnode7-018:0/16] 2024-12-09 22:28:06,600 (abs_task:1809) INFO: [train] mini-batch sizes summary: N-batch=28521, mean=256.0, min=256, max=257
[2024-12-09 22:28:33,516] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:28:33,842] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:28:34,589] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:28:36,816] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:28:36,973] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:28:35,170] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:28:35,310] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:28:37,262] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:28:35,519] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09
22:28:35,636] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:28:37,870] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:28:38,220] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:28:38,276] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:28:38,431] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:28:38,493] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:28:41,748] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:29:21,865] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:29:23,444] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:29:25,444] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:29:23,935] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:29:24,057] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:29:25,952] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:29:24,402] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:29:24,925] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:29:24,963] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:29:26,977] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:29:26,998] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:29:27,170] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:29:27,592] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:29:27,634] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:29:27,855] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:29:30,661] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:30:11,101] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:30:11,779] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:30:12,111] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:30:14,087] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:30:14,368] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:30:12,885] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:30:13,188] [INFO] [real_accelerator.py:219:get_accelerator] Setting 
ds_accelerator to cuda (auto detect) [2024-12-09 22:30:15,358] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:30:13,573] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:30:15,771] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:30:14,109] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:30:16,239] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:30:16,713] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:30:16,994] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:30:17,048] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:30:18,476] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:30:58,924] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:31:00,708] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:31:01,098] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:31:03,010] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:31:03,087] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:31:01,413] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:31:03,770] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:31:02,040] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:31:04,305] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:31:02,541] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:31:04,759] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:31:05,206] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:31:05,494] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:31:06,301] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:31:05,084] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [2024-12-09 22:31:06,984] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect) [cnode7-018:0/16] 2024-12-09 22:32:01,955 (deepspeed_trainer:228) INFO: 41epoch:train:5601-5700batch: iter_time=2.513, loss_ctc=71.726, loss_att=51.496, acc=0.728, loss=57.557, grad_norm=5.922, loss_scale=1.000, learning_rate=6.295e-05, step_time=0.397 [cnode7-018:0/16] 2024-12-09 22:32:39,823 (deepspeed_trainer:228) INFO: 41epoch:train:5701-5800batch: iter_time=1.105e-04, loss_ctc=83.024, loss_att=65.043, acc=0.705, loss=70.431, grad_norm=7.163, loss_scale=1.000, learning_rate=6.294e-05, step_time=0.378 
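# NOTE (editor): the per-batch "loss" above is consistent with ESPnet's joint
# CTC/attention objective, loss = w * loss_ctc + (1 - w) * loss_att, with a
# CTC weight w of about 0.3. The weight is inferred from the logged numbers,
# not read from conf/train_05b_ds.yaml. A minimal sketch:
#
#     def combined_loss(loss_ctc, loss_att, ctc_weight=0.3):
#         # 0.3 * 65.987 + 0.7 * 48.018 = 53.41, matching the
#         # 5101-5200batch line above up to rounding
#         return ctc_weight * loss_ctc + (1.0 - ctc_weight) * loss_att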
[cnode7-018:0/16] 2024-12-09 22:33:16,642 (deepspeed_trainer:228) INFO: 41epoch:train:5801-5900batch: iter_time=1.099e-04, loss_ctc=65.027, loss_att=48.104, acc=0.731, loss=53.194, grad_norm=5.538, loss_scale=1.000, learning_rate=6.294e-05, step_time=0.368
[2024-12-09 22:33:53,944] [INFO] [logging.py:129:log_dist] [Rank 0] step=606000, skipped=0, lr=[np.float64(6.293162562899316e-05)], mom=[[0.9, 0.98]]
[2024-12-09 22:33:53,945] [INFO] [timer.py:264:stop] epoch=0/micro_step=6000/global_step=6000, RunningAvgSamplesPerSec=44.0117012491347, CurrSamplesPerSec=47.52435373695234, MemAllocated=1.77GB, MaxMemAllocated=15.7GB
[cnode7-018:0/16] 2024-12-09 22:33:53,947 (deepspeed_trainer:228) INFO: 41epoch:train:5901-6000batch: iter_time=1.088e-04, loss_ctc=71.529, loss_att=54.897, acc=0.721, loss=59.863, grad_norm=5.688, loss_scale=1.000, learning_rate=6.293e-05, step_time=0.373
[cnode7-018:0/16] 2024-12-09 22:34:30,495 (deepspeed_trainer:228) INFO: 41epoch:train:6001-6100batch: iter_time=1.099e-04, loss_ctc=62.837, loss_att=46.642, acc=0.731, loss=51.485, grad_norm=5.778, loss_scale=1.000, learning_rate=6.293e-05, step_time=0.365
[cnode7-018:0/16] 2024-12-09 22:35:07,226 (deepspeed_trainer:228) INFO: 41epoch:train:6101-6200batch: iter_time=1.116e-04, loss_ctc=69.832, loss_att=50.549, acc=0.724, loss=56.355, grad_norm=5.873, loss_scale=1.000, learning_rate=6.292e-05, step_time=0.367
[cnode7-018:0/16] 2024-12-09 22:35:43,709 (deepspeed_trainer:228) INFO: 41epoch:train:6201-6300batch: iter_time=1.101e-04, loss_ctc=60.842, loss_att=40.789, acc=0.750, loss=46.801, grad_norm=5.641, loss_scale=1.000, learning_rate=6.292e-05, step_time=0.364
[cnode7-018:0/16] 2024-12-09 22:36:20,295 (deepspeed_trainer:228) INFO: 41epoch:train:6301-6400batch: iter_time=1.117e-04, loss_ctc=70.043, loss_att=53.111, acc=0.720, loss=58.183, grad_norm=5.379, loss_scale=1.000, learning_rate=6.291e-05, step_time=0.365
[cnode7-018:0/16] 2024-12-09 22:36:57,023 (deepspeed_trainer:228) INFO: 41epoch:train:6401-6500batch: iter_time=1.102e-04, loss_ctc=70.998, loss_att=51.465, acc=0.721, loss=57.309, grad_norm=7.253, loss_scale=1.000, learning_rate=6.291e-05, step_time=0.367
[cnode7-018:0/16] 2024-12-09 22:37:33,191 (deepspeed_trainer:228) INFO: 41epoch:train:6501-6600batch: iter_time=1.121e-04, loss_ctc=70.732, loss_att=51.041, acc=0.716, loss=56.954, grad_norm=7.156, loss_scale=1.000, learning_rate=6.290e-05, step_time=0.361
[cnode7-018:0/16] 2024-12-09 22:38:09,798 (deepspeed_trainer:228) INFO: 41epoch:train:6601-6700batch: iter_time=1.116e-04, loss_ctc=68.127, loss_att=54.428, acc=0.728, loss=58.523, grad_norm=6.320, loss_scale=1.000, learning_rate=6.290e-05, step_time=0.365
[cnode7-018:0/16] 2024-12-09 22:38:46,155 (deepspeed_trainer:228) INFO: 41epoch:train:6701-6800batch: iter_time=1.099e-04, loss_ctc=68.048, loss_att=52.267, acc=0.714, loss=56.995, grad_norm=5.576, loss_scale=1.000, learning_rate=6.289e-05, step_time=0.363
[cnode7-018:0/16] 2024-12-09 22:39:22,576 (deepspeed_trainer:228) INFO: 41epoch:train:6801-6900batch: iter_time=1.115e-04, loss_ctc=64.310, loss_att=45.991, acc=0.732, loss=51.520, grad_norm=5.725, loss_scale=1.000, learning_rate=6.289e-05, step_time=0.364
[2024-12-09 22:39:58,811] [INFO] [logging.py:129:log_dist] [Rank 0] step=607000, skipped=0, lr=[np.float64(6.28797661048084e-05)], mom=[[0.9, 0.98]]
[2024-12-09 22:39:58,812] [INFO] [timer.py:264:stop] epoch=0/micro_step=7000/global_step=7000, RunningAvgSamplesPerSec=44.11988747197721, CurrSamplesPerSec=46.96726993383569, MemAllocated=1.77GB, MaxMemAllocated=15.7GB
[cnode7-018:0/16] 2024-12-09 22:39:58,813 (deepspeed_trainer:228) INFO: 41epoch:train:6901-7000batch: iter_time=1.090e-04, loss_ctc=68.602, loss_att=50.996, acc=0.720, loss=56.271, grad_norm=6.162, loss_scale=1.000, learning_rate=6.288e-05, step_time=0.362
[cnode7-018:0/16] 2024-12-09 22:40:35,037 (deepspeed_trainer:228) INFO: 41epoch:train:7001-7100batch: iter_time=1.096e-04, loss_ctc=65.768, loss_att=48.182, acc=0.730, loss=53.450, grad_norm=5.372, loss_scale=1.000, learning_rate=6.288e-05, step_time=0.362
[cnode7-018:0/16] 2024-12-09 22:41:11,289 (deepspeed_trainer:228) INFO: 41epoch:train:7101-7200batch: iter_time=1.145e-04, loss_ctc=71.211, loss_att=48.228, acc=0.732, loss=55.145, grad_norm=7.657, loss_scale=1.000, learning_rate=6.287e-05, step_time=0.362
[cnode7-018:0/16] 2024-12-09 22:41:47,137 (deepspeed_trainer:228) INFO: 41epoch:train:7201-7300batch: iter_time=1.094e-04, loss_ctc=55.833, loss_att=44.659, acc=0.719, loss=47.998, grad_norm=5.471, loss_scale=1.000, learning_rate=6.287e-05, step_time=0.358
[cnode7-018:0/16] 2024-12-09 22:42:23,474 (deepspeed_trainer:228) INFO: 41epoch:train:7301-7400batch: iter_time=1.114e-04, loss_ctc=67.843, loss_att=53.029, acc=0.714, loss=57.473, grad_norm=6.410, loss_scale=1.000, learning_rate=6.286e-05, step_time=0.363
[cnode7-018:0/16] 2024-12-09 22:43:00,130 (deepspeed_trainer:228) INFO: 41epoch:train:7401-7500batch: iter_time=1.090e-04, loss_ctc=72.061, loss_att=53.164, acc=0.727, loss=58.836, grad_norm=6.429, loss_scale=1.000, learning_rate=6.286e-05, step_time=0.366
[cnode7-018:0/16] 2024-12-09 22:43:03,469 (multiple_iter_factory:32) INFO: Building 4th iter-factory...
[cnode7-018:0/16] 2024-12-09 22:43:30,368 (s2t:444) INFO: Optional Data Names: ('text_prev', 'text_ctc', 'text_spk2', 'text_spk3', 'text_spk4')
[cnode7-018:0/16] 2024-12-09 22:43:46,800 (abs_task:1807) INFO: [train] dataset: ESPnetDataset(
  speech: {"path": "exp_owsm/s2t_stats_raw_bpe50000/splits8/wav.scp/split.0", "type": "kaldi_ark"}
  text_prev: {"path": "exp_owsm/s2t_stats_raw_bpe50000/splits8/text.prev/split.0", "type": "text"}
  text_ctc: {"path": "exp_owsm/s2t_stats_raw_bpe50000/splits8/text.ctc/split.0", "type": "text"}
  text: {"path": "exp_owsm/s2t_stats_raw_bpe50000/splits8/text/split.0", "type": "text"}
  preprocess: )
[cnode7-018:0/16] 2024-12-09 22:43:46,800 (abs_task:1808) INFO: [train] Batch sampler: SortedBatchSampler(N-batch=28521, batch_size=256, shape_file=exp_owsm/s2t_stats_raw_bpe50000/splits8/speech_shape/split.0, sort_in_batch=descending, sort_batch=descending)
[cnode7-018:0/16] 2024-12-09 22:43:46,802 (abs_task:1809) INFO: [train] mini-batch sizes summary: N-batch=28521, mean=256.0, min=256, max=257
[2024-12-09 22:44:13,170] [INFO] [real_accelerator.py:219:get_accelerator] Setting ds_accelerator to cuda (auto detect)
[... ~63 more identical "Setting ds_accelerator to cuda (auto detect)" lines, emitted between 22:44:13 and 22:46:54 by the respawned worker processes, collapsed ...]
[cnode7-018:0/16] 2024-12-09 22:47:56,359 (deepspeed_trainer:228) INFO: 41epoch:train:7501-7600batch: iter_time=2.494, loss_ctc=69.859, loss_att=55.882, acc=0.727, loss=60.075, grad_norm=6.038, loss_scale=1.000, learning_rate=6.285e-05, step_time=0.471
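# NOTE (editor): each "Building Nth iter-factory" line starts a new sub-epoch
# over one of the eight training splits (--multiple_iterator true). The first
# batch after a rebuild pays the dataset/sampler construction cost, which is
# why iter_time jumps to ~2.5 s there (5601-5700batch and 7501-7600batch)
# versus ~1e-4 s elsewhere. A minimal sketch of the pattern, with hypothetical
# helper names:
#
#     for split_idx in range(8):                  # splits8/ from the stats stage
#         loader = build_iter_factory(split_idx)  # hypothetical: builds dataset + sampler
#         for batch in loader:                    # first iteration includes the setup latency
#             train_step(batch)                   # hypothetical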
[cnode7-018:0/16] 2024-12-09 22:48:33,562 (deepspeed_trainer:228) INFO: 41epoch:train:7601-7700batch: iter_time=1.042e-04, loss_ctc=82.554, loss_att=57.935, acc=0.731, loss=65.271, grad_norm=7.117, loss_scale=1.000, learning_rate=6.285e-05, step_time=0.372
[cnode7-018:0/16] 2024-12-09 22:49:10,045 (deepspeed_trainer:228) INFO: 41epoch:train:7701-7800batch: iter_time=1.075e-04, loss_ctc=70.109, loss_att=52.295, acc=0.726, loss=57.628, grad_norm=6.470, loss_scale=1.000, learning_rate=6.284e-05, step_time=0.364
[cnode7-018:0/16] 2024-12-09 22:49:46,465 (deepspeed_trainer:228) INFO: 41epoch:train:7801-7900batch: iter_time=1.097e-04, loss_ctc=66.264, loss_att=50.957, acc=0.725, loss=55.551, grad_norm=5.735, loss_scale=1.000, learning_rate=6.284e-05, step_time=0.364
[2024-12-09 22:50:23,123] [INFO] [logging.py:129:log_dist] [Rank 0] step=608000, skipped=0, lr=[np.float64(6.282803457598398e-05)], mom=[[0.9, 0.98]]
[2024-12-09 22:50:23,124] [INFO] [timer.py:264:stop] epoch=0/micro_step=8000/global_step=8000, RunningAvgSamplesPerSec=44.04348008027492, CurrSamplesPerSec=44.74296545129617, MemAllocated=1.77GB, MaxMemAllocated=15.7GB
[cnode7-018:0/16] 2024-12-09 22:50:23,125 (deepspeed_trainer:228) INFO: 41epoch:train:7901-8000batch: iter_time=1.142e-04, loss_ctc=68.302, loss_att=50.553, acc=0.741, loss=55.894, grad_norm=5.300, loss_scale=1.000, learning_rate=6.283e-05, step_time=0.366
[cnode7-018:0/16] 2024-12-09 22:50:59,846 (deepspeed_trainer:228) INFO: 41epoch:train:8001-8100batch: iter_time=1.119e-04, loss_ctc=63.946, loss_att=46.320, acc=0.731, loss=51.618, grad_norm=5.882, loss_scale=1.000, learning_rate=6.283e-05, step_time=0.367
[cnode7-018:0/16] 2024-12-09 22:51:36,387 (deepspeed_trainer:228) INFO: 41epoch:train:8101-8200batch: iter_time=1.099e-04, loss_ctc=61.700, loss_att=44.076, acc=0.741, loss=49.330, grad_norm=5.156, loss_scale=1.000, learning_rate=6.282e-05, step_time=0.365
[cnode7-018:0/16] 2024-12-09 22:52:12,992 (deepspeed_trainer:228) INFO: 41epoch:train:8201-8300batch: iter_time=1.112e-04, loss_ctc=69.001, loss_att=50.063, acc=0.734, loss=55.755, grad_norm=5.223, loss_scale=1.000, learning_rate=6.282e-05, step_time=0.366
Process SpawnProcess-6:
Traceback (most recent call last):
  File "/mnt/home/williamchen/espnet/tools/miniconda/envs/espnet/lib/python3.10/multiprocessing/process.py", line 314, in _bootstrap
    self.run()
  File "/mnt/home/williamchen/espnet/tools/miniconda/envs/espnet/lib/python3.10/multiprocessing/process.py", line 108, in run
    self._target(*self._args, **self._kwargs)
  File "/mnt/home/williamchen/espnet/espnet2/tasks/abs_task.py", line 1568, in main_worker
    cls.trainer.run(
  File "/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py", line 132, in run
    cls.train_one_epoch(
  File "/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py", line 204, in train_one_epoch
    loss, stats, weight = model(**batch)
  File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1553, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1562, in _call_impl
    return forward_call(*args, **kwargs)
  File "/mnt/home/williamchen/espnet/tools/miniconda/envs/espnet/lib/python3.10/site-packages/deepspeed/utils/nvtx.py", line 18, in wrapped_fn
    ret_val = func(*args, **kwargs)
  File "/mnt/home/williamchen/espnet/tools/miniconda/envs/espnet/lib/python3.10/site-packages/deepspeed/runtime/engine.py", line 1899, in forward
    loss = self.module(*inputs, **kwargs)
  File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1553, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1562, in _call_impl
    return forward_call(*args, **kwargs)
  File "/mnt/home/williamchen/espnet/espnet2/s2t/espnet_model.py", line 225, in forward
    loss_att, acc_att, cer_att, wer_att = self._calc_att_loss(
  File "/mnt/home/williamchen/espnet/espnet2/s2t/espnet_model.py", line 391, in _calc_att_loss
    decoder_out, _ = self.decoder(
  File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1553, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1562, in _call_impl
    return forward_call(*args, **kwargs)
  File "/mnt/home/williamchen/espnet/espnet2/asr/decoder/transformer_decoder.py", line 159, in forward
    x, tgt_mask, memory, memory_mask = decoder_layer(
  File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1553, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1562, in _call_impl
    return forward_call(*args, **kwargs)
  File "/mnt/home/williamchen/espnet/espnet/nets/pytorch_backend/transformer/decoder_layer.py", line 163, in forward
    x = residual + self.dropout(self.src_attn(x, memory, memory, memory_mask))
  File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1553, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1562, in _call_impl
    return forward_call(*args, **kwargs)
  File "/mnt/home/williamchen/espnet/espnet/nets/pytorch_backend/transformer/attention.py", line 236, in forward
    q, k, v = self.forward_qkv(query, key, value, expand_kv)
  File "/mnt/home/williamchen/espnet/espnet/nets/pytorch_backend/transformer/attention.py", line 88, in forward_qkv
    q = self.linear_q(query).view(n_batch, -1, self.h, self.d_k)
  File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1553, in _wrapped_call_impl
    return self._call_impl(*args, **kwargs)
  File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/nn/modules/module.py", line 1562, in _call_impl
    return forward_call(*args, **kwargs)
  File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/nn/modules/linear.py", line 117, in forward
    return F.linear(input, self.weight, self.bias)
torch.OutOfMemoryError: CUDA out of memory. Tried to allocate 1.73 GiB. GPU 5 has a total capacity of 79.11 GiB of which 1.62 GiB is free. Including non-PyTorch memory, this process has 77.48 GiB memory in use. Of the allocated memory 70.73 GiB is allocated by PyTorch, and 698.50 MiB is reserved by PyTorch but unallocated. If reserved but unallocated memory is large try setting PYTORCH_CUDA_ALLOC_CONF=expandable_segments:True to avoid fragmentation.
See documentation for Memory Management (https://pytorch.org/docs/stable/notes/cuda.html#environment-variables)
cnode7-018:4121587:4131941 [1] NCCL INFO [Service thread] Connection closed by localRank 5
[... 13 similar "Connection closed by localRank 5" NCCL INFO lines from the other service threads on cnode7-018 collapsed ...]
W1209 22:52:24.173000 23456244418368 torch/multiprocessing/spawn.py:146] Terminating process 4121586 via signal SIGTERM
W1209 22:52:24.174000 23456244418368 torch/multiprocessing/spawn.py:146] Terminating process 4121587 via signal SIGTERM
W1209 22:52:24.174000 23456244418368 torch/multiprocessing/spawn.py:146] Terminating process 4121588 via signal SIGTERM
W1209 22:52:24.175000 23456244418368 torch/multiprocessing/spawn.py:146] Terminating process 4121589 via signal SIGTERM
W1209 22:52:24.176000 23456244418368 torch/multiprocessing/spawn.py:146] Terminating process 4121590 via signal SIGTERM
W1209 22:52:24.176000 23456244418368 torch/multiprocessing/spawn.py:146] Terminating process 4121592 via signal SIGTERM
W1209 22:52:24.177000 23456244418368 torch/multiprocessing/spawn.py:146] Terminating process 4121593 via signal SIGTERM
Traceback (most recent call last):
  File "/mnt/home/williamchen/espnet/tools/miniconda/envs/espnet/lib/python3.10/runpy.py", line 196, in _run_module_as_main
    return _run_code(code, main_globals, None,
  File "/mnt/home/williamchen/espnet/tools/miniconda/envs/espnet/lib/python3.10/runpy.py", line 86, in _run_code
    exec(code, run_globals)
  File "/mnt/home/williamchen/espnet/espnet2/bin/s2t_train.py", line 23, in <module>
    main()
  File "/mnt/home/williamchen/espnet/espnet2/bin/s2t_train.py", line 19, in main
    S2TTask.main(cmd=cmd)
  File "/mnt/home/williamchen/espnet/espnet2/tasks/abs_task.py", line 1273, in main
    while not ProcessContext(processes, error_files).join():
  File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/multiprocessing/spawn.py", line 178, in join
    raise ProcessExitedException(
torch.multiprocessing.spawn.ProcessExitedException: process 5 terminated with exit code 1
srun: error: cnode7-018: task 0: Exited with exit code 1
[rank15]:[E1209 23:02:15.200810824 ProcessGroupNCCL.cpp:607] [Rank 15] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=41536, OpType=ALLGATHER, NumelIn=1, NumelOut=16, Timeout(ms)=600000) ran for 600002 milliseconds before timing out.
[rank9]:[E1209 23:02:15.200816296 ProcessGroupNCCL.cpp:607] [Rank 9] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=41536, OpType=ALLGATHER, NumelIn=1, NumelOut=16, Timeout(ms)=600000) ran for 600006 milliseconds before timing out.
[... matching ProcessGroupNCCL.cpp:607 watchdog-timeout lines for ranks 8 and 10-15 (600003-600017 ms) collapsed ...]
[rank15]:[E1209 23:02:15.223996010 ProcessGroupNCCL.cpp:1664] [PG 0 (default_pg) Rank 15] Exception (either an error or timeout) detected by watchdog at work: 41536, last enqueued NCCL work: 41536, last completed NCCL work: 41535.
[rank15]:[E1209 23:02:15.224010135 ProcessGroupNCCL.cpp:1709] [PG 0 (default_pg) Rank 15] Timeout at NCCL work: 41536, last enqueued NCCL work: 41536, last completed NCCL work: 41535.
[rank15]:[E1209 23:02:15.224014238 ProcessGroupNCCL.cpp:621] [Rank 15] Some NCCL operations have failed or timed out. Due to the asynchronous nature of CUDA kernels, subsequent GPU operations might run on corrupted/incomplete data.
[rank13]:[E1209 23:02:15.224026873 ProcessGroupNCCL.cpp:670] [Rank 13] Work WorkNCCL(SeqNum=41536, OpType=ALLGATHER, NumelIn=1, NumelOut=16, Timeout(ms)=600000) timed out in blocking wait (TORCH_NCCL_BLOCKING_WAIT=1).
[... matching ProcessGroupNCCL.cpp:670 blocking-wait lines for ranks 8-12, 14, and 15 collapsed ...]
cnode7-019:254251:264154 [4] NCCL INFO [Service thread] Connection closed by localRank 0
[... several dozen similar "Connection closed by localRank N" NCCL INFO lines from cnode7-019's service threads collapsed ...]
cnode7-019:254247:254247 [0] NCCL INFO comm 0x5555b6f2c1f0 rank 8 nranks 16 cudaDev 0 busId 1b000 - Abort COMPLETE
cnode7-019:254254:254254 [7] NCCL INFO comm 0x5555c0ef04c0 rank 15 nranks 16 cudaDev 7 busId df000 - Abort COMPLETE
cnode7-019:254248:254248 [1] NCCL INFO comm 0x5555c0f35e80 rank 9 nranks 16 cudaDev 1 busId 43000 - Abort COMPLETE
cnode7-019:254251:254251 [4] NCCL INFO comm 0x5555d4f0da80 rank 12 nranks 16 cudaDev 4 busId 9d000 - Abort COMPLETE
cnode7-019:254249:254249 [2] NCCL INFO comm 0x5555c0f2fd40 rank 10 nranks 16 cudaDev 2 busId 52000 - Abort COMPLETE
cnode7-019:254252:254252 [5] NCCL INFO comm 0x5555be7676e0 rank 13 nranks 16 cudaDev 5 busId c3000 - Abort COMPLETE
cnode7-019:254250:254250 [3] NCCL INFO comm 0x5555c0f045c0 rank 11 nranks 16 cudaDev 3 busId 61000 - Abort COMPLETE
cnode7-019:254253:254253 [6] NCCL INFO comm 0x5555c0ef7e60 rank 14 nranks 16 cudaDev 6 busId d1000 - Abort COMPLETE
[... each abort is accompanied by per-rank ProcessGroupNCCL.cpp:621/627/1664/1709 lines ("Some NCCL operations have failed or timed out ...", "To avoid data inconsistency, we are taking the entire process down.", and the watchdog exception/timeout lines for work 41536), repeated for ranks 8-15; duplicates collapsed ...]
Process SpawnProcess-7:
Process SpawnProcess-1:
Traceback (most recent call last):
  File "/mnt/home/williamchen/espnet/tools/miniconda/envs/espnet/lib/python3.10/multiprocessing/process.py", line 314, in _bootstrap
    self.run()
  File "/mnt/home/williamchen/espnet/tools/miniconda/envs/espnet/lib/python3.10/multiprocessing/process.py", line 108, in run
    self._target(*self._args, **self._kwargs)
  File "/mnt/home/williamchen/espnet/espnet2/tasks/abs_task.py", line 1568, in main_worker
    cls.trainer.run(
  File "/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py", line 132, in run
    cls.train_one_epoch(
  File "/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py", line 208, in train_one_epoch
    stats, weight = recursive_average(stats, weight, True)
  File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 53, in recursive_average
    obj = recursive_sum(obj, weight, distributed)
  File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 14, in recursive_sum
    return {k: recursive_sum(v, weight, distributed) for k, v in obj.items()}
  File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 14, in <dictcomp>
    return {k: recursive_sum(v, weight, distributed) for k, v in obj.items()}
  File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 22, in recursive_sum
    torch.distributed.all_gather(lst, obj)
  File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/distributed/c10d_logger.py", line 79, in wrapper
    return func(*args, **kwargs)
  File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/distributed/distributed_c10d.py", line 3113, in all_gather
    work.wait()
torch.distributed.DistBackendError: [Rank 14] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=41536, OpType=ALLGATHER, NumelIn=1, NumelOut=16, Timeout(ms)=600000) ran for 600004 milliseconds before timing out.
Traceback (most recent call last): File "/mnt/home/williamchen/espnet/tools/miniconda/envs/espnet/lib/python3.10/multiprocessing/process.py", line 314, in _bootstrap self.run() File "/mnt/home/williamchen/espnet/tools/miniconda/envs/espnet/lib/python3.10/multiprocessing/process.py", line 108, in run self._target(*self._args, **self._kwargs) File "/mnt/home/williamchen/espnet/espnet2/tasks/abs_task.py", line 1568, in main_worker cls.trainer.run( File "/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py", line 132, in run cls.train_one_epoch( File "/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py", line 208, in train_one_epoch stats, weight = recursive_average(stats, weight, True) File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 53, in recursive_average obj = recursive_sum(obj, weight, distributed) File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 14, in recursive_sum return {k: recursive_sum(v, weight, distributed) for k, v in obj.items()} File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 14, in return {k: recursive_sum(v, weight, distributed) for k, v in obj.items()} File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 22, in recursive_sum torch.distributed.all_gather(lst, obj) File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/distributed/c10d_logger.py", line 79, in wrapper return func(*args, **kwargs) File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/distributed/distributed_c10d.py", line 3113, in all_gather work.wait() torch.distributed.DistBackendError: [Rank 8] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=41536, OpType=ALLGATHER, NumelIn=1, NumelOut=16, Timeout(ms)=600000) ran for 600005 milliseconds before timing out. 
Process SpawnProcess-5: Traceback (most recent call last): File "/mnt/home/williamchen/espnet/tools/miniconda/envs/espnet/lib/python3.10/multiprocessing/process.py", line 314, in _bootstrap self.run() File "/mnt/home/williamchen/espnet/tools/miniconda/envs/espnet/lib/python3.10/multiprocessing/process.py", line 108, in run self._target(*self._args, **self._kwargs) File "/mnt/home/williamchen/espnet/espnet2/tasks/abs_task.py", line 1568, in main_worker cls.trainer.run( File "/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py", line 132, in run cls.train_one_epoch( File "/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py", line 208, in train_one_epoch stats, weight = recursive_average(stats, weight, True) File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 53, in recursive_average obj = recursive_sum(obj, weight, distributed) File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 14, in recursive_sum return {k: recursive_sum(v, weight, distributed) for k, v in obj.items()} File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 14, in return {k: recursive_sum(v, weight, distributed) for k, v in obj.items()} File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 22, in recursive_sum torch.distributed.all_gather(lst, obj) File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/distributed/c10d_logger.py", line 79, in wrapper return func(*args, **kwargs) File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/distributed/distributed_c10d.py", line 3113, in all_gather work.wait() torch.distributed.DistBackendError: [Rank 12] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=41536, OpType=ALLGATHER, NumelIn=1, NumelOut=16, Timeout(ms)=600000) ran for 600009 milliseconds before timing out. 
Process SpawnProcess-6: Traceback (most recent call last): File "/mnt/home/williamchen/espnet/tools/miniconda/envs/espnet/lib/python3.10/multiprocessing/process.py", line 314, in _bootstrap self.run() File "/mnt/home/williamchen/espnet/tools/miniconda/envs/espnet/lib/python3.10/multiprocessing/process.py", line 108, in run self._target(*self._args, **self._kwargs) File "/mnt/home/williamchen/espnet/espnet2/tasks/abs_task.py", line 1568, in main_worker cls.trainer.run( File "/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py", line 132, in run cls.train_one_epoch( File "/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py", line 208, in train_one_epoch stats, weight = recursive_average(stats, weight, True) File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 53, in recursive_average obj = recursive_sum(obj, weight, distributed) File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 14, in recursive_sum return {k: recursive_sum(v, weight, distributed) for k, v in obj.items()} File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 14, in return {k: recursive_sum(v, weight, distributed) for k, v in obj.items()} File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 22, in recursive_sum torch.distributed.all_gather(lst, obj) File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/distributed/c10d_logger.py", line 79, in wrapper return func(*args, **kwargs) File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/distributed/distributed_c10d.py", line 3113, in all_gather work.wait() torch.distributed.DistBackendError: [Rank 13] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=41536, OpType=ALLGATHER, NumelIn=1, NumelOut=16, Timeout(ms)=600000) ran for 600003 milliseconds before timing out. 
Process SpawnProcess-8: Traceback (most recent call last): File "/mnt/home/williamchen/espnet/tools/miniconda/envs/espnet/lib/python3.10/multiprocessing/process.py", line 314, in _bootstrap self.run() File "/mnt/home/williamchen/espnet/tools/miniconda/envs/espnet/lib/python3.10/multiprocessing/process.py", line 108, in run self._target(*self._args, **self._kwargs) File "/mnt/home/williamchen/espnet/espnet2/tasks/abs_task.py", line 1568, in main_worker cls.trainer.run( File "/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py", line 132, in run cls.train_one_epoch( File "/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py", line 208, in train_one_epoch stats, weight = recursive_average(stats, weight, True) File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 53, in recursive_average obj = recursive_sum(obj, weight, distributed) File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 14, in recursive_sum return {k: recursive_sum(v, weight, distributed) for k, v in obj.items()} File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 14, in return {k: recursive_sum(v, weight, distributed) for k, v in obj.items()} File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 22, in recursive_sum torch.distributed.all_gather(lst, obj) File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/distributed/c10d_logger.py", line 79, in wrapper return func(*args, **kwargs) File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/distributed/distributed_c10d.py", line 3113, in all_gather work.wait() torch.distributed.DistBackendError: [Rank 15] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=41536, OpType=ALLGATHER, NumelIn=1, NumelOut=16, Timeout(ms)=600000) ran for 600002 milliseconds before timing out. 
Process SpawnProcess-4: Traceback (most recent call last): File "/mnt/home/williamchen/espnet/tools/miniconda/envs/espnet/lib/python3.10/multiprocessing/process.py", line 314, in _bootstrap self.run() File "/mnt/home/williamchen/espnet/tools/miniconda/envs/espnet/lib/python3.10/multiprocessing/process.py", line 108, in run self._target(*self._args, **self._kwargs) File "/mnt/home/williamchen/espnet/espnet2/tasks/abs_task.py", line 1568, in main_worker cls.trainer.run( File "/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py", line 132, in run cls.train_one_epoch( File "/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py", line 208, in train_one_epoch stats, weight = recursive_average(stats, weight, True) File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 53, in recursive_average obj = recursive_sum(obj, weight, distributed) File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 14, in recursive_sum return {k: recursive_sum(v, weight, distributed) for k, v in obj.items()} File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 14, in return {k: recursive_sum(v, weight, distributed) for k, v in obj.items()} File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 22, in recursive_sum torch.distributed.all_gather(lst, obj) File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/distributed/c10d_logger.py", line 79, in wrapper return func(*args, **kwargs) File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/distributed/distributed_c10d.py", line 3113, in all_gather work.wait() torch.distributed.DistBackendError: [Rank 11] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=41536, OpType=ALLGATHER, NumelIn=1, NumelOut=16, Timeout(ms)=600000) ran for 600007 milliseconds before timing out. 
Process SpawnProcess-2:
Traceback (most recent call last):
  File "/mnt/home/williamchen/espnet/tools/miniconda/envs/espnet/lib/python3.10/multiprocessing/process.py", line 314, in _bootstrap
    self.run()
  File "/mnt/home/williamchen/espnet/tools/miniconda/envs/espnet/lib/python3.10/multiprocessing/process.py", line 108, in run
    self._target(*self._args, **self._kwargs)
  File "/mnt/home/williamchen/espnet/espnet2/tasks/abs_task.py", line 1568, in main_worker
    cls.trainer.run(
  File "/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py", line 132, in run
    cls.train_one_epoch(
  File "/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py", line 208, in train_one_epoch
    stats, weight = recursive_average(stats, weight, True)
  File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 53, in recursive_average
    obj = recursive_sum(obj, weight, distributed)
  File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 14, in recursive_sum
    return {k: recursive_sum(v, weight, distributed) for k, v in obj.items()}
  File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 14, in <dictcomp>
    return {k: recursive_sum(v, weight, distributed) for k, v in obj.items()}
  File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 22, in recursive_sum
    torch.distributed.all_gather(lst, obj)
  File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/distributed/c10d_logger.py", line 79, in wrapper
    return func(*args, **kwargs)
  File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/distributed/distributed_c10d.py", line 3113, in all_gather
    work.wait()
torch.distributed.DistBackendError: [Rank 9] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=41536, OpType=ALLGATHER, NumelIn=1, NumelOut=16, Timeout(ms)=600000) ran for 600006 milliseconds before timing out.
Process SpawnProcess-3:
Traceback (most recent call last):
  File "/mnt/home/williamchen/espnet/tools/miniconda/envs/espnet/lib/python3.10/multiprocessing/process.py", line 314, in _bootstrap
    self.run()
  File "/mnt/home/williamchen/espnet/tools/miniconda/envs/espnet/lib/python3.10/multiprocessing/process.py", line 108, in run
    self._target(*self._args, **self._kwargs)
  File "/mnt/home/williamchen/espnet/espnet2/tasks/abs_task.py", line 1568, in main_worker
    cls.trainer.run(
  File "/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py", line 132, in run
    cls.train_one_epoch(
  File "/mnt/home/williamchen/espnet/espnet2/train/deepspeed_trainer.py", line 208, in train_one_epoch
    stats, weight = recursive_average(stats, weight, True)
  File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 53, in recursive_average
    obj = recursive_sum(obj, weight, distributed)
  File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 14, in recursive_sum
    return {k: recursive_sum(v, weight, distributed) for k, v in obj.items()}
  File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 14, in <dictcomp>
    return {k: recursive_sum(v, weight, distributed) for k, v in obj.items()}
  File "/mnt/home/williamchen/espnet/espnet2/torch_utils/recursive_op.py", line 22, in recursive_sum
    torch.distributed.all_gather(lst, obj)
  File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/distributed/c10d_logger.py", line 79, in wrapper
    return func(*args, **kwargs)
  File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/distributed/distributed_c10d.py", line 3113, in all_gather
    work.wait()
torch.distributed.DistBackendError: [Rank 10] Watchdog caught collective operation timeout: WorkNCCL(SeqNum=41536, OpType=ALLGATHER, NumelIn=1, NumelOut=16, Timeout(ms)=600000) ran for 600009 milliseconds before timing out.
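All four worker tracebacks fail at the same point: the DeepSpeed trainer averages the per-epoch stats dict via recursive_average, which walks the dict and issues one torch.distributed.all_gather per tensor leaf (here a single element gathered across the 16 ranks, matching NumelIn=1 / NumelOut=16 above). A collective completes only when every rank reaches it, so one hung or crashed rank leaves the rest blocked until the NCCL watchdog fires at Timeout(ms)=600000. A minimal sketch of that contract, using a deliberately simplified recursive_sum rather than ESPnet's actual implementation:

    import torch
    import torch.distributed as dist

    def recursive_sum(obj, distributed: bool = True):
        # Walk a (possibly nested) stats dict; every tensor leaf issues one
        # collective, so the dict structure and traversal order must be
        # identical on all ranks.
        if isinstance(obj, dict):
            return {k: recursive_sum(v, distributed) for k, v in obj.items()}
        if distributed and isinstance(obj, torch.Tensor):
            gathered = [torch.zeros_like(obj) for _ in range(dist.get_world_size())]
            # Blocks until every rank calls all_gather for this same leaf; a
            # rank that never arrives leaves the others waiting until the NCCL
            # watchdog kills the op (the DistBackendError above).
            dist.all_gather(gathered, obj)
            return torch.stack(gathered).sum(dim=0)
        return obj

Under this reading, the four DistBackendErrors are symptoms of one rank failing to reach SeqNum=41536, not four independent faults.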
cnode7-019:254248:255168 [1] NCCL INFO [Service thread] Connection closed by localRank 5
cnode7-019:254249:255174 [2] NCCL INFO [Service thread] Connection closed by localRank 5
cnode7-019:254250:255180 [3] NCCL INFO [Service thread] Connection closed by localRank 5
cnode7-019:254250:255180 [3] NCCL INFO [Service thread] Connection closed by localRank 0
cnode7-019:254250:255180 [3] NCCL INFO [Service thread] Connection closed by localRank 6
cnode7-019:254250:255180 [3] NCCL INFO [Service thread] Connection closed by localRank 7
cnode7-019:254248:255168 [1] NCCL INFO [Service thread] Connection closed by localRank 0
cnode7-019:254248:255168 [1] NCCL INFO [Service thread] Connection closed by localRank 6
cnode7-019:254248:255168 [1] NCCL INFO [Service thread] Connection closed by localRank 7
cnode7-019:254248:255168 [1] NCCL INFO [Service thread] Connection closed by localRank 4
cnode7-019:254249:255174 [2] NCCL INFO [Service thread] Connection closed by localRank 0
cnode7-019:254249:255174 [2] NCCL INFO [Service thread] Connection closed by localRank 6
cnode7-019:254249:255174 [2] NCCL INFO [Service thread] Connection closed by localRank 7
cnode7-019:254249:255174 [2] NCCL INFO [Service thread] Connection closed by localRank 4
cnode7-019:254250:255180 [3] NCCL INFO [Service thread] Connection closed by localRank 4
W1209 23:02:24.965000 23456244418368 torch/multiprocessing/spawn.py:146] Terminating process 254247 via signal SIGTERM
W1209 23:02:24.965000 23456244418368 torch/multiprocessing/spawn.py:146] Terminating process 254248 via signal SIGTERM
W1209 23:02:24.965000 23456244418368 torch/multiprocessing/spawn.py:146] Terminating process 254249 via signal SIGTERM
W1209 23:02:24.966000 23456244418368 torch/multiprocessing/spawn.py:146] Terminating process 254250 via signal SIGTERM
W1209 23:02:24.966000 23456244418368 torch/multiprocessing/spawn.py:146] Terminating process 254251 via signal SIGTERM
W1209 23:02:24.966000 23456244418368 torch/multiprocessing/spawn.py:146] Terminating process 254253 via signal SIGTERM
W1209 23:02:24.966000 23456244418368 torch/multiprocessing/spawn.py:146] Terminating process 254254 via signal SIGTERM
Traceback (most recent call last):
  File "/mnt/home/williamchen/espnet/tools/miniconda/envs/espnet/lib/python3.10/runpy.py", line 196, in _run_module_as_main
    return _run_code(code, main_globals, None,
  File "/mnt/home/williamchen/espnet/tools/miniconda/envs/espnet/lib/python3.10/runpy.py", line 86, in _run_code
    exec(code, run_globals)
  File "/mnt/home/williamchen/espnet/espnet2/bin/s2t_train.py", line 23, in <module>
    main()
  File "/mnt/home/williamchen/espnet/espnet2/bin/s2t_train.py", line 19, in main
    S2TTask.main(cmd=cmd)
  File "/mnt/home/williamchen/espnet/espnet2/tasks/abs_task.py", line 1273, in main
    while not ProcessContext(processes, error_files).join():
  File "/mnt/home/williamchen/.local/lib/python3.10/site-packages/torch/multiprocessing/spawn.py", line 178, in join
    raise ProcessExitedException(
torch.multiprocessing.spawn.ProcessExitedException: process 5 terminated with exit code 1
srun: error: cnode7-019: task 1: Exited with exit code 1
# Accounting: begin_time=1733751534
# Accounting: end_time=1733756547
# Accounting: time=5013 threads=1
# Finished at Mon Dec 9 23:02:27 CST 2024 with status 1
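The teardown above is consistent with local rank 5 on cnode7-019 failing first: the NCCL service threads report its connection closing, the SIGTERM list skips one PID (254252), and the spawner reports "process 5 terminated with exit code 1", making the watchdog timeouts on the other ranks downstream symptoms. When the root cause is legitimately slow work rather than a crash (e.g., one rank spending more than ten minutes between collectives), the timeout can be raised when the process group is initialized. A hedged sketch, not a change verified against this job; the backend and 30-minute value are illustrative assumptions:

    from datetime import timedelta
    import torch.distributed as dist

    # Raise the collective timeout above the 600000 ms (10 min) value seen in
    # the watchdog errors above. 30 minutes here is an arbitrary example, not
    # a value taken from this job's configuration.
    dist.init_process_group(backend="nccl", timeout=timedelta(minutes=30))

For diagnosing which rank failed to join the collective, NCCL_DEBUG=INFO (already emitting the INFO lines above) and TORCH_DISTRIBUTED_DEBUG=DETAIL are the usual first steps.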