firework8 commited on
Commit
890e732
·
verified ·
1 Parent(s): 5545deb

Upload 64 files

Browse files
This view is limited to 50 files because it contains too many changes.   See raw diff
Files changed (50) hide show
  1. ckpt/Others/Shift-GCN/ntu120_xset/ntu120_bone_motion_xset/config.yaml +56 -0
  2. ckpt/Others/Shift-GCN/ntu120_xset/ntu120_bone_motion_xset/eval_results/best_acc.pkl +3 -0
  3. ckpt/Others/Shift-GCN/ntu120_xset/ntu120_bone_motion_xset/log.txt +929 -0
  4. ckpt/Others/Shift-GCN/ntu120_xset/ntu120_bone_motion_xset/shift_gcn.py +216 -0
  5. ckpt/Others/Shift-GCN/ntu120_xset/ntu120_bone_xset/config.yaml +56 -0
  6. ckpt/Others/Shift-GCN/ntu120_xset/ntu120_bone_xset/eval_results/best_acc.pkl +3 -0
  7. ckpt/Others/Shift-GCN/ntu120_xset/ntu120_bone_xset/log.txt +929 -0
  8. ckpt/Others/Shift-GCN/ntu120_xset/ntu120_bone_xset/shift_gcn.py +216 -0
  9. ckpt/Others/Shift-GCN/ntu120_xset/ntu120_joint_motion_xset/config.yaml +56 -0
  10. ckpt/Others/Shift-GCN/ntu120_xset/ntu120_joint_motion_xset/eval_results/best_acc.pkl +3 -0
  11. ckpt/Others/Shift-GCN/ntu120_xset/ntu120_joint_motion_xset/log.txt +929 -0
  12. ckpt/Others/Shift-GCN/ntu120_xset/ntu120_joint_motion_xset/shift_gcn.py +216 -0
  13. ckpt/Others/Shift-GCN/ntu120_xset/ntu120_joint_xset/config.yaml +56 -0
  14. ckpt/Others/Shift-GCN/ntu120_xset/ntu120_joint_xset/eval_results/best_acc.pkl +3 -0
  15. ckpt/Others/Shift-GCN/ntu120_xset/ntu120_joint_xset/log.txt +929 -0
  16. ckpt/Others/Shift-GCN/ntu120_xset/ntu120_joint_xset/shift_gcn.py +216 -0
  17. ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_bone_motion_xsub/config.yaml +56 -0
  18. ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_bone_motion_xsub/eval_results/best_acc.pkl +3 -0
  19. ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_bone_motion_xsub/log.txt +1043 -0
  20. ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_bone_motion_xsub/shift_gcn.py +216 -0
  21. ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_bone_xsub/config.yaml +56 -0
  22. ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_bone_xsub/eval_results/best_acc.pkl +3 -0
  23. ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_bone_xsub/log.txt +1043 -0
  24. ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_bone_xsub/shift_gcn.py +216 -0
  25. ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_joint_motion_xsub/config.yaml +56 -0
  26. ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_joint_motion_xsub/eval_results/best_acc.pkl +3 -0
  27. ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_joint_motion_xsub/log.txt +1043 -0
  28. ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_joint_motion_xsub/shift_gcn.py +216 -0
  29. ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_joint_xsub/config.yaml +56 -0
  30. ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_joint_xsub/eval_results/best_acc.pkl +3 -0
  31. ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_joint_xsub/log.txt +1043 -0
  32. ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_joint_xsub/shift_gcn.py +216 -0
  33. ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_bone_motion_xsub/config.yaml +58 -0
  34. ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_bone_motion_xsub/eval_results/best_acc.pkl +3 -0
  35. ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_bone_motion_xsub/log.txt +875 -0
  36. ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_bone_motion_xsub/shift_gcn.py +216 -0
  37. ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_bone_xsub/config.yaml +58 -0
  38. ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_bone_xsub/eval_results/best_acc.pkl +3 -0
  39. ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_bone_xsub/log.txt +875 -0
  40. ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_bone_xsub/shift_gcn.py +216 -0
  41. ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_joint_motion_xsub/config.yaml +56 -0
  42. ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_joint_motion_xsub/eval_results/best_acc.pkl +3 -0
  43. ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_joint_motion_xsub/log.txt +875 -0
  44. ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_joint_motion_xsub/shift_gcn.py +216 -0
  45. ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_joint_xsub/config.yaml +56 -0
  46. ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_joint_xsub/eval_results/best_acc.pkl +3 -0
  47. ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_joint_xsub/log.txt +893 -0
  48. ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_joint_xsub/shift_gcn.py +216 -0
  49. ckpt/Others/Shift-GCN/ntu60_xview/ntu_ShiftGCN_bone_motion_xview/config.yaml +56 -0
  50. ckpt/Others/Shift-GCN/ntu60_xview/ntu_ShiftGCN_bone_motion_xview/eval_results/best_acc.pkl +3 -0
ckpt/Others/Shift-GCN/ntu120_xset/ntu120_bone_motion_xset/config.yaml ADDED
@@ -0,0 +1,56 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ Experiment_name: ntu120_bone_motion_xset
2
+ base_lr: 0.1
3
+ batch_size: 64
4
+ config: ./config/ntu120_xset/train_bone_motion.yaml
5
+ device:
6
+ - 2
7
+ - 3
8
+ eval_interval: 5
9
+ feeder: feeders.feeder.Feeder
10
+ ignore_weights: []
11
+ log_interval: 100
12
+ model: model.shift_gcn.Model
13
+ model_args:
14
+ graph: graph.ntu_rgb_d.Graph
15
+ graph_args:
16
+ labeling_mode: spatial
17
+ num_class: 120
18
+ num_person: 2
19
+ num_point: 25
20
+ model_saved_name: ./save_models/ntu120_bone_motion_xset
21
+ nesterov: true
22
+ num_epoch: 140
23
+ num_worker: 32
24
+ only_train_epoch: 1
25
+ only_train_part: true
26
+ optimizer: SGD
27
+ phase: train
28
+ print_log: true
29
+ save_interval: 2
30
+ save_score: false
31
+ seed: 1
32
+ show_topk:
33
+ - 1
34
+ - 5
35
+ start_epoch: 0
36
+ step:
37
+ - 60
38
+ - 80
39
+ - 100
40
+ test_batch_size: 64
41
+ test_feeder_args:
42
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_bone_motion.npy
43
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl
44
+ train_feeder_args:
45
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_bone_motion.npy
46
+ debug: false
47
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl
48
+ normalization: false
49
+ random_choose: false
50
+ random_move: false
51
+ random_shift: false
52
+ window_size: -1
53
+ warm_up_epoch: 0
54
+ weight_decay: 0.0001
55
+ weights: null
56
+ work_dir: ./work_dir/ntu120_bone_motion_xset
ckpt/Others/Shift-GCN/ntu120_xset/ntu120_bone_motion_xset/eval_results/best_acc.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bc835811033c5f5255dff5b60d8f19d5c13216e0c476205e7c28746fb214f412
3
+ size 34946665
ckpt/Others/Shift-GCN/ntu120_xset/ntu120_bone_motion_xset/log.txt ADDED
@@ -0,0 +1,929 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [ Thu Sep 15 20:53:13 2022 ] Parameters:
2
+ {'work_dir': './work_dir/ntu120_bone_motion_xset', 'model_saved_name': './save_models/ntu120_bone_motion_xset', 'Experiment_name': 'ntu120_bone_motion_xset', 'config': './config/ntu120_xset/train_bone_motion.yaml', 'phase': 'train', 'save_score': False, 'seed': 1, 'log_interval': 100, 'save_interval': 2, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_bone_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_bone_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl'}, 'model': 'model.shift_gcn.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.1, 'step': [60, 80, 100], 'device': [2, 3], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 64, 'test_batch_size': 64, 'start_epoch': 0, 'num_epoch': 140, 'weight_decay': 0.0001, 'only_train_part': True, 'only_train_epoch': 1, 'warm_up_epoch': 0}
3
+
4
+ [ Thu Sep 15 20:53:13 2022 ] Training epoch: 1
5
+ [ Thu Sep 15 20:54:31 2022 ] Batch(99/162) done. Loss: 3.1825 lr:0.100000 network_time: 0.0273
6
+ [ Thu Sep 15 20:55:16 2022 ] Eval epoch: 1
7
+ [ Thu Sep 15 20:57:05 2022 ] Mean test loss of 930 batches: 4.908753395080566.
8
+ [ Thu Sep 15 20:57:05 2022 ] Top1: 9.45%
9
+ [ Thu Sep 15 20:57:06 2022 ] Top5: 27.60%
10
+ [ Thu Sep 15 20:57:06 2022 ] Training epoch: 2
11
+ [ Thu Sep 15 20:57:37 2022 ] Batch(37/162) done. Loss: 2.2182 lr:0.100000 network_time: 0.0279
12
+ [ Thu Sep 15 20:58:50 2022 ] Batch(137/162) done. Loss: 2.2917 lr:0.100000 network_time: 0.0297
13
+ [ Thu Sep 15 20:59:07 2022 ] Eval epoch: 2
14
+ [ Thu Sep 15 21:00:56 2022 ] Mean test loss of 930 batches: 4.822448253631592.
15
+ [ Thu Sep 15 21:00:57 2022 ] Top1: 14.80%
16
+ [ Thu Sep 15 21:00:57 2022 ] Top5: 37.31%
17
+ [ Thu Sep 15 21:00:57 2022 ] Training epoch: 3
18
+ [ Thu Sep 15 21:01:56 2022 ] Batch(75/162) done. Loss: 2.0696 lr:0.100000 network_time: 0.0258
19
+ [ Thu Sep 15 21:02:59 2022 ] Eval epoch: 3
20
+ [ Thu Sep 15 21:04:48 2022 ] Mean test loss of 930 batches: 3.4721670150756836.
21
+ [ Thu Sep 15 21:04:48 2022 ] Top1: 24.95%
22
+ [ Thu Sep 15 21:04:49 2022 ] Top5: 50.87%
23
+ [ Thu Sep 15 21:04:49 2022 ] Training epoch: 4
24
+ [ Thu Sep 15 21:05:03 2022 ] Batch(13/162) done. Loss: 1.4831 lr:0.100000 network_time: 0.0273
25
+ [ Thu Sep 15 21:06:15 2022 ] Batch(113/162) done. Loss: 1.6804 lr:0.100000 network_time: 0.0265
26
+ [ Thu Sep 15 21:06:50 2022 ] Eval epoch: 4
27
+ [ Thu Sep 15 21:08:39 2022 ] Mean test loss of 930 batches: 3.2496213912963867.
28
+ [ Thu Sep 15 21:08:39 2022 ] Top1: 28.67%
29
+ [ Thu Sep 15 21:08:40 2022 ] Top5: 55.53%
30
+ [ Thu Sep 15 21:08:40 2022 ] Training epoch: 5
31
+ [ Thu Sep 15 21:09:21 2022 ] Batch(51/162) done. Loss: 1.3771 lr:0.100000 network_time: 0.0268
32
+ [ Thu Sep 15 21:10:34 2022 ] Batch(151/162) done. Loss: 1.7848 lr:0.100000 network_time: 0.0262
33
+ [ Thu Sep 15 21:10:41 2022 ] Eval epoch: 5
34
+ [ Thu Sep 15 21:12:30 2022 ] Mean test loss of 930 batches: 3.401992082595825.
35
+ [ Thu Sep 15 21:12:31 2022 ] Top1: 28.73%
36
+ [ Thu Sep 15 21:12:31 2022 ] Top5: 57.81%
37
+ [ Thu Sep 15 21:12:31 2022 ] Training epoch: 6
38
+ [ Thu Sep 15 21:13:40 2022 ] Batch(89/162) done. Loss: 1.3649 lr:0.100000 network_time: 0.0261
39
+ [ Thu Sep 15 21:14:32 2022 ] Eval epoch: 6
40
+ [ Thu Sep 15 21:16:21 2022 ] Mean test loss of 930 batches: 2.9739701747894287.
41
+ [ Thu Sep 15 21:16:22 2022 ] Top1: 29.80%
42
+ [ Thu Sep 15 21:16:22 2022 ] Top5: 62.30%
43
+ [ Thu Sep 15 21:16:23 2022 ] Training epoch: 7
44
+ [ Thu Sep 15 21:16:46 2022 ] Batch(27/162) done. Loss: 1.4052 lr:0.100000 network_time: 0.0294
45
+ [ Thu Sep 15 21:17:59 2022 ] Batch(127/162) done. Loss: 0.7479 lr:0.100000 network_time: 0.0262
46
+ [ Thu Sep 15 21:18:24 2022 ] Eval epoch: 7
47
+ [ Thu Sep 15 21:20:13 2022 ] Mean test loss of 930 batches: 2.601893901824951.
48
+ [ Thu Sep 15 21:20:14 2022 ] Top1: 37.29%
49
+ [ Thu Sep 15 21:20:14 2022 ] Top5: 69.45%
50
+ [ Thu Sep 15 21:20:14 2022 ] Training epoch: 8
51
+ [ Thu Sep 15 21:21:06 2022 ] Batch(65/162) done. Loss: 1.2406 lr:0.100000 network_time: 0.0277
52
+ [ Thu Sep 15 21:22:16 2022 ] Eval epoch: 8
53
+ [ Thu Sep 15 21:24:05 2022 ] Mean test loss of 930 batches: 2.694429397583008.
54
+ [ Thu Sep 15 21:24:05 2022 ] Top1: 38.20%
55
+ [ Thu Sep 15 21:24:06 2022 ] Top5: 69.74%
56
+ [ Thu Sep 15 21:24:06 2022 ] Training epoch: 9
57
+ [ Thu Sep 15 21:24:12 2022 ] Batch(3/162) done. Loss: 0.8463 lr:0.100000 network_time: 0.0294
58
+ [ Thu Sep 15 21:25:25 2022 ] Batch(103/162) done. Loss: 0.9506 lr:0.100000 network_time: 0.0303
59
+ [ Thu Sep 15 21:26:07 2022 ] Eval epoch: 9
60
+ [ Thu Sep 15 21:27:56 2022 ] Mean test loss of 930 batches: 3.4346163272857666.
61
+ [ Thu Sep 15 21:27:56 2022 ] Top1: 32.62%
62
+ [ Thu Sep 15 21:27:57 2022 ] Top5: 64.30%
63
+ [ Thu Sep 15 21:27:57 2022 ] Training epoch: 10
64
+ [ Thu Sep 15 21:28:31 2022 ] Batch(41/162) done. Loss: 0.6424 lr:0.100000 network_time: 0.0260
65
+ [ Thu Sep 15 21:29:44 2022 ] Batch(141/162) done. Loss: 0.9037 lr:0.100000 network_time: 0.0271
66
+ [ Thu Sep 15 21:29:58 2022 ] Eval epoch: 10
67
+ [ Thu Sep 15 21:31:47 2022 ] Mean test loss of 930 batches: 3.4100637435913086.
68
+ [ Thu Sep 15 21:31:47 2022 ] Top1: 35.12%
69
+ [ Thu Sep 15 21:31:48 2022 ] Top5: 65.64%
70
+ [ Thu Sep 15 21:31:48 2022 ] Training epoch: 11
71
+ [ Thu Sep 15 21:32:49 2022 ] Batch(79/162) done. Loss: 0.8555 lr:0.100000 network_time: 0.0272
72
+ [ Thu Sep 15 21:33:49 2022 ] Eval epoch: 11
73
+ [ Thu Sep 15 21:35:38 2022 ] Mean test loss of 930 batches: 2.7729108333587646.
74
+ [ Thu Sep 15 21:35:38 2022 ] Top1: 38.37%
75
+ [ Thu Sep 15 21:35:39 2022 ] Top5: 71.00%
76
+ [ Thu Sep 15 21:35:39 2022 ] Training epoch: 12
77
+ [ Thu Sep 15 21:35:55 2022 ] Batch(17/162) done. Loss: 0.9231 lr:0.100000 network_time: 0.0268
78
+ [ Thu Sep 15 21:37:08 2022 ] Batch(117/162) done. Loss: 1.0566 lr:0.100000 network_time: 0.0222
79
+ [ Thu Sep 15 21:37:40 2022 ] Eval epoch: 12
80
+ [ Thu Sep 15 21:39:29 2022 ] Mean test loss of 930 batches: 3.1429965496063232.
81
+ [ Thu Sep 15 21:39:30 2022 ] Top1: 38.27%
82
+ [ Thu Sep 15 21:39:30 2022 ] Top5: 70.08%
83
+ [ Thu Sep 15 21:39:30 2022 ] Training epoch: 13
84
+ [ Thu Sep 15 21:40:14 2022 ] Batch(55/162) done. Loss: 0.6888 lr:0.100000 network_time: 0.0275
85
+ [ Thu Sep 15 21:41:27 2022 ] Batch(155/162) done. Loss: 0.8608 lr:0.100000 network_time: 0.0269
86
+ [ Thu Sep 15 21:41:31 2022 ] Eval epoch: 13
87
+ [ Thu Sep 15 21:43:20 2022 ] Mean test loss of 930 batches: 2.652634620666504.
88
+ [ Thu Sep 15 21:43:21 2022 ] Top1: 39.41%
89
+ [ Thu Sep 15 21:43:21 2022 ] Top5: 72.22%
90
+ [ Thu Sep 15 21:43:22 2022 ] Training epoch: 14
91
+ [ Thu Sep 15 21:44:33 2022 ] Batch(93/162) done. Loss: 0.8529 lr:0.100000 network_time: 0.0314
92
+ [ Thu Sep 15 21:45:23 2022 ] Eval epoch: 14
93
+ [ Thu Sep 15 21:47:12 2022 ] Mean test loss of 930 batches: 2.584991693496704.
94
+ [ Thu Sep 15 21:47:12 2022 ] Top1: 39.55%
95
+ [ Thu Sep 15 21:47:13 2022 ] Top5: 72.15%
96
+ [ Thu Sep 15 21:47:13 2022 ] Training epoch: 15
97
+ [ Thu Sep 15 21:47:39 2022 ] Batch(31/162) done. Loss: 0.6209 lr:0.100000 network_time: 0.0335
98
+ [ Thu Sep 15 21:48:52 2022 ] Batch(131/162) done. Loss: 0.6853 lr:0.100000 network_time: 0.0272
99
+ [ Thu Sep 15 21:49:14 2022 ] Eval epoch: 15
100
+ [ Thu Sep 15 21:51:02 2022 ] Mean test loss of 930 batches: 3.4786317348480225.
101
+ [ Thu Sep 15 21:51:03 2022 ] Top1: 38.24%
102
+ [ Thu Sep 15 21:51:03 2022 ] Top5: 69.38%
103
+ [ Thu Sep 15 21:51:04 2022 ] Training epoch: 16
104
+ [ Thu Sep 15 21:51:58 2022 ] Batch(69/162) done. Loss: 0.7813 lr:0.100000 network_time: 0.0280
105
+ [ Thu Sep 15 21:53:05 2022 ] Eval epoch: 16
106
+ [ Thu Sep 15 21:54:54 2022 ] Mean test loss of 930 batches: 3.6086277961730957.
107
+ [ Thu Sep 15 21:54:54 2022 ] Top1: 35.35%
108
+ [ Thu Sep 15 21:54:55 2022 ] Top5: 64.75%
109
+ [ Thu Sep 15 21:54:55 2022 ] Training epoch: 17
110
+ [ Thu Sep 15 21:55:05 2022 ] Batch(7/162) done. Loss: 0.7263 lr:0.100000 network_time: 0.0273
111
+ [ Thu Sep 15 21:56:17 2022 ] Batch(107/162) done. Loss: 0.6502 lr:0.100000 network_time: 0.0306
112
+ [ Thu Sep 15 21:56:57 2022 ] Eval epoch: 17
113
+ [ Thu Sep 15 21:58:45 2022 ] Mean test loss of 930 batches: 3.275714635848999.
114
+ [ Thu Sep 15 21:58:46 2022 ] Top1: 40.58%
115
+ [ Thu Sep 15 21:58:46 2022 ] Top5: 72.06%
116
+ [ Thu Sep 15 21:58:46 2022 ] Training epoch: 18
117
+ [ Thu Sep 15 21:59:23 2022 ] Batch(45/162) done. Loss: 0.6945 lr:0.100000 network_time: 0.0292
118
+ [ Thu Sep 15 22:00:36 2022 ] Batch(145/162) done. Loss: 0.4933 lr:0.100000 network_time: 0.0272
119
+ [ Thu Sep 15 22:00:48 2022 ] Eval epoch: 18
120
+ [ Thu Sep 15 22:02:37 2022 ] Mean test loss of 930 batches: 2.9244513511657715.
121
+ [ Thu Sep 15 22:02:37 2022 ] Top1: 40.86%
122
+ [ Thu Sep 15 22:02:38 2022 ] Top5: 73.57%
123
+ [ Thu Sep 15 22:02:38 2022 ] Training epoch: 19
124
+ [ Thu Sep 15 22:03:42 2022 ] Batch(83/162) done. Loss: 0.3187 lr:0.100000 network_time: 0.0279
125
+ [ Thu Sep 15 22:04:39 2022 ] Eval epoch: 19
126
+ [ Thu Sep 15 22:06:28 2022 ] Mean test loss of 930 batches: 3.0369040966033936.
127
+ [ Thu Sep 15 22:06:29 2022 ] Top1: 40.36%
128
+ [ Thu Sep 15 22:06:29 2022 ] Top5: 72.71%
129
+ [ Thu Sep 15 22:06:29 2022 ] Training epoch: 20
130
+ [ Thu Sep 15 22:06:49 2022 ] Batch(21/162) done. Loss: 0.6907 lr:0.100000 network_time: 0.0270
131
+ [ Thu Sep 15 22:08:01 2022 ] Batch(121/162) done. Loss: 0.6564 lr:0.100000 network_time: 0.0272
132
+ [ Thu Sep 15 22:08:31 2022 ] Eval epoch: 20
133
+ [ Thu Sep 15 22:10:19 2022 ] Mean test loss of 930 batches: 2.658047914505005.
134
+ [ Thu Sep 15 22:10:20 2022 ] Top1: 41.92%
135
+ [ Thu Sep 15 22:10:20 2022 ] Top5: 73.82%
136
+ [ Thu Sep 15 22:10:20 2022 ] Training epoch: 21
137
+ [ Thu Sep 15 22:11:07 2022 ] Batch(59/162) done. Loss: 0.5150 lr:0.100000 network_time: 0.0284
138
+ [ Thu Sep 15 22:12:20 2022 ] Batch(159/162) done. Loss: 0.6909 lr:0.100000 network_time: 0.0268
139
+ [ Thu Sep 15 22:12:22 2022 ] Eval epoch: 21
140
+ [ Thu Sep 15 22:14:11 2022 ] Mean test loss of 930 batches: 2.6667940616607666.
141
+ [ Thu Sep 15 22:14:11 2022 ] Top1: 44.15%
142
+ [ Thu Sep 15 22:14:12 2022 ] Top5: 75.83%
143
+ [ Thu Sep 15 22:14:12 2022 ] Training epoch: 22
144
+ [ Thu Sep 15 22:15:26 2022 ] Batch(97/162) done. Loss: 0.5496 lr:0.100000 network_time: 0.0443
145
+ [ Thu Sep 15 22:16:13 2022 ] Eval epoch: 22
146
+ [ Thu Sep 15 22:18:02 2022 ] Mean test loss of 930 batches: 3.3101744651794434.
147
+ [ Thu Sep 15 22:18:03 2022 ] Top1: 38.63%
148
+ [ Thu Sep 15 22:18:03 2022 ] Top5: 70.21%
149
+ [ Thu Sep 15 22:18:03 2022 ] Training epoch: 23
150
+ [ Thu Sep 15 22:18:33 2022 ] Batch(35/162) done. Loss: 0.4830 lr:0.100000 network_time: 0.0309
151
+ [ Thu Sep 15 22:19:45 2022 ] Batch(135/162) done. Loss: 0.2860 lr:0.100000 network_time: 0.0265
152
+ [ Thu Sep 15 22:20:05 2022 ] Eval epoch: 23
153
+ [ Thu Sep 15 22:21:53 2022 ] Mean test loss of 930 batches: 2.9660871028900146.
154
+ [ Thu Sep 15 22:21:54 2022 ] Top1: 43.51%
155
+ [ Thu Sep 15 22:21:54 2022 ] Top5: 74.93%
156
+ [ Thu Sep 15 22:21:54 2022 ] Training epoch: 24
157
+ [ Thu Sep 15 22:22:51 2022 ] Batch(73/162) done. Loss: 0.4363 lr:0.100000 network_time: 0.0268
158
+ [ Thu Sep 15 22:23:56 2022 ] Eval epoch: 24
159
+ [ Thu Sep 15 22:25:44 2022 ] Mean test loss of 930 batches: 3.1512691974639893.
160
+ [ Thu Sep 15 22:25:45 2022 ] Top1: 42.29%
161
+ [ Thu Sep 15 22:25:45 2022 ] Top5: 71.11%
162
+ [ Thu Sep 15 22:25:46 2022 ] Training epoch: 25
163
+ [ Thu Sep 15 22:25:58 2022 ] Batch(11/162) done. Loss: 0.2536 lr:0.100000 network_time: 0.0271
164
+ [ Thu Sep 15 22:27:10 2022 ] Batch(111/162) done. Loss: 0.6440 lr:0.100000 network_time: 0.0277
165
+ [ Thu Sep 15 22:27:47 2022 ] Eval epoch: 25
166
+ [ Thu Sep 15 22:29:35 2022 ] Mean test loss of 930 batches: 3.2045881748199463.
167
+ [ Thu Sep 15 22:29:36 2022 ] Top1: 41.34%
168
+ [ Thu Sep 15 22:29:36 2022 ] Top5: 72.56%
169
+ [ Thu Sep 15 22:29:36 2022 ] Training epoch: 26
170
+ [ Thu Sep 15 22:30:16 2022 ] Batch(49/162) done. Loss: 0.3471 lr:0.100000 network_time: 0.0248
171
+ [ Thu Sep 15 22:31:28 2022 ] Batch(149/162) done. Loss: 0.6179 lr:0.100000 network_time: 0.0265
172
+ [ Thu Sep 15 22:31:37 2022 ] Eval epoch: 26
173
+ [ Thu Sep 15 22:33:26 2022 ] Mean test loss of 930 batches: 2.7953853607177734.
174
+ [ Thu Sep 15 22:33:27 2022 ] Top1: 45.61%
175
+ [ Thu Sep 15 22:33:27 2022 ] Top5: 75.82%
176
+ [ Thu Sep 15 22:33:28 2022 ] Training epoch: 27
177
+ [ Thu Sep 15 22:34:35 2022 ] Batch(87/162) done. Loss: 0.2963 lr:0.100000 network_time: 0.0268
178
+ [ Thu Sep 15 22:35:29 2022 ] Eval epoch: 27
179
+ [ Thu Sep 15 22:37:18 2022 ] Mean test loss of 930 batches: 3.8976290225982666.
180
+ [ Thu Sep 15 22:37:18 2022 ] Top1: 37.02%
181
+ [ Thu Sep 15 22:37:19 2022 ] Top5: 68.66%
182
+ [ Thu Sep 15 22:37:19 2022 ] Training epoch: 28
183
+ [ Thu Sep 15 22:37:41 2022 ] Batch(25/162) done. Loss: 0.2610 lr:0.100000 network_time: 0.0274
184
+ [ Thu Sep 15 22:38:54 2022 ] Batch(125/162) done. Loss: 0.3643 lr:0.100000 network_time: 0.0256
185
+ [ Thu Sep 15 22:39:20 2022 ] Eval epoch: 28
186
+ [ Thu Sep 15 22:41:09 2022 ] Mean test loss of 930 batches: 3.0510149002075195.
187
+ [ Thu Sep 15 22:41:09 2022 ] Top1: 42.95%
188
+ [ Thu Sep 15 22:41:10 2022 ] Top5: 71.93%
189
+ [ Thu Sep 15 22:41:10 2022 ] Training epoch: 29
190
+ [ Thu Sep 15 22:42:00 2022 ] Batch(63/162) done. Loss: 0.4196 lr:0.100000 network_time: 0.0287
191
+ [ Thu Sep 15 22:43:11 2022 ] Eval epoch: 29
192
+ [ Thu Sep 15 22:45:01 2022 ] Mean test loss of 930 batches: 3.3990092277526855.
193
+ [ Thu Sep 15 22:45:01 2022 ] Top1: 42.01%
194
+ [ Thu Sep 15 22:45:01 2022 ] Top5: 72.58%
195
+ [ Thu Sep 15 22:45:02 2022 ] Training epoch: 30
196
+ [ Thu Sep 15 22:45:07 2022 ] Batch(1/162) done. Loss: 0.4026 lr:0.100000 network_time: 0.0274
197
+ [ Thu Sep 15 22:46:19 2022 ] Batch(101/162) done. Loss: 0.2621 lr:0.100000 network_time: 0.0259
198
+ [ Thu Sep 15 22:47:03 2022 ] Eval epoch: 30
199
+ [ Thu Sep 15 22:48:51 2022 ] Mean test loss of 930 batches: 3.6437642574310303.
200
+ [ Thu Sep 15 22:48:52 2022 ] Top1: 36.19%
201
+ [ Thu Sep 15 22:48:52 2022 ] Top5: 69.46%
202
+ [ Thu Sep 15 22:48:53 2022 ] Training epoch: 31
203
+ [ Thu Sep 15 22:49:25 2022 ] Batch(39/162) done. Loss: 0.3941 lr:0.100000 network_time: 0.0312
204
+ [ Thu Sep 15 22:50:38 2022 ] Batch(139/162) done. Loss: 0.3324 lr:0.100000 network_time: 0.0342
205
+ [ Thu Sep 15 22:50:54 2022 ] Eval epoch: 31
206
+ [ Thu Sep 15 22:52:43 2022 ] Mean test loss of 930 batches: 2.982487440109253.
207
+ [ Thu Sep 15 22:52:43 2022 ] Top1: 43.66%
208
+ [ Thu Sep 15 22:52:44 2022 ] Top5: 75.18%
209
+ [ Thu Sep 15 22:52:44 2022 ] Training epoch: 32
210
+ [ Thu Sep 15 22:53:44 2022 ] Batch(77/162) done. Loss: 0.3935 lr:0.100000 network_time: 0.0261
211
+ [ Thu Sep 15 22:54:45 2022 ] Eval epoch: 32
212
+ [ Thu Sep 15 22:56:33 2022 ] Mean test loss of 930 batches: 3.3070483207702637.
213
+ [ Thu Sep 15 22:56:34 2022 ] Top1: 44.26%
214
+ [ Thu Sep 15 22:56:34 2022 ] Top5: 73.42%
215
+ [ Thu Sep 15 22:56:35 2022 ] Training epoch: 33
216
+ [ Thu Sep 15 22:56:49 2022 ] Batch(15/162) done. Loss: 0.2932 lr:0.100000 network_time: 0.0268
217
+ [ Thu Sep 15 22:58:02 2022 ] Batch(115/162) done. Loss: 0.7155 lr:0.100000 network_time: 0.0276
218
+ [ Thu Sep 15 22:58:36 2022 ] Eval epoch: 33
219
+ [ Thu Sep 15 23:00:24 2022 ] Mean test loss of 930 batches: 3.00728440284729.
220
+ [ Thu Sep 15 23:00:25 2022 ] Top1: 46.30%
221
+ [ Thu Sep 15 23:00:25 2022 ] Top5: 76.53%
222
+ [ Thu Sep 15 23:00:26 2022 ] Training epoch: 34
223
+ [ Thu Sep 15 23:01:08 2022 ] Batch(53/162) done. Loss: 0.4351 lr:0.100000 network_time: 0.0272
224
+ [ Thu Sep 15 23:02:21 2022 ] Batch(153/162) done. Loss: 0.2856 lr:0.100000 network_time: 0.0261
225
+ [ Thu Sep 15 23:02:27 2022 ] Eval epoch: 34
226
+ [ Thu Sep 15 23:04:15 2022 ] Mean test loss of 930 batches: 3.851074695587158.
227
+ [ Thu Sep 15 23:04:16 2022 ] Top1: 39.96%
228
+ [ Thu Sep 15 23:04:16 2022 ] Top5: 68.37%
229
+ [ Thu Sep 15 23:04:16 2022 ] Training epoch: 35
230
+ [ Thu Sep 15 23:05:27 2022 ] Batch(91/162) done. Loss: 0.3399 lr:0.100000 network_time: 0.0322
231
+ [ Thu Sep 15 23:06:18 2022 ] Eval epoch: 35
232
+ [ Thu Sep 15 23:08:07 2022 ] Mean test loss of 930 batches: 3.4936296939849854.
233
+ [ Thu Sep 15 23:08:07 2022 ] Top1: 39.69%
234
+ [ Thu Sep 15 23:08:07 2022 ] Top5: 69.64%
235
+ [ Thu Sep 15 23:08:08 2022 ] Training epoch: 36
236
+ [ Thu Sep 15 23:08:33 2022 ] Batch(29/162) done. Loss: 0.2299 lr:0.100000 network_time: 0.0319
237
+ [ Thu Sep 15 23:09:46 2022 ] Batch(129/162) done. Loss: 0.3337 lr:0.100000 network_time: 0.0254
238
+ [ Thu Sep 15 23:10:09 2022 ] Eval epoch: 36
239
+ [ Thu Sep 15 23:11:58 2022 ] Mean test loss of 930 batches: 3.194895029067993.
240
+ [ Thu Sep 15 23:11:58 2022 ] Top1: 43.84%
241
+ [ Thu Sep 15 23:11:59 2022 ] Top5: 73.81%
242
+ [ Thu Sep 15 23:11:59 2022 ] Training epoch: 37
243
+ [ Thu Sep 15 23:12:51 2022 ] Batch(67/162) done. Loss: 0.2560 lr:0.100000 network_time: 0.0271
244
+ [ Thu Sep 15 23:14:00 2022 ] Eval epoch: 37
245
+ [ Thu Sep 15 23:15:49 2022 ] Mean test loss of 930 batches: 3.691544771194458.
246
+ [ Thu Sep 15 23:15:49 2022 ] Top1: 40.55%
247
+ [ Thu Sep 15 23:15:50 2022 ] Top5: 71.71%
248
+ [ Thu Sep 15 23:15:50 2022 ] Training epoch: 38
249
+ [ Thu Sep 15 23:15:57 2022 ] Batch(5/162) done. Loss: 0.3489 lr:0.100000 network_time: 0.0265
250
+ [ Thu Sep 15 23:17:10 2022 ] Batch(105/162) done. Loss: 0.2590 lr:0.100000 network_time: 0.0269
251
+ [ Thu Sep 15 23:17:51 2022 ] Eval epoch: 38
252
+ [ Thu Sep 15 23:19:40 2022 ] Mean test loss of 930 batches: 3.0611417293548584.
253
+ [ Thu Sep 15 23:19:40 2022 ] Top1: 41.27%
254
+ [ Thu Sep 15 23:19:41 2022 ] Top5: 72.73%
255
+ [ Thu Sep 15 23:19:41 2022 ] Training epoch: 39
256
+ [ Thu Sep 15 23:20:17 2022 ] Batch(43/162) done. Loss: 0.1300 lr:0.100000 network_time: 0.0293
257
+ [ Thu Sep 15 23:21:29 2022 ] Batch(143/162) done. Loss: 0.2533 lr:0.100000 network_time: 0.0261
258
+ [ Thu Sep 15 23:21:42 2022 ] Eval epoch: 39
259
+ [ Thu Sep 15 23:23:32 2022 ] Mean test loss of 930 batches: 2.9703688621520996.
260
+ [ Thu Sep 15 23:23:32 2022 ] Top1: 44.27%
261
+ [ Thu Sep 15 23:23:32 2022 ] Top5: 74.68%
262
+ [ Thu Sep 15 23:23:33 2022 ] Training epoch: 40
263
+ [ Thu Sep 15 23:24:35 2022 ] Batch(81/162) done. Loss: 0.3066 lr:0.100000 network_time: 0.0266
264
+ [ Thu Sep 15 23:25:34 2022 ] Eval epoch: 40
265
+ [ Thu Sep 15 23:27:23 2022 ] Mean test loss of 930 batches: 3.1744678020477295.
266
+ [ Thu Sep 15 23:27:24 2022 ] Top1: 47.07%
267
+ [ Thu Sep 15 23:27:24 2022 ] Top5: 75.50%
268
+ [ Thu Sep 15 23:27:25 2022 ] Training epoch: 41
269
+ [ Thu Sep 15 23:27:42 2022 ] Batch(19/162) done. Loss: 0.2931 lr:0.100000 network_time: 0.0363
270
+ [ Thu Sep 15 23:28:55 2022 ] Batch(119/162) done. Loss: 0.5136 lr:0.100000 network_time: 0.0293
271
+ [ Thu Sep 15 23:29:26 2022 ] Eval epoch: 41
272
+ [ Thu Sep 15 23:31:15 2022 ] Mean test loss of 930 batches: 3.718257427215576.
273
+ [ Thu Sep 15 23:31:15 2022 ] Top1: 37.00%
274
+ [ Thu Sep 15 23:31:15 2022 ] Top5: 69.65%
275
+ [ Thu Sep 15 23:31:16 2022 ] Training epoch: 42
276
+ [ Thu Sep 15 23:32:01 2022 ] Batch(57/162) done. Loss: 0.2064 lr:0.100000 network_time: 0.0269
277
+ [ Thu Sep 15 23:33:13 2022 ] Batch(157/162) done. Loss: 0.2419 lr:0.100000 network_time: 0.0330
278
+ [ Thu Sep 15 23:33:17 2022 ] Eval epoch: 42
279
+ [ Thu Sep 15 23:35:06 2022 ] Mean test loss of 930 batches: 2.983041524887085.
280
+ [ Thu Sep 15 23:35:06 2022 ] Top1: 46.51%
281
+ [ Thu Sep 15 23:35:06 2022 ] Top5: 75.47%
282
+ [ Thu Sep 15 23:35:07 2022 ] Training epoch: 43
283
+ [ Thu Sep 15 23:36:20 2022 ] Batch(95/162) done. Loss: 0.3565 lr:0.100000 network_time: 0.0271
284
+ [ Thu Sep 15 23:37:08 2022 ] Eval epoch: 43
285
+ [ Thu Sep 15 23:38:57 2022 ] Mean test loss of 930 batches: 3.6973397731781006.
286
+ [ Thu Sep 15 23:38:57 2022 ] Top1: 38.11%
287
+ [ Thu Sep 15 23:38:57 2022 ] Top5: 70.05%
288
+ [ Thu Sep 15 23:38:58 2022 ] Training epoch: 44
289
+ [ Thu Sep 15 23:39:26 2022 ] Batch(33/162) done. Loss: 0.2877 lr:0.100000 network_time: 0.0264
290
+ [ Thu Sep 15 23:40:38 2022 ] Batch(133/162) done. Loss: 0.3982 lr:0.100000 network_time: 0.0310
291
+ [ Thu Sep 15 23:40:59 2022 ] Eval epoch: 44
292
+ [ Thu Sep 15 23:42:48 2022 ] Mean test loss of 930 batches: 3.175381660461426.
293
+ [ Thu Sep 15 23:42:48 2022 ] Top1: 40.97%
294
+ [ Thu Sep 15 23:42:48 2022 ] Top5: 73.30%
295
+ [ Thu Sep 15 23:42:49 2022 ] Training epoch: 45
296
+ [ Thu Sep 15 23:43:44 2022 ] Batch(71/162) done. Loss: 0.1432 lr:0.100000 network_time: 0.0272
297
+ [ Thu Sep 15 23:44:50 2022 ] Eval epoch: 45
298
+ [ Thu Sep 15 23:46:38 2022 ] Mean test loss of 930 batches: 3.595310688018799.
299
+ [ Thu Sep 15 23:46:39 2022 ] Top1: 41.20%
300
+ [ Thu Sep 15 23:46:39 2022 ] Top5: 71.01%
301
+ [ Thu Sep 15 23:46:40 2022 ] Training epoch: 46
302
+ [ Thu Sep 15 23:46:50 2022 ] Batch(9/162) done. Loss: 0.1890 lr:0.100000 network_time: 0.0291
303
+ [ Thu Sep 15 23:48:03 2022 ] Batch(109/162) done. Loss: 0.2266 lr:0.100000 network_time: 0.0320
304
+ [ Thu Sep 15 23:48:41 2022 ] Eval epoch: 46
305
+ [ Thu Sep 15 23:50:30 2022 ] Mean test loss of 930 batches: 2.9090216159820557.
306
+ [ Thu Sep 15 23:50:30 2022 ] Top1: 46.62%
307
+ [ Thu Sep 15 23:50:31 2022 ] Top5: 76.53%
308
+ [ Thu Sep 15 23:50:31 2022 ] Training epoch: 47
309
+ [ Thu Sep 15 23:51:09 2022 ] Batch(47/162) done. Loss: 0.0817 lr:0.100000 network_time: 0.0278
310
+ [ Thu Sep 15 23:52:22 2022 ] Batch(147/162) done. Loss: 0.2669 lr:0.100000 network_time: 0.0342
311
+ [ Thu Sep 15 23:52:32 2022 ] Eval epoch: 47
312
+ [ Thu Sep 15 23:54:20 2022 ] Mean test loss of 930 batches: 3.4526474475860596.
313
+ [ Thu Sep 15 23:54:21 2022 ] Top1: 42.55%
314
+ [ Thu Sep 15 23:54:21 2022 ] Top5: 72.60%
315
+ [ Thu Sep 15 23:54:22 2022 ] Training epoch: 48
316
+ [ Thu Sep 15 23:55:27 2022 ] Batch(85/162) done. Loss: 0.2879 lr:0.100000 network_time: 0.0278
317
+ [ Thu Sep 15 23:56:23 2022 ] Eval epoch: 48
318
+ [ Thu Sep 15 23:58:11 2022 ] Mean test loss of 930 batches: 3.220905065536499.
319
+ [ Thu Sep 15 23:58:12 2022 ] Top1: 45.22%
320
+ [ Thu Sep 15 23:58:12 2022 ] Top5: 73.68%
321
+ [ Thu Sep 15 23:58:12 2022 ] Training epoch: 49
322
+ [ Thu Sep 15 23:58:33 2022 ] Batch(23/162) done. Loss: 0.1667 lr:0.100000 network_time: 0.0278
323
+ [ Thu Sep 15 23:59:46 2022 ] Batch(123/162) done. Loss: 0.1133 lr:0.100000 network_time: 0.0319
324
+ [ Fri Sep 16 00:00:13 2022 ] Eval epoch: 49
325
+ [ Fri Sep 16 00:02:02 2022 ] Mean test loss of 930 batches: 3.258099317550659.
326
+ [ Fri Sep 16 00:02:02 2022 ] Top1: 44.87%
327
+ [ Fri Sep 16 00:02:03 2022 ] Top5: 74.91%
328
+ [ Fri Sep 16 00:02:03 2022 ] Training epoch: 50
329
+ [ Fri Sep 16 00:02:51 2022 ] Batch(61/162) done. Loss: 0.1385 lr:0.100000 network_time: 0.0285
330
+ [ Fri Sep 16 00:04:04 2022 ] Batch(161/162) done. Loss: 0.2105 lr:0.100000 network_time: 0.0310
331
+ [ Fri Sep 16 00:04:04 2022 ] Eval epoch: 50
332
+ [ Fri Sep 16 00:05:53 2022 ] Mean test loss of 930 batches: 3.1035125255584717.
333
+ [ Fri Sep 16 00:05:54 2022 ] Top1: 46.88%
334
+ [ Fri Sep 16 00:05:54 2022 ] Top5: 77.57%
335
+ [ Fri Sep 16 00:05:54 2022 ] Training epoch: 51
336
+ [ Fri Sep 16 00:07:10 2022 ] Batch(99/162) done. Loss: 0.2091 lr:0.100000 network_time: 0.0273
337
+ [ Fri Sep 16 00:07:55 2022 ] Eval epoch: 51
338
+ [ Fri Sep 16 00:09:44 2022 ] Mean test loss of 930 batches: 2.9262797832489014.
339
+ [ Fri Sep 16 00:09:45 2022 ] Top1: 47.21%
340
+ [ Fri Sep 16 00:09:45 2022 ] Top5: 76.63%
341
+ [ Fri Sep 16 00:09:45 2022 ] Training epoch: 52
342
+ [ Fri Sep 16 00:10:16 2022 ] Batch(37/162) done. Loss: 0.2059 lr:0.100000 network_time: 0.0318
343
+ [ Fri Sep 16 00:11:29 2022 ] Batch(137/162) done. Loss: 0.2774 lr:0.100000 network_time: 0.0283
344
+ [ Fri Sep 16 00:11:46 2022 ] Eval epoch: 52
345
+ [ Fri Sep 16 00:13:35 2022 ] Mean test loss of 930 batches: 3.9441447257995605.
346
+ [ Fri Sep 16 00:13:36 2022 ] Top1: 39.44%
347
+ [ Fri Sep 16 00:13:36 2022 ] Top5: 69.58%
348
+ [ Fri Sep 16 00:13:36 2022 ] Training epoch: 53
349
+ [ Fri Sep 16 00:14:35 2022 ] Batch(75/162) done. Loss: 0.2692 lr:0.100000 network_time: 0.0272
350
+ [ Fri Sep 16 00:15:37 2022 ] Eval epoch: 53
351
+ [ Fri Sep 16 00:17:26 2022 ] Mean test loss of 930 batches: 3.2309017181396484.
352
+ [ Fri Sep 16 00:17:26 2022 ] Top1: 46.12%
353
+ [ Fri Sep 16 00:17:27 2022 ] Top5: 75.79%
354
+ [ Fri Sep 16 00:17:27 2022 ] Training epoch: 54
355
+ [ Fri Sep 16 00:17:40 2022 ] Batch(13/162) done. Loss: 0.1146 lr:0.100000 network_time: 0.0309
356
+ [ Fri Sep 16 00:18:53 2022 ] Batch(113/162) done. Loss: 0.2228 lr:0.100000 network_time: 0.0282
357
+ [ Fri Sep 16 00:19:28 2022 ] Eval epoch: 54
358
+ [ Fri Sep 16 00:21:17 2022 ] Mean test loss of 930 batches: 2.87821364402771.
359
+ [ Fri Sep 16 00:21:17 2022 ] Top1: 47.83%
360
+ [ Fri Sep 16 00:21:18 2022 ] Top5: 76.74%
361
+ [ Fri Sep 16 00:21:18 2022 ] Training epoch: 55
362
+ [ Fri Sep 16 00:21:59 2022 ] Batch(51/162) done. Loss: 0.2163 lr:0.100000 network_time: 0.0272
363
+ [ Fri Sep 16 00:23:12 2022 ] Batch(151/162) done. Loss: 0.1489 lr:0.100000 network_time: 0.0260
364
+ [ Fri Sep 16 00:23:19 2022 ] Eval epoch: 55
365
+ [ Fri Sep 16 00:25:08 2022 ] Mean test loss of 930 batches: 3.5154900550842285.
366
+ [ Fri Sep 16 00:25:08 2022 ] Top1: 43.40%
367
+ [ Fri Sep 16 00:25:08 2022 ] Top5: 71.89%
368
+ [ Fri Sep 16 00:25:09 2022 ] Training epoch: 56
369
+ [ Fri Sep 16 00:26:17 2022 ] Batch(89/162) done. Loss: 0.0506 lr:0.100000 network_time: 0.0277
370
+ [ Fri Sep 16 00:27:10 2022 ] Eval epoch: 56
371
+ [ Fri Sep 16 00:28:59 2022 ] Mean test loss of 930 batches: 2.9366331100463867.
372
+ [ Fri Sep 16 00:29:00 2022 ] Top1: 48.54%
373
+ [ Fri Sep 16 00:29:00 2022 ] Top5: 77.41%
374
+ [ Fri Sep 16 00:29:00 2022 ] Training epoch: 57
375
+ [ Fri Sep 16 00:29:24 2022 ] Batch(27/162) done. Loss: 0.1010 lr:0.100000 network_time: 0.0288
376
+ [ Fri Sep 16 00:30:37 2022 ] Batch(127/162) done. Loss: 0.1355 lr:0.100000 network_time: 0.0316
377
+ [ Fri Sep 16 00:31:01 2022 ] Eval epoch: 57
378
+ [ Fri Sep 16 00:32:50 2022 ] Mean test loss of 930 batches: 2.7375779151916504.
379
+ [ Fri Sep 16 00:32:51 2022 ] Top1: 49.76%
380
+ [ Fri Sep 16 00:32:51 2022 ] Top5: 77.95%
381
+ [ Fri Sep 16 00:32:51 2022 ] Training epoch: 58
382
+ [ Fri Sep 16 00:33:43 2022 ] Batch(65/162) done. Loss: 0.1278 lr:0.100000 network_time: 0.0278
383
+ [ Fri Sep 16 00:34:53 2022 ] Eval epoch: 58
384
+ [ Fri Sep 16 00:36:42 2022 ] Mean test loss of 930 batches: 3.172297239303589.
385
+ [ Fri Sep 16 00:36:42 2022 ] Top1: 47.46%
386
+ [ Fri Sep 16 00:36:42 2022 ] Top5: 75.36%
387
+ [ Fri Sep 16 00:36:43 2022 ] Training epoch: 59
388
+ [ Fri Sep 16 00:36:49 2022 ] Batch(3/162) done. Loss: 0.0779 lr:0.100000 network_time: 0.0329
389
+ [ Fri Sep 16 00:38:01 2022 ] Batch(103/162) done. Loss: 0.1418 lr:0.100000 network_time: 0.0267
390
+ [ Fri Sep 16 00:38:44 2022 ] Eval epoch: 59
391
+ [ Fri Sep 16 00:40:32 2022 ] Mean test loss of 930 batches: 2.990083694458008.
392
+ [ Fri Sep 16 00:40:33 2022 ] Top1: 47.78%
393
+ [ Fri Sep 16 00:40:33 2022 ] Top5: 76.48%
394
+ [ Fri Sep 16 00:40:33 2022 ] Training epoch: 60
395
+ [ Fri Sep 16 00:41:07 2022 ] Batch(41/162) done. Loss: 0.2073 lr:0.100000 network_time: 0.0301
396
+ [ Fri Sep 16 00:42:20 2022 ] Batch(141/162) done. Loss: 0.0576 lr:0.100000 network_time: 0.0277
397
+ [ Fri Sep 16 00:42:34 2022 ] Eval epoch: 60
398
+ [ Fri Sep 16 00:44:23 2022 ] Mean test loss of 930 batches: 2.775873899459839.
399
+ [ Fri Sep 16 00:44:24 2022 ] Top1: 50.88%
400
+ [ Fri Sep 16 00:44:24 2022 ] Top5: 78.18%
401
+ [ Fri Sep 16 00:44:25 2022 ] Training epoch: 61
402
+ [ Fri Sep 16 00:45:26 2022 ] Batch(79/162) done. Loss: 0.0945 lr:0.010000 network_time: 0.0278
403
+ [ Fri Sep 16 00:46:26 2022 ] Eval epoch: 61
404
+ [ Fri Sep 16 00:48:14 2022 ] Mean test loss of 930 batches: 2.546633243560791.
405
+ [ Fri Sep 16 00:48:15 2022 ] Top1: 53.51%
406
+ [ Fri Sep 16 00:48:15 2022 ] Top5: 81.01%
407
+ [ Fri Sep 16 00:48:16 2022 ] Training epoch: 62
408
+ [ Fri Sep 16 00:48:32 2022 ] Batch(17/162) done. Loss: 0.0171 lr:0.010000 network_time: 0.0321
409
+ [ Fri Sep 16 00:49:44 2022 ] Batch(117/162) done. Loss: 0.0262 lr:0.010000 network_time: 0.0321
410
+ [ Fri Sep 16 00:50:16 2022 ] Eval epoch: 62
411
+ [ Fri Sep 16 00:52:06 2022 ] Mean test loss of 930 batches: 2.4446537494659424.
412
+ [ Fri Sep 16 00:52:06 2022 ] Top1: 55.29%
413
+ [ Fri Sep 16 00:52:07 2022 ] Top5: 81.98%
414
+ [ Fri Sep 16 00:52:07 2022 ] Training epoch: 63
415
+ [ Fri Sep 16 00:52:51 2022 ] Batch(55/162) done. Loss: 0.0085 lr:0.010000 network_time: 0.0275
416
+ [ Fri Sep 16 00:54:03 2022 ] Batch(155/162) done. Loss: 0.0210 lr:0.010000 network_time: 0.0270
417
+ [ Fri Sep 16 00:54:08 2022 ] Eval epoch: 63
418
+ [ Fri Sep 16 00:55:57 2022 ] Mean test loss of 930 batches: 2.4653677940368652.
419
+ [ Fri Sep 16 00:55:57 2022 ] Top1: 55.56%
420
+ [ Fri Sep 16 00:55:58 2022 ] Top5: 82.16%
421
+ [ Fri Sep 16 00:55:58 2022 ] Training epoch: 64
422
+ [ Fri Sep 16 00:57:10 2022 ] Batch(93/162) done. Loss: 0.0419 lr:0.010000 network_time: 0.0277
423
+ [ Fri Sep 16 00:57:59 2022 ] Eval epoch: 64
424
+ [ Fri Sep 16 00:59:48 2022 ] Mean test loss of 930 batches: 2.5171265602111816.
425
+ [ Fri Sep 16 00:59:48 2022 ] Top1: 54.31%
426
+ [ Fri Sep 16 00:59:49 2022 ] Top5: 81.78%
427
+ [ Fri Sep 16 00:59:49 2022 ] Training epoch: 65
428
+ [ Fri Sep 16 01:00:15 2022 ] Batch(31/162) done. Loss: 0.0102 lr:0.010000 network_time: 0.0266
429
+ [ Fri Sep 16 01:01:28 2022 ] Batch(131/162) done. Loss: 0.0297 lr:0.010000 network_time: 0.0288
430
+ [ Fri Sep 16 01:01:50 2022 ] Eval epoch: 65
431
+ [ Fri Sep 16 01:03:39 2022 ] Mean test loss of 930 batches: 2.46934175491333.
432
+ [ Fri Sep 16 01:03:39 2022 ] Top1: 55.12%
433
+ [ Fri Sep 16 01:03:40 2022 ] Top5: 81.94%
434
+ [ Fri Sep 16 01:03:40 2022 ] Training epoch: 66
435
+ [ Fri Sep 16 01:04:34 2022 ] Batch(69/162) done. Loss: 0.0395 lr:0.010000 network_time: 0.0301
436
+ [ Fri Sep 16 01:05:41 2022 ] Eval epoch: 66
437
+ [ Fri Sep 16 01:07:31 2022 ] Mean test loss of 930 batches: 2.5224382877349854.
438
+ [ Fri Sep 16 01:07:31 2022 ] Top1: 55.47%
439
+ [ Fri Sep 16 01:07:32 2022 ] Top5: 81.80%
440
+ [ Fri Sep 16 01:07:32 2022 ] Training epoch: 67
441
+ [ Fri Sep 16 01:07:41 2022 ] Batch(7/162) done. Loss: 0.0076 lr:0.010000 network_time: 0.0296
442
+ [ Fri Sep 16 01:08:54 2022 ] Batch(107/162) done. Loss: 0.0097 lr:0.010000 network_time: 0.0279
443
+ [ Fri Sep 16 01:09:33 2022 ] Eval epoch: 67
444
+ [ Fri Sep 16 01:11:22 2022 ] Mean test loss of 930 batches: 2.4902591705322266.
445
+ [ Fri Sep 16 01:11:22 2022 ] Top1: 55.35%
446
+ [ Fri Sep 16 01:11:23 2022 ] Top5: 81.95%
447
+ [ Fri Sep 16 01:11:23 2022 ] Training epoch: 68
448
+ [ Fri Sep 16 01:12:00 2022 ] Batch(45/162) done. Loss: 0.0064 lr:0.010000 network_time: 0.0280
449
+ [ Fri Sep 16 01:13:12 2022 ] Batch(145/162) done. Loss: 0.0057 lr:0.010000 network_time: 0.0269
450
+ [ Fri Sep 16 01:13:24 2022 ] Eval epoch: 68
451
+ [ Fri Sep 16 01:15:13 2022 ] Mean test loss of 930 batches: 2.47910737991333.
452
+ [ Fri Sep 16 01:15:13 2022 ] Top1: 55.58%
453
+ [ Fri Sep 16 01:15:14 2022 ] Top5: 82.24%
454
+ [ Fri Sep 16 01:15:14 2022 ] Training epoch: 69
455
+ [ Fri Sep 16 01:16:18 2022 ] Batch(83/162) done. Loss: 0.0091 lr:0.010000 network_time: 0.0289
456
+ [ Fri Sep 16 01:17:15 2022 ] Eval epoch: 69
457
+ [ Fri Sep 16 01:19:04 2022 ] Mean test loss of 930 batches: 2.5265135765075684.
458
+ [ Fri Sep 16 01:19:04 2022 ] Top1: 54.84%
459
+ [ Fri Sep 16 01:19:05 2022 ] Top5: 81.86%
460
+ [ Fri Sep 16 01:19:05 2022 ] Training epoch: 70
461
+ [ Fri Sep 16 01:19:25 2022 ] Batch(21/162) done. Loss: 0.0230 lr:0.010000 network_time: 0.0311
462
+ [ Fri Sep 16 01:20:37 2022 ] Batch(121/162) done. Loss: 0.0078 lr:0.010000 network_time: 0.0326
463
+ [ Fri Sep 16 01:21:06 2022 ] Eval epoch: 70
464
+ [ Fri Sep 16 01:22:55 2022 ] Mean test loss of 930 batches: 2.474184036254883.
465
+ [ Fri Sep 16 01:22:55 2022 ] Top1: 55.45%
466
+ [ Fri Sep 16 01:22:56 2022 ] Top5: 82.10%
467
+ [ Fri Sep 16 01:22:56 2022 ] Training epoch: 71
468
+ [ Fri Sep 16 01:23:43 2022 ] Batch(59/162) done. Loss: 0.0104 lr:0.010000 network_time: 0.0270
469
+ [ Fri Sep 16 01:24:55 2022 ] Batch(159/162) done. Loss: 0.0061 lr:0.010000 network_time: 0.0309
470
+ [ Fri Sep 16 01:24:57 2022 ] Eval epoch: 71
471
+ [ Fri Sep 16 01:26:46 2022 ] Mean test loss of 930 batches: 2.546384572982788.
472
+ [ Fri Sep 16 01:26:47 2022 ] Top1: 54.93%
473
+ [ Fri Sep 16 01:26:47 2022 ] Top5: 81.87%
474
+ [ Fri Sep 16 01:26:47 2022 ] Training epoch: 72
475
+ [ Fri Sep 16 01:28:02 2022 ] Batch(97/162) done. Loss: 0.0070 lr:0.010000 network_time: 0.0311
476
+ [ Fri Sep 16 01:28:48 2022 ] Eval epoch: 72
477
+ [ Fri Sep 16 01:30:38 2022 ] Mean test loss of 930 batches: 2.5100717544555664.
478
+ [ Fri Sep 16 01:30:38 2022 ] Top1: 55.77%
479
+ [ Fri Sep 16 01:30:38 2022 ] Top5: 82.31%
480
+ [ Fri Sep 16 01:30:39 2022 ] Training epoch: 73
481
+ [ Fri Sep 16 01:31:08 2022 ] Batch(35/162) done. Loss: 0.0055 lr:0.010000 network_time: 0.0316
482
+ [ Fri Sep 16 01:32:21 2022 ] Batch(135/162) done. Loss: 0.0062 lr:0.010000 network_time: 0.0302
483
+ [ Fri Sep 16 01:32:40 2022 ] Eval epoch: 73
484
+ [ Fri Sep 16 01:34:28 2022 ] Mean test loss of 930 batches: 2.4956727027893066.
485
+ [ Fri Sep 16 01:34:29 2022 ] Top1: 56.07%
486
+ [ Fri Sep 16 01:34:29 2022 ] Top5: 82.33%
487
+ [ Fri Sep 16 01:34:29 2022 ] Training epoch: 74
488
+ [ Fri Sep 16 01:35:26 2022 ] Batch(73/162) done. Loss: 0.0058 lr:0.010000 network_time: 0.0304
489
+ [ Fri Sep 16 01:36:30 2022 ] Eval epoch: 74
490
+ [ Fri Sep 16 01:38:19 2022 ] Mean test loss of 930 batches: 2.4992711544036865.
491
+ [ Fri Sep 16 01:38:19 2022 ] Top1: 55.49%
492
+ [ Fri Sep 16 01:38:19 2022 ] Top5: 82.19%
493
+ [ Fri Sep 16 01:38:20 2022 ] Training epoch: 75
494
+ [ Fri Sep 16 01:38:32 2022 ] Batch(11/162) done. Loss: 0.0068 lr:0.010000 network_time: 0.0274
495
+ [ Fri Sep 16 01:39:44 2022 ] Batch(111/162) done. Loss: 0.0088 lr:0.010000 network_time: 0.0283
496
+ [ Fri Sep 16 01:40:21 2022 ] Eval epoch: 75
497
+ [ Fri Sep 16 01:42:09 2022 ] Mean test loss of 930 batches: 2.5417563915252686.
498
+ [ Fri Sep 16 01:42:10 2022 ] Top1: 54.24%
499
+ [ Fri Sep 16 01:42:10 2022 ] Top5: 81.63%
500
+ [ Fri Sep 16 01:42:10 2022 ] Training epoch: 76
501
+ [ Fri Sep 16 01:42:50 2022 ] Batch(49/162) done. Loss: 0.0113 lr:0.010000 network_time: 0.0317
502
+ [ Fri Sep 16 01:44:02 2022 ] Batch(149/162) done. Loss: 0.0099 lr:0.010000 network_time: 0.0267
503
+ [ Fri Sep 16 01:44:11 2022 ] Eval epoch: 76
504
+ [ Fri Sep 16 01:46:00 2022 ] Mean test loss of 930 batches: 2.5105996131896973.
505
+ [ Fri Sep 16 01:46:00 2022 ] Top1: 55.93%
506
+ [ Fri Sep 16 01:46:01 2022 ] Top5: 82.12%
507
+ [ Fri Sep 16 01:46:01 2022 ] Training epoch: 77
508
+ [ Fri Sep 16 01:47:08 2022 ] Batch(87/162) done. Loss: 0.0104 lr:0.010000 network_time: 0.0266
509
+ [ Fri Sep 16 01:48:02 2022 ] Eval epoch: 77
510
+ [ Fri Sep 16 01:49:50 2022 ] Mean test loss of 930 batches: 2.4512035846710205.
511
+ [ Fri Sep 16 01:49:51 2022 ] Top1: 55.85%
512
+ [ Fri Sep 16 01:49:51 2022 ] Top5: 82.47%
513
+ [ Fri Sep 16 01:49:51 2022 ] Training epoch: 78
514
+ [ Fri Sep 16 01:50:14 2022 ] Batch(25/162) done. Loss: 0.0062 lr:0.010000 network_time: 0.0299
515
+ [ Fri Sep 16 01:51:26 2022 ] Batch(125/162) done. Loss: 0.0035 lr:0.010000 network_time: 0.0276
516
+ [ Fri Sep 16 01:51:53 2022 ] Eval epoch: 78
517
+ [ Fri Sep 16 01:53:42 2022 ] Mean test loss of 930 batches: 2.4763002395629883.
518
+ [ Fri Sep 16 01:53:42 2022 ] Top1: 55.33%
519
+ [ Fri Sep 16 01:53:43 2022 ] Top5: 82.01%
520
+ [ Fri Sep 16 01:53:43 2022 ] Training epoch: 79
521
+ [ Fri Sep 16 01:54:33 2022 ] Batch(63/162) done. Loss: 0.0081 lr:0.010000 network_time: 0.0294
522
+ [ Fri Sep 16 01:55:44 2022 ] Eval epoch: 79
523
+ [ Fri Sep 16 01:57:33 2022 ] Mean test loss of 930 batches: 2.486677646636963.
524
+ [ Fri Sep 16 01:57:33 2022 ] Top1: 55.50%
525
+ [ Fri Sep 16 01:57:34 2022 ] Top5: 82.13%
526
+ [ Fri Sep 16 01:57:34 2022 ] Training epoch: 80
527
+ [ Fri Sep 16 01:57:39 2022 ] Batch(1/162) done. Loss: 0.0030 lr:0.010000 network_time: 0.0301
528
+ [ Fri Sep 16 01:58:51 2022 ] Batch(101/162) done. Loss: 0.0107 lr:0.010000 network_time: 0.0270
529
+ [ Fri Sep 16 01:59:35 2022 ] Eval epoch: 80
530
+ [ Fri Sep 16 02:01:24 2022 ] Mean test loss of 930 batches: 2.5012757778167725.
531
+ [ Fri Sep 16 02:01:24 2022 ] Top1: 55.39%
532
+ [ Fri Sep 16 02:01:25 2022 ] Top5: 82.13%
533
+ [ Fri Sep 16 02:01:25 2022 ] Training epoch: 81
534
+ [ Fri Sep 16 02:01:57 2022 ] Batch(39/162) done. Loss: 0.0049 lr:0.001000 network_time: 0.0252
535
+ [ Fri Sep 16 02:03:10 2022 ] Batch(139/162) done. Loss: 0.0029 lr:0.001000 network_time: 0.0298
536
+ [ Fri Sep 16 02:03:26 2022 ] Eval epoch: 81
537
+ [ Fri Sep 16 02:05:14 2022 ] Mean test loss of 930 batches: 2.506415843963623.
538
+ [ Fri Sep 16 02:05:14 2022 ] Top1: 55.65%
539
+ [ Fri Sep 16 02:05:15 2022 ] Top5: 82.03%
540
+ [ Fri Sep 16 02:05:15 2022 ] Training epoch: 82
541
+ [ Fri Sep 16 02:06:15 2022 ] Batch(77/162) done. Loss: 0.0062 lr:0.001000 network_time: 0.0330
542
+ [ Fri Sep 16 02:07:16 2022 ] Eval epoch: 82
543
+ [ Fri Sep 16 02:09:05 2022 ] Mean test loss of 930 batches: 2.457895278930664.
544
+ [ Fri Sep 16 02:09:05 2022 ] Top1: 56.04%
545
+ [ Fri Sep 16 02:09:06 2022 ] Top5: 82.38%
546
+ [ Fri Sep 16 02:09:06 2022 ] Training epoch: 83
547
+ [ Fri Sep 16 02:09:21 2022 ] Batch(15/162) done. Loss: 0.0077 lr:0.001000 network_time: 0.0274
548
+ [ Fri Sep 16 02:10:33 2022 ] Batch(115/162) done. Loss: 0.0077 lr:0.001000 network_time: 0.0297
549
+ [ Fri Sep 16 02:11:07 2022 ] Eval epoch: 83
550
+ [ Fri Sep 16 02:12:55 2022 ] Mean test loss of 930 batches: 2.492645025253296.
551
+ [ Fri Sep 16 02:12:56 2022 ] Top1: 55.22%
552
+ [ Fri Sep 16 02:12:56 2022 ] Top5: 82.08%
553
+ [ Fri Sep 16 02:12:57 2022 ] Training epoch: 84
554
+ [ Fri Sep 16 02:13:39 2022 ] Batch(53/162) done. Loss: 0.0060 lr:0.001000 network_time: 0.0295
555
+ [ Fri Sep 16 02:14:52 2022 ] Batch(153/162) done. Loss: 0.0130 lr:0.001000 network_time: 0.0274
556
+ [ Fri Sep 16 02:14:58 2022 ] Eval epoch: 84
557
+ [ Fri Sep 16 02:16:47 2022 ] Mean test loss of 930 batches: 2.51953125.
558
+ [ Fri Sep 16 02:16:47 2022 ] Top1: 56.11%
559
+ [ Fri Sep 16 02:16:48 2022 ] Top5: 82.06%
560
+ [ Fri Sep 16 02:16:48 2022 ] Training epoch: 85
561
+ [ Fri Sep 16 02:17:58 2022 ] Batch(91/162) done. Loss: 0.0052 lr:0.001000 network_time: 0.0278
562
+ [ Fri Sep 16 02:18:49 2022 ] Eval epoch: 85
563
+ [ Fri Sep 16 02:20:38 2022 ] Mean test loss of 930 batches: 2.5209572315216064.
564
+ [ Fri Sep 16 02:20:39 2022 ] Top1: 55.48%
565
+ [ Fri Sep 16 02:20:39 2022 ] Top5: 82.19%
566
+ [ Fri Sep 16 02:20:40 2022 ] Training epoch: 86
567
+ [ Fri Sep 16 02:21:05 2022 ] Batch(29/162) done. Loss: 0.0048 lr:0.001000 network_time: 0.0270
568
+ [ Fri Sep 16 02:22:17 2022 ] Batch(129/162) done. Loss: 0.0119 lr:0.001000 network_time: 0.0269
569
+ [ Fri Sep 16 02:22:41 2022 ] Eval epoch: 86
570
+ [ Fri Sep 16 02:24:30 2022 ] Mean test loss of 930 batches: 2.516221523284912.
571
+ [ Fri Sep 16 02:24:30 2022 ] Top1: 54.71%
572
+ [ Fri Sep 16 02:24:31 2022 ] Top5: 81.78%
573
+ [ Fri Sep 16 02:24:31 2022 ] Training epoch: 87
574
+ [ Fri Sep 16 02:25:24 2022 ] Batch(67/162) done. Loss: 0.0088 lr:0.001000 network_time: 0.0544
575
+ [ Fri Sep 16 02:26:32 2022 ] Eval epoch: 87
576
+ [ Fri Sep 16 02:28:21 2022 ] Mean test loss of 930 batches: 2.474106550216675.
577
+ [ Fri Sep 16 02:28:21 2022 ] Top1: 56.02%
578
+ [ Fri Sep 16 02:28:22 2022 ] Top5: 82.42%
579
+ [ Fri Sep 16 02:28:22 2022 ] Training epoch: 88
580
+ [ Fri Sep 16 02:28:29 2022 ] Batch(5/162) done. Loss: 0.0110 lr:0.001000 network_time: 0.0265
581
+ [ Fri Sep 16 02:29:42 2022 ] Batch(105/162) done. Loss: 0.0054 lr:0.001000 network_time: 0.0281
582
+ [ Fri Sep 16 02:30:23 2022 ] Eval epoch: 88
583
+ [ Fri Sep 16 02:32:11 2022 ] Mean test loss of 930 batches: 2.4749226570129395.
584
+ [ Fri Sep 16 02:32:12 2022 ] Top1: 55.66%
585
+ [ Fri Sep 16 02:32:12 2022 ] Top5: 82.22%
586
+ [ Fri Sep 16 02:32:13 2022 ] Training epoch: 89
587
+ [ Fri Sep 16 02:32:48 2022 ] Batch(43/162) done. Loss: 0.0048 lr:0.001000 network_time: 0.0293
588
+ [ Fri Sep 16 02:34:00 2022 ] Batch(143/162) done. Loss: 0.0074 lr:0.001000 network_time: 0.0271
589
+ [ Fri Sep 16 02:34:14 2022 ] Eval epoch: 89
590
+ [ Fri Sep 16 02:36:02 2022 ] Mean test loss of 930 batches: 2.5148983001708984.
591
+ [ Fri Sep 16 02:36:02 2022 ] Top1: 54.74%
592
+ [ Fri Sep 16 02:36:03 2022 ] Top5: 82.05%
593
+ [ Fri Sep 16 02:36:03 2022 ] Training epoch: 90
594
+ [ Fri Sep 16 02:37:06 2022 ] Batch(81/162) done. Loss: 0.0063 lr:0.001000 network_time: 0.0316
595
+ [ Fri Sep 16 02:38:04 2022 ] Eval epoch: 90
596
+ [ Fri Sep 16 02:39:53 2022 ] Mean test loss of 930 batches: 2.497434616088867.
597
+ [ Fri Sep 16 02:39:53 2022 ] Top1: 55.47%
598
+ [ Fri Sep 16 02:39:53 2022 ] Top5: 82.13%
599
+ [ Fri Sep 16 02:39:54 2022 ] Training epoch: 91
600
+ [ Fri Sep 16 02:40:12 2022 ] Batch(19/162) done. Loss: 0.0053 lr:0.001000 network_time: 0.0273
601
+ [ Fri Sep 16 02:41:24 2022 ] Batch(119/162) done. Loss: 0.0271 lr:0.001000 network_time: 0.0275
602
+ [ Fri Sep 16 02:41:55 2022 ] Eval epoch: 91
603
+ [ Fri Sep 16 02:43:43 2022 ] Mean test loss of 930 batches: 2.519994020462036.
604
+ [ Fri Sep 16 02:43:44 2022 ] Top1: 55.82%
605
+ [ Fri Sep 16 02:43:44 2022 ] Top5: 82.46%
606
+ [ Fri Sep 16 02:43:44 2022 ] Training epoch: 92
607
+ [ Fri Sep 16 02:44:30 2022 ] Batch(57/162) done. Loss: 0.0090 lr:0.001000 network_time: 0.0321
608
+ [ Fri Sep 16 02:45:42 2022 ] Batch(157/162) done. Loss: 0.0116 lr:0.001000 network_time: 0.0258
609
+ [ Fri Sep 16 02:45:45 2022 ] Eval epoch: 92
610
+ [ Fri Sep 16 02:47:34 2022 ] Mean test loss of 930 batches: 2.5442798137664795.
611
+ [ Fri Sep 16 02:47:34 2022 ] Top1: 54.29%
612
+ [ Fri Sep 16 02:47:35 2022 ] Top5: 81.77%
613
+ [ Fri Sep 16 02:47:35 2022 ] Training epoch: 93
614
+ [ Fri Sep 16 02:48:48 2022 ] Batch(95/162) done. Loss: 0.0067 lr:0.001000 network_time: 0.0278
615
+ [ Fri Sep 16 02:49:36 2022 ] Eval epoch: 93
616
+ [ Fri Sep 16 02:51:24 2022 ] Mean test loss of 930 batches: 2.500120162963867.
617
+ [ Fri Sep 16 02:51:25 2022 ] Top1: 55.30%
618
+ [ Fri Sep 16 02:51:25 2022 ] Top5: 82.14%
619
+ [ Fri Sep 16 02:51:25 2022 ] Training epoch: 94
620
+ [ Fri Sep 16 02:51:54 2022 ] Batch(33/162) done. Loss: 0.0062 lr:0.001000 network_time: 0.0301
621
+ [ Fri Sep 16 02:53:06 2022 ] Batch(133/162) done. Loss: 0.0076 lr:0.001000 network_time: 0.0275
622
+ [ Fri Sep 16 02:53:27 2022 ] Eval epoch: 94
623
+ [ Fri Sep 16 02:55:16 2022 ] Mean test loss of 930 batches: 2.46699595451355.
624
+ [ Fri Sep 16 02:55:16 2022 ] Top1: 56.12%
625
+ [ Fri Sep 16 02:55:17 2022 ] Top5: 82.52%
626
+ [ Fri Sep 16 02:55:17 2022 ] Training epoch: 95
627
+ [ Fri Sep 16 02:56:12 2022 ] Batch(71/162) done. Loss: 0.0181 lr:0.001000 network_time: 0.0295
628
+ [ Fri Sep 16 02:57:18 2022 ] Eval epoch: 95
629
+ [ Fri Sep 16 02:59:07 2022 ] Mean test loss of 930 batches: 2.488225221633911.
630
+ [ Fri Sep 16 02:59:07 2022 ] Top1: 55.72%
631
+ [ Fri Sep 16 02:59:07 2022 ] Top5: 82.19%
632
+ [ Fri Sep 16 02:59:08 2022 ] Training epoch: 96
633
+ [ Fri Sep 16 02:59:18 2022 ] Batch(9/162) done. Loss: 0.0066 lr:0.001000 network_time: 0.0255
634
+ [ Fri Sep 16 03:00:31 2022 ] Batch(109/162) done. Loss: 0.0051 lr:0.001000 network_time: 0.0260
635
+ [ Fri Sep 16 03:01:09 2022 ] Eval epoch: 96
636
+ [ Fri Sep 16 03:02:57 2022 ] Mean test loss of 930 batches: 2.567333936691284.
637
+ [ Fri Sep 16 03:02:58 2022 ] Top1: 53.87%
638
+ [ Fri Sep 16 03:02:58 2022 ] Top5: 81.54%
639
+ [ Fri Sep 16 03:02:59 2022 ] Training epoch: 97
640
+ [ Fri Sep 16 03:03:36 2022 ] Batch(47/162) done. Loss: 0.0046 lr:0.001000 network_time: 0.0237
641
+ [ Fri Sep 16 03:04:49 2022 ] Batch(147/162) done. Loss: 0.0059 lr:0.001000 network_time: 0.0283
642
+ [ Fri Sep 16 03:04:59 2022 ] Eval epoch: 97
643
+ [ Fri Sep 16 03:06:48 2022 ] Mean test loss of 930 batches: 2.4648122787475586.
644
+ [ Fri Sep 16 03:06:48 2022 ] Top1: 55.67%
645
+ [ Fri Sep 16 03:06:49 2022 ] Top5: 82.32%
646
+ [ Fri Sep 16 03:06:49 2022 ] Training epoch: 98
647
+ [ Fri Sep 16 03:07:55 2022 ] Batch(85/162) done. Loss: 0.0076 lr:0.001000 network_time: 0.0262
648
+ [ Fri Sep 16 03:08:50 2022 ] Eval epoch: 98
649
+ [ Fri Sep 16 03:10:38 2022 ] Mean test loss of 930 batches: 2.4949991703033447.
650
+ [ Fri Sep 16 03:10:39 2022 ] Top1: 55.13%
651
+ [ Fri Sep 16 03:10:39 2022 ] Top5: 82.00%
652
+ [ Fri Sep 16 03:10:40 2022 ] Training epoch: 99
653
+ [ Fri Sep 16 03:11:00 2022 ] Batch(23/162) done. Loss: 0.0024 lr:0.001000 network_time: 0.0289
654
+ [ Fri Sep 16 03:12:13 2022 ] Batch(123/162) done. Loss: 0.0083 lr:0.001000 network_time: 0.0261
655
+ [ Fri Sep 16 03:12:41 2022 ] Eval epoch: 99
656
+ [ Fri Sep 16 03:14:29 2022 ] Mean test loss of 930 batches: 2.5010464191436768.
657
+ [ Fri Sep 16 03:14:29 2022 ] Top1: 54.66%
658
+ [ Fri Sep 16 03:14:30 2022 ] Top5: 81.95%
659
+ [ Fri Sep 16 03:14:30 2022 ] Training epoch: 100
660
+ [ Fri Sep 16 03:15:18 2022 ] Batch(61/162) done. Loss: 0.0057 lr:0.001000 network_time: 0.0331
661
+ [ Fri Sep 16 03:16:31 2022 ] Batch(161/162) done. Loss: 0.0055 lr:0.001000 network_time: 0.0280
662
+ [ Fri Sep 16 03:16:31 2022 ] Eval epoch: 100
663
+ [ Fri Sep 16 03:18:20 2022 ] Mean test loss of 930 batches: 2.5131969451904297.
664
+ [ Fri Sep 16 03:18:20 2022 ] Top1: 55.20%
665
+ [ Fri Sep 16 03:18:21 2022 ] Top5: 81.94%
666
+ [ Fri Sep 16 03:18:21 2022 ] Training epoch: 101
667
+ [ Fri Sep 16 03:19:37 2022 ] Batch(99/162) done. Loss: 0.0046 lr:0.000100 network_time: 0.0303
668
+ [ Fri Sep 16 03:20:22 2022 ] Eval epoch: 101
669
+ [ Fri Sep 16 03:22:11 2022 ] Mean test loss of 930 batches: 2.4665334224700928.
670
+ [ Fri Sep 16 03:22:11 2022 ] Top1: 55.54%
671
+ [ Fri Sep 16 03:22:12 2022 ] Top5: 82.38%
672
+ [ Fri Sep 16 03:22:12 2022 ] Training epoch: 102
673
+ [ Fri Sep 16 03:22:43 2022 ] Batch(37/162) done. Loss: 0.0060 lr:0.000100 network_time: 0.0228
674
+ [ Fri Sep 16 03:23:55 2022 ] Batch(137/162) done. Loss: 0.0044 lr:0.000100 network_time: 0.0312
675
+ [ Fri Sep 16 03:24:13 2022 ] Eval epoch: 102
676
+ [ Fri Sep 16 03:26:01 2022 ] Mean test loss of 930 batches: 2.4502739906311035.
677
+ [ Fri Sep 16 03:26:02 2022 ] Top1: 55.90%
678
+ [ Fri Sep 16 03:26:02 2022 ] Top5: 82.50%
679
+ [ Fri Sep 16 03:26:02 2022 ] Training epoch: 103
680
+ [ Fri Sep 16 03:27:01 2022 ] Batch(75/162) done. Loss: 0.0103 lr:0.000100 network_time: 0.0273
681
+ [ Fri Sep 16 03:28:04 2022 ] Eval epoch: 103
682
+ [ Fri Sep 16 03:29:53 2022 ] Mean test loss of 930 batches: 2.5235326290130615.
683
+ [ Fri Sep 16 03:29:53 2022 ] Top1: 55.81%
684
+ [ Fri Sep 16 03:29:54 2022 ] Top5: 82.14%
685
+ [ Fri Sep 16 03:29:54 2022 ] Training epoch: 104
686
+ [ Fri Sep 16 03:30:07 2022 ] Batch(13/162) done. Loss: 0.0055 lr:0.000100 network_time: 0.0256
687
+ [ Fri Sep 16 03:31:20 2022 ] Batch(113/162) done. Loss: 0.0029 lr:0.000100 network_time: 0.0274
688
+ [ Fri Sep 16 03:31:55 2022 ] Eval epoch: 104
689
+ [ Fri Sep 16 03:33:43 2022 ] Mean test loss of 930 batches: 2.4924840927124023.
690
+ [ Fri Sep 16 03:33:44 2022 ] Top1: 55.46%
691
+ [ Fri Sep 16 03:33:44 2022 ] Top5: 82.16%
692
+ [ Fri Sep 16 03:33:45 2022 ] Training epoch: 105
693
+ [ Fri Sep 16 03:34:26 2022 ] Batch(51/162) done. Loss: 0.0061 lr:0.000100 network_time: 0.0315
694
+ [ Fri Sep 16 03:35:38 2022 ] Batch(151/162) done. Loss: 0.0072 lr:0.000100 network_time: 0.0268
695
+ [ Fri Sep 16 03:35:46 2022 ] Eval epoch: 105
696
+ [ Fri Sep 16 03:37:34 2022 ] Mean test loss of 930 batches: 2.515953302383423.
697
+ [ Fri Sep 16 03:37:35 2022 ] Top1: 55.07%
698
+ [ Fri Sep 16 03:37:35 2022 ] Top5: 82.06%
699
+ [ Fri Sep 16 03:37:35 2022 ] Training epoch: 106
700
+ [ Fri Sep 16 03:38:44 2022 ] Batch(89/162) done. Loss: 0.0054 lr:0.000100 network_time: 0.0260
701
+ [ Fri Sep 16 03:39:36 2022 ] Eval epoch: 106
702
+ [ Fri Sep 16 03:41:25 2022 ] Mean test loss of 930 batches: 2.479599714279175.
703
+ [ Fri Sep 16 03:41:25 2022 ] Top1: 56.04%
704
+ [ Fri Sep 16 03:41:26 2022 ] Top5: 82.38%
705
+ [ Fri Sep 16 03:41:26 2022 ] Training epoch: 107
706
+ [ Fri Sep 16 03:41:49 2022 ] Batch(27/162) done. Loss: 0.0047 lr:0.000100 network_time: 0.0290
707
+ [ Fri Sep 16 03:43:02 2022 ] Batch(127/162) done. Loss: 0.0053 lr:0.000100 network_time: 0.0265
708
+ [ Fri Sep 16 03:43:27 2022 ] Eval epoch: 107
709
+ [ Fri Sep 16 03:45:15 2022 ] Mean test loss of 930 batches: 2.5122196674346924.
710
+ [ Fri Sep 16 03:45:15 2022 ] Top1: 55.54%
711
+ [ Fri Sep 16 03:45:16 2022 ] Top5: 82.23%
712
+ [ Fri Sep 16 03:45:16 2022 ] Training epoch: 108
713
+ [ Fri Sep 16 03:46:07 2022 ] Batch(65/162) done. Loss: 0.0042 lr:0.000100 network_time: 0.0276
714
+ [ Fri Sep 16 03:47:17 2022 ] Eval epoch: 108
715
+ [ Fri Sep 16 03:49:06 2022 ] Mean test loss of 930 batches: 2.4967191219329834.
716
+ [ Fri Sep 16 03:49:06 2022 ] Top1: 55.90%
717
+ [ Fri Sep 16 03:49:07 2022 ] Top5: 82.29%
718
+ [ Fri Sep 16 03:49:07 2022 ] Training epoch: 109
719
+ [ Fri Sep 16 03:49:13 2022 ] Batch(3/162) done. Loss: 0.0055 lr:0.000100 network_time: 0.0331
720
+ [ Fri Sep 16 03:50:26 2022 ] Batch(103/162) done. Loss: 0.0072 lr:0.000100 network_time: 0.0273
721
+ [ Fri Sep 16 03:51:08 2022 ] Eval epoch: 109
722
+ [ Fri Sep 16 03:52:56 2022 ] Mean test loss of 930 batches: 2.4798450469970703.
723
+ [ Fri Sep 16 03:52:57 2022 ] Top1: 55.22%
724
+ [ Fri Sep 16 03:52:57 2022 ] Top5: 82.04%
725
+ [ Fri Sep 16 03:52:58 2022 ] Training epoch: 110
726
+ [ Fri Sep 16 03:53:31 2022 ] Batch(41/162) done. Loss: 0.0102 lr:0.000100 network_time: 0.0264
727
+ [ Fri Sep 16 03:54:44 2022 ] Batch(141/162) done. Loss: 0.0033 lr:0.000100 network_time: 0.0272
728
+ [ Fri Sep 16 03:54:58 2022 ] Eval epoch: 110
729
+ [ Fri Sep 16 03:56:47 2022 ] Mean test loss of 930 batches: 2.487183094024658.
730
+ [ Fri Sep 16 03:56:47 2022 ] Top1: 55.46%
731
+ [ Fri Sep 16 03:56:48 2022 ] Top5: 82.16%
732
+ [ Fri Sep 16 03:56:48 2022 ] Training epoch: 111
733
+ [ Fri Sep 16 03:57:49 2022 ] Batch(79/162) done. Loss: 0.0044 lr:0.000100 network_time: 0.0267
734
+ [ Fri Sep 16 03:58:49 2022 ] Eval epoch: 111
735
+ [ Fri Sep 16 04:00:37 2022 ] Mean test loss of 930 batches: 2.580383539199829.
736
+ [ Fri Sep 16 04:00:38 2022 ] Top1: 54.04%
737
+ [ Fri Sep 16 04:00:38 2022 ] Top5: 81.62%
738
+ [ Fri Sep 16 04:00:39 2022 ] Training epoch: 112
739
+ [ Fri Sep 16 04:00:55 2022 ] Batch(17/162) done. Loss: 0.0042 lr:0.000100 network_time: 0.0292
740
+ [ Fri Sep 16 04:02:08 2022 ] Batch(117/162) done. Loss: 0.0080 lr:0.000100 network_time: 0.0281
741
+ [ Fri Sep 16 04:02:40 2022 ] Eval epoch: 112
742
+ [ Fri Sep 16 04:04:28 2022 ] Mean test loss of 930 batches: 2.472482442855835.
743
+ [ Fri Sep 16 04:04:29 2022 ] Top1: 56.17%
744
+ [ Fri Sep 16 04:04:29 2022 ] Top5: 82.30%
745
+ [ Fri Sep 16 04:04:29 2022 ] Training epoch: 113
746
+ [ Fri Sep 16 04:05:13 2022 ] Batch(55/162) done. Loss: 0.0036 lr:0.000100 network_time: 0.0282
747
+ [ Fri Sep 16 04:06:26 2022 ] Batch(155/162) done. Loss: 0.0104 lr:0.000100 network_time: 0.0279
748
+ [ Fri Sep 16 04:06:31 2022 ] Eval epoch: 113
749
+ [ Fri Sep 16 04:08:19 2022 ] Mean test loss of 930 batches: 2.5517749786376953.
750
+ [ Fri Sep 16 04:08:20 2022 ] Top1: 54.01%
751
+ [ Fri Sep 16 04:08:20 2022 ] Top5: 81.42%
752
+ [ Fri Sep 16 04:08:21 2022 ] Training epoch: 114
753
+ [ Fri Sep 16 04:09:32 2022 ] Batch(93/162) done. Loss: 0.0054 lr:0.000100 network_time: 0.0269
754
+ [ Fri Sep 16 04:10:22 2022 ] Eval epoch: 114
755
+ [ Fri Sep 16 04:12:10 2022 ] Mean test loss of 930 batches: 2.4713258743286133.
756
+ [ Fri Sep 16 04:12:10 2022 ] Top1: 55.90%
757
+ [ Fri Sep 16 04:12:11 2022 ] Top5: 82.37%
758
+ [ Fri Sep 16 04:12:11 2022 ] Training epoch: 115
759
+ [ Fri Sep 16 04:12:38 2022 ] Batch(31/162) done. Loss: 0.0062 lr:0.000100 network_time: 0.0263
760
+ [ Fri Sep 16 04:13:50 2022 ] Batch(131/162) done. Loss: 0.0058 lr:0.000100 network_time: 0.0277
761
+ [ Fri Sep 16 04:14:12 2022 ] Eval epoch: 115
762
+ [ Fri Sep 16 04:16:01 2022 ] Mean test loss of 930 batches: 2.5101470947265625.
763
+ [ Fri Sep 16 04:16:02 2022 ] Top1: 54.96%
764
+ [ Fri Sep 16 04:16:02 2022 ] Top5: 81.76%
765
+ [ Fri Sep 16 04:16:02 2022 ] Training epoch: 116
766
+ [ Fri Sep 16 04:16:57 2022 ] Batch(69/162) done. Loss: 0.0025 lr:0.000100 network_time: 0.0270
767
+ [ Fri Sep 16 04:18:04 2022 ] Eval epoch: 116
768
+ [ Fri Sep 16 04:19:52 2022 ] Mean test loss of 930 batches: 2.466994047164917.
769
+ [ Fri Sep 16 04:19:52 2022 ] Top1: 55.87%
770
+ [ Fri Sep 16 04:19:53 2022 ] Top5: 82.43%
771
+ [ Fri Sep 16 04:19:53 2022 ] Training epoch: 117
772
+ [ Fri Sep 16 04:20:02 2022 ] Batch(7/162) done. Loss: 0.0102 lr:0.000100 network_time: 0.0335
773
+ [ Fri Sep 16 04:21:14 2022 ] Batch(107/162) done. Loss: 0.0040 lr:0.000100 network_time: 0.0267
774
+ [ Fri Sep 16 04:21:54 2022 ] Eval epoch: 117
775
+ [ Fri Sep 16 04:23:42 2022 ] Mean test loss of 930 batches: 2.4645636081695557.
776
+ [ Fri Sep 16 04:23:43 2022 ] Top1: 56.02%
777
+ [ Fri Sep 16 04:23:43 2022 ] Top5: 82.30%
778
+ [ Fri Sep 16 04:23:43 2022 ] Training epoch: 118
779
+ [ Fri Sep 16 04:24:20 2022 ] Batch(45/162) done. Loss: 0.0073 lr:0.000100 network_time: 0.0287
780
+ [ Fri Sep 16 04:25:33 2022 ] Batch(145/162) done. Loss: 0.0072 lr:0.000100 network_time: 0.0273
781
+ [ Fri Sep 16 04:25:44 2022 ] Eval epoch: 118
782
+ [ Fri Sep 16 04:27:33 2022 ] Mean test loss of 930 batches: 2.5144882202148438.
783
+ [ Fri Sep 16 04:27:34 2022 ] Top1: 55.68%
784
+ [ Fri Sep 16 04:27:34 2022 ] Top5: 82.14%
785
+ [ Fri Sep 16 04:27:34 2022 ] Training epoch: 119
786
+ [ Fri Sep 16 04:28:38 2022 ] Batch(83/162) done. Loss: 0.0052 lr:0.000100 network_time: 0.0334
787
+ [ Fri Sep 16 04:29:35 2022 ] Eval epoch: 119
788
+ [ Fri Sep 16 04:31:24 2022 ] Mean test loss of 930 batches: 2.436511993408203.
789
+ [ Fri Sep 16 04:31:25 2022 ] Top1: 56.25%
790
+ [ Fri Sep 16 04:31:25 2022 ] Top5: 82.43%
791
+ [ Fri Sep 16 04:31:25 2022 ] Training epoch: 120
792
+ [ Fri Sep 16 04:31:45 2022 ] Batch(21/162) done. Loss: 0.0137 lr:0.000100 network_time: 0.0301
793
+ [ Fri Sep 16 04:32:57 2022 ] Batch(121/162) done. Loss: 0.0038 lr:0.000100 network_time: 0.0281
794
+ [ Fri Sep 16 04:33:26 2022 ] Eval epoch: 120
795
+ [ Fri Sep 16 04:35:15 2022 ] Mean test loss of 930 batches: 2.4610888957977295.
796
+ [ Fri Sep 16 04:35:15 2022 ] Top1: 55.96%
797
+ [ Fri Sep 16 04:35:16 2022 ] Top5: 82.46%
798
+ [ Fri Sep 16 04:35:16 2022 ] Training epoch: 121
799
+ [ Fri Sep 16 04:36:03 2022 ] Batch(59/162) done. Loss: 0.0081 lr:0.000100 network_time: 0.0287
800
+ [ Fri Sep 16 04:37:16 2022 ] Batch(159/162) done. Loss: 0.0052 lr:0.000100 network_time: 0.0276
801
+ [ Fri Sep 16 04:37:17 2022 ] Eval epoch: 121
802
+ [ Fri Sep 16 04:39:06 2022 ] Mean test loss of 930 batches: 2.50304913520813.
803
+ [ Fri Sep 16 04:39:06 2022 ] Top1: 55.96%
804
+ [ Fri Sep 16 04:39:07 2022 ] Top5: 82.19%
805
+ [ Fri Sep 16 04:39:07 2022 ] Training epoch: 122
806
+ [ Fri Sep 16 04:40:21 2022 ] Batch(97/162) done. Loss: 0.0044 lr:0.000100 network_time: 0.0331
807
+ [ Fri Sep 16 04:41:08 2022 ] Eval epoch: 122
808
+ [ Fri Sep 16 04:42:57 2022 ] Mean test loss of 930 batches: 2.4955897331237793.
809
+ [ Fri Sep 16 04:42:57 2022 ] Top1: 55.60%
810
+ [ Fri Sep 16 04:42:58 2022 ] Top5: 82.18%
811
+ [ Fri Sep 16 04:42:58 2022 ] Training epoch: 123
812
+ [ Fri Sep 16 04:43:27 2022 ] Batch(35/162) done. Loss: 0.0118 lr:0.000100 network_time: 0.0309
813
+ [ Fri Sep 16 04:44:40 2022 ] Batch(135/162) done. Loss: 0.0049 lr:0.000100 network_time: 0.0266
814
+ [ Fri Sep 16 04:44:59 2022 ] Eval epoch: 123
815
+ [ Fri Sep 16 04:46:47 2022 ] Mean test loss of 930 batches: 2.523346185684204.
816
+ [ Fri Sep 16 04:46:48 2022 ] Top1: 54.91%
817
+ [ Fri Sep 16 04:46:48 2022 ] Top5: 81.91%
818
+ [ Fri Sep 16 04:46:48 2022 ] Training epoch: 124
819
+ [ Fri Sep 16 04:47:45 2022 ] Batch(73/162) done. Loss: 0.0091 lr:0.000100 network_time: 0.0302
820
+ [ Fri Sep 16 04:48:50 2022 ] Eval epoch: 124
821
+ [ Fri Sep 16 04:50:39 2022 ] Mean test loss of 930 batches: 2.455005168914795.
822
+ [ Fri Sep 16 04:50:40 2022 ] Top1: 56.26%
823
+ [ Fri Sep 16 04:50:40 2022 ] Top5: 82.41%
824
+ [ Fri Sep 16 04:50:40 2022 ] Training epoch: 125
825
+ [ Fri Sep 16 04:50:52 2022 ] Batch(11/162) done. Loss: 0.0039 lr:0.000100 network_time: 0.0295
826
+ [ Fri Sep 16 04:52:05 2022 ] Batch(111/162) done. Loss: 0.0035 lr:0.000100 network_time: 0.0289
827
+ [ Fri Sep 16 04:52:42 2022 ] Eval epoch: 125
828
+ [ Fri Sep 16 04:54:30 2022 ] Mean test loss of 930 batches: 2.5138392448425293.
829
+ [ Fri Sep 16 04:54:30 2022 ] Top1: 55.17%
830
+ [ Fri Sep 16 04:54:31 2022 ] Top5: 82.06%
831
+ [ Fri Sep 16 04:54:31 2022 ] Training epoch: 126
832
+ [ Fri Sep 16 04:55:11 2022 ] Batch(49/162) done. Loss: 0.0060 lr:0.000100 network_time: 0.0278
833
+ [ Fri Sep 16 04:56:23 2022 ] Batch(149/162) done. Loss: 0.0061 lr:0.000100 network_time: 0.0257
834
+ [ Fri Sep 16 04:56:32 2022 ] Eval epoch: 126
835
+ [ Fri Sep 16 04:58:20 2022 ] Mean test loss of 930 batches: 2.4895901679992676.
836
+ [ Fri Sep 16 04:58:21 2022 ] Top1: 55.15%
837
+ [ Fri Sep 16 04:58:21 2022 ] Top5: 82.06%
838
+ [ Fri Sep 16 04:58:22 2022 ] Training epoch: 127
839
+ [ Fri Sep 16 04:59:29 2022 ] Batch(87/162) done. Loss: 0.0051 lr:0.000100 network_time: 0.0358
840
+ [ Fri Sep 16 05:00:22 2022 ] Eval epoch: 127
841
+ [ Fri Sep 16 05:02:11 2022 ] Mean test loss of 930 batches: 2.492002010345459.
842
+ [ Fri Sep 16 05:02:11 2022 ] Top1: 55.78%
843
+ [ Fri Sep 16 05:02:12 2022 ] Top5: 82.30%
844
+ [ Fri Sep 16 05:02:12 2022 ] Training epoch: 128
845
+ [ Fri Sep 16 05:02:34 2022 ] Batch(25/162) done. Loss: 0.0043 lr:0.000100 network_time: 0.0336
846
+ [ Fri Sep 16 05:03:47 2022 ] Batch(125/162) done. Loss: 0.0035 lr:0.000100 network_time: 0.0265
847
+ [ Fri Sep 16 05:04:13 2022 ] Eval epoch: 128
848
+ [ Fri Sep 16 05:06:02 2022 ] Mean test loss of 930 batches: 2.4913203716278076.
849
+ [ Fri Sep 16 05:06:02 2022 ] Top1: 55.61%
850
+ [ Fri Sep 16 05:06:02 2022 ] Top5: 82.05%
851
+ [ Fri Sep 16 05:06:03 2022 ] Training epoch: 129
852
+ [ Fri Sep 16 05:06:53 2022 ] Batch(63/162) done. Loss: 0.0023 lr:0.000100 network_time: 0.0286
853
+ [ Fri Sep 16 05:08:04 2022 ] Eval epoch: 129
854
+ [ Fri Sep 16 05:09:52 2022 ] Mean test loss of 930 batches: 2.4635448455810547.
855
+ [ Fri Sep 16 05:09:53 2022 ] Top1: 55.69%
856
+ [ Fri Sep 16 05:09:53 2022 ] Top5: 82.30%
857
+ [ Fri Sep 16 05:09:53 2022 ] Training epoch: 130
858
+ [ Fri Sep 16 05:09:58 2022 ] Batch(1/162) done. Loss: 0.0034 lr:0.000100 network_time: 0.0330
859
+ [ Fri Sep 16 05:11:11 2022 ] Batch(101/162) done. Loss: 0.0111 lr:0.000100 network_time: 0.0464
860
+ [ Fri Sep 16 05:11:55 2022 ] Eval epoch: 130
861
+ [ Fri Sep 16 05:13:43 2022 ] Mean test loss of 930 batches: 2.4768619537353516.
862
+ [ Fri Sep 16 05:13:44 2022 ] Top1: 56.12%
863
+ [ Fri Sep 16 05:13:44 2022 ] Top5: 82.42%
864
+ [ Fri Sep 16 05:13:44 2022 ] Training epoch: 131
865
+ [ Fri Sep 16 05:14:17 2022 ] Batch(39/162) done. Loss: 0.0049 lr:0.000100 network_time: 0.0280
866
+ [ Fri Sep 16 05:15:29 2022 ] Batch(139/162) done. Loss: 0.0040 lr:0.000100 network_time: 0.0272
867
+ [ Fri Sep 16 05:15:46 2022 ] Eval epoch: 131
868
+ [ Fri Sep 16 05:17:34 2022 ] Mean test loss of 930 batches: 2.473442554473877.
869
+ [ Fri Sep 16 05:17:34 2022 ] Top1: 56.14%
870
+ [ Fri Sep 16 05:17:35 2022 ] Top5: 82.52%
871
+ [ Fri Sep 16 05:17:35 2022 ] Training epoch: 132
872
+ [ Fri Sep 16 05:18:35 2022 ] Batch(77/162) done. Loss: 0.0044 lr:0.000100 network_time: 0.0265
873
+ [ Fri Sep 16 05:19:36 2022 ] Eval epoch: 132
874
+ [ Fri Sep 16 05:21:24 2022 ] Mean test loss of 930 batches: 2.4647676944732666.
875
+ [ Fri Sep 16 05:21:25 2022 ] Top1: 55.30%
876
+ [ Fri Sep 16 05:21:25 2022 ] Top5: 82.25%
877
+ [ Fri Sep 16 05:21:26 2022 ] Training epoch: 133
878
+ [ Fri Sep 16 05:21:40 2022 ] Batch(15/162) done. Loss: 0.0071 lr:0.000100 network_time: 0.0346
879
+ [ Fri Sep 16 05:22:53 2022 ] Batch(115/162) done. Loss: 0.0053 lr:0.000100 network_time: 0.0266
880
+ [ Fri Sep 16 05:23:27 2022 ] Eval epoch: 133
881
+ [ Fri Sep 16 05:25:15 2022 ] Mean test loss of 930 batches: 2.603180408477783.
882
+ [ Fri Sep 16 05:25:16 2022 ] Top1: 53.21%
883
+ [ Fri Sep 16 05:25:16 2022 ] Top5: 81.03%
884
+ [ Fri Sep 16 05:25:16 2022 ] Training epoch: 134
885
+ [ Fri Sep 16 05:25:59 2022 ] Batch(53/162) done. Loss: 0.0043 lr:0.000100 network_time: 0.0274
886
+ [ Fri Sep 16 05:27:12 2022 ] Batch(153/162) done. Loss: 0.0059 lr:0.000100 network_time: 0.0275
887
+ [ Fri Sep 16 05:27:18 2022 ] Eval epoch: 134
888
+ [ Fri Sep 16 05:29:06 2022 ] Mean test loss of 930 batches: 2.610170364379883.
889
+ [ Fri Sep 16 05:29:06 2022 ] Top1: 53.38%
890
+ [ Fri Sep 16 05:29:07 2022 ] Top5: 81.18%
891
+ [ Fri Sep 16 05:29:07 2022 ] Training epoch: 135
892
+ [ Fri Sep 16 05:30:17 2022 ] Batch(91/162) done. Loss: 0.0038 lr:0.000100 network_time: 0.0276
893
+ [ Fri Sep 16 05:31:08 2022 ] Eval epoch: 135
894
+ [ Fri Sep 16 05:32:57 2022 ] Mean test loss of 930 batches: 2.456367254257202.
895
+ [ Fri Sep 16 05:32:57 2022 ] Top1: 55.63%
896
+ [ Fri Sep 16 05:32:58 2022 ] Top5: 82.31%
897
+ [ Fri Sep 16 05:32:58 2022 ] Training epoch: 136
898
+ [ Fri Sep 16 05:33:23 2022 ] Batch(29/162) done. Loss: 0.0082 lr:0.000100 network_time: 0.0315
899
+ [ Fri Sep 16 05:34:36 2022 ] Batch(129/162) done. Loss: 0.0039 lr:0.000100 network_time: 0.0327
900
+ [ Fri Sep 16 05:34:59 2022 ] Eval epoch: 136
901
+ [ Fri Sep 16 05:36:48 2022 ] Mean test loss of 930 batches: 2.4485137462615967.
902
+ [ Fri Sep 16 05:36:48 2022 ] Top1: 56.15%
903
+ [ Fri Sep 16 05:36:49 2022 ] Top5: 82.54%
904
+ [ Fri Sep 16 05:36:49 2022 ] Training epoch: 137
905
+ [ Fri Sep 16 05:37:42 2022 ] Batch(67/162) done. Loss: 0.0027 lr:0.000100 network_time: 0.0256
906
+ [ Fri Sep 16 05:38:50 2022 ] Eval epoch: 137
907
+ [ Fri Sep 16 05:40:39 2022 ] Mean test loss of 930 batches: 2.4866716861724854.
908
+ [ Fri Sep 16 05:40:40 2022 ] Top1: 55.65%
909
+ [ Fri Sep 16 05:40:40 2022 ] Top5: 82.24%
910
+ [ Fri Sep 16 05:40:41 2022 ] Training epoch: 138
911
+ [ Fri Sep 16 05:40:48 2022 ] Batch(5/162) done. Loss: 0.0039 lr:0.000100 network_time: 0.0279
912
+ [ Fri Sep 16 05:42:01 2022 ] Batch(105/162) done. Loss: 0.0069 lr:0.000100 network_time: 0.0314
913
+ [ Fri Sep 16 05:42:42 2022 ] Eval epoch: 138
914
+ [ Fri Sep 16 05:44:30 2022 ] Mean test loss of 930 batches: 2.444272518157959.
915
+ [ Fri Sep 16 05:44:31 2022 ] Top1: 56.21%
916
+ [ Fri Sep 16 05:44:31 2022 ] Top5: 82.68%
917
+ [ Fri Sep 16 05:44:31 2022 ] Training epoch: 139
918
+ [ Fri Sep 16 05:45:07 2022 ] Batch(43/162) done. Loss: 0.0072 lr:0.000100 network_time: 0.0277
919
+ [ Fri Sep 16 05:46:19 2022 ] Batch(143/162) done. Loss: 0.0030 lr:0.000100 network_time: 0.0275
920
+ [ Fri Sep 16 05:46:33 2022 ] Eval epoch: 139
921
+ [ Fri Sep 16 05:48:21 2022 ] Mean test loss of 930 batches: 2.5009186267852783.
922
+ [ Fri Sep 16 05:48:22 2022 ] Top1: 55.14%
923
+ [ Fri Sep 16 05:48:22 2022 ] Top5: 81.98%
924
+ [ Fri Sep 16 05:48:22 2022 ] Training epoch: 140
925
+ [ Fri Sep 16 05:49:25 2022 ] Batch(81/162) done. Loss: 0.0030 lr:0.000100 network_time: 0.0236
926
+ [ Fri Sep 16 05:50:24 2022 ] Eval epoch: 140
927
+ [ Fri Sep 16 05:52:12 2022 ] Mean test loss of 930 batches: 2.492652177810669.
928
+ [ Fri Sep 16 05:52:13 2022 ] Top1: 55.05%
929
+ [ Fri Sep 16 05:52:13 2022 ] Top5: 81.94%
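The log.txt above alternates training-batch lines with per-epoch evaluation blocks (Eval epoch, mean test loss of 930 batches, Top1, Top5). As a reading aid, here is a minimal sketch of how the best epoch could be recovered from a log in this format; the line patterns are copied from the listing above, while the local file name log.txt and the helper itself are illustrative assumptions rather than part of the repository.

import re

def best_epoch(log_path="log.txt"):
    # Illustrative parser for the format shown above: each "Eval epoch: N" line
    # is followed by its "Top1: xx.xx%" line, so remember the epoch and pair them up.
    epoch, top1 = None, {}
    with open(log_path) as f:
        for line in f:
            m = re.search(r"Eval epoch: (\d+)", line)
            if m:
                epoch = int(m.group(1))
                continue
            m = re.search(r"Top1: ([\d.]+)%", line)
            if m and epoch is not None:
                top1[epoch] = float(m.group(1))
    best = max(top1, key=top1.get)
    return best, top1[best]

# Example: print(best_epoch("log.txt")) reports the epoch with the highest Top1 accuracy.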
ckpt/Others/Shift-GCN/ntu120_xset/ntu120_bone_motion_xset/shift_gcn.py ADDED
@@ -0,0 +1,216 @@
1
+ import torch
2
+ import torch.nn as nn
3
+ import torch.nn.functional as F
4
+ from torch.autograd import Variable
5
+ import numpy as np
6
+ import math
7
+
8
+ import sys
9
+ sys.path.append("./model/Temporal_shift/")
10
+
11
+ from cuda.shift import Shift
12
+
13
+
14
+ def import_class(name):
15
+ components = name.split('.')
16
+ mod = __import__(components[0])
17
+ for comp in components[1:]:
18
+ mod = getattr(mod, comp)
19
+ return mod
20
+
21
+ def conv_init(conv):
22
+ nn.init.kaiming_normal_(conv.weight, mode='fan_out')
23
+ nn.init.constant_(conv.bias, 0)
24
+
25
+
26
+ def bn_init(bn, scale):
27
+ nn.init.constant_(bn.weight, scale)
28
+ nn.init.constant_(bn.bias, 0)
29
+
30
+
31
+ class tcn(nn.Module):
32
+ def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
33
+ super(tcn, self).__init__()
34
+ pad = int((kernel_size - 1) / 2)
35
+ self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
36
+ stride=(stride, 1))
37
+
38
+ self.bn = nn.BatchNorm2d(out_channels)
39
+ self.relu = nn.ReLU()
40
+ conv_init(self.conv)
41
+ bn_init(self.bn, 1)
42
+
43
+ def forward(self, x):
44
+ x = self.bn(self.conv(x))
45
+ return x
46
+
47
+
48
+ class Shift_tcn(nn.Module):
49
+ def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
50
+ super(Shift_tcn, self).__init__()
51
+
52
+ self.in_channels = in_channels
53
+ self.out_channels = out_channels
54
+
55
+ self.bn = nn.BatchNorm2d(in_channels)
56
+ self.bn2 = nn.BatchNorm2d(in_channels)
57
+ bn_init(self.bn2, 1)
58
+ self.relu = nn.ReLU(inplace=True)
59
+ self.shift_in = Shift(channel=in_channels, stride=1, init_scale=1)
60
+ self.shift_out = Shift(channel=out_channels, stride=stride, init_scale=1)
61
+
62
+ self.temporal_linear = nn.Conv2d(in_channels, out_channels, 1)
63
+ nn.init.kaiming_normal_(self.temporal_linear.weight, mode='fan_out')
64
+
65
+ def forward(self, x):
66
+ x = self.bn(x)
67
+ # shift1
68
+ x = self.shift_in(x)
69
+ x = self.temporal_linear(x)
70
+ x = self.relu(x)
71
+ # shift2
72
+ x = self.shift_out(x)
73
+ x = self.bn2(x)
74
+ return x
75
+
76
+
77
+ class Shift_gcn(nn.Module):
78
+ def __init__(self, in_channels, out_channels, A, coff_embedding=4, num_subset=3):
79
+ super(Shift_gcn, self).__init__()
80
+ self.in_channels = in_channels
81
+ self.out_channels = out_channels
82
+ if in_channels != out_channels:
83
+ self.down = nn.Sequential(
84
+ nn.Conv2d(in_channels, out_channels, 1),
85
+ nn.BatchNorm2d(out_channels)
86
+ )
87
+ else:
88
+ self.down = lambda x: x
89
+
90
+ self.Linear_weight = nn.Parameter(torch.zeros(in_channels, out_channels, requires_grad=True, device='cuda'), requires_grad=True)
91
+ nn.init.normal_(self.Linear_weight, 0,math.sqrt(1.0/out_channels))
92
+
93
+ self.Linear_bias = nn.Parameter(torch.zeros(1,1,out_channels,requires_grad=True,device='cuda'),requires_grad=True)
94
+ nn.init.constant_(self.Linear_bias, 0)
95
+
96
+ self.Feature_Mask = nn.Parameter(torch.ones(1,25,in_channels, requires_grad=True,device='cuda'),requires_grad=True)
97
+ nn.init.constant_(self.Feature_Mask, 0)
98
+
99
+ self.bn = nn.BatchNorm1d(25*out_channels)
100
+ self.relu = nn.ReLU()
101
+
102
+ for m in self.modules():
103
+ if isinstance(m, nn.Conv2d):
104
+ conv_init(m)
105
+ elif isinstance(m, nn.BatchNorm2d):
106
+ bn_init(m, 1)
107
+
108
+ index_array = np.empty(25*in_channels).astype(np.int64)
109
+ for i in range(25):
110
+ for j in range(in_channels):
111
+ index_array[i*in_channels + j] = (i*in_channels + j + j*in_channels)%(in_channels*25)
112
+ self.shift_in = nn.Parameter(torch.from_numpy(index_array),requires_grad=False)
113
+
114
+ index_array = np.empty(25*out_channels).astype(np.int64)
115
+ for i in range(25):
116
+ for j in range(out_channels):
117
+ index_array[i*out_channels + j] = (i*out_channels + j - j*out_channels)%(out_channels*25)
118
+ self.shift_out = nn.Parameter(torch.from_numpy(index_array),requires_grad=False)
119
+
120
+
121
+ def forward(self, x0):
122
+ n, c, t, v = x0.size()
123
+ x = x0.permute(0,2,3,1).contiguous()
124
+
125
+ # shift1
126
+ x = x.view(n*t,v*c)
127
+ x = torch.index_select(x, 1, self.shift_in)
128
+ x = x.view(n*t,v,c)
129
+ x = x * (torch.tanh(self.Feature_Mask)+1)
130
+
131
+ x = torch.einsum('nwc,cd->nwd', (x, self.Linear_weight)).contiguous() # nt,v,c
132
+ x = x + self.Linear_bias
133
+
134
+ # shift2
135
+ x = x.view(n*t,-1)
136
+ x = torch.index_select(x, 1, self.shift_out)
137
+ x = self.bn(x)
138
+ x = x.view(n,t,v,self.out_channels).permute(0,3,1,2) # n,c,t,v
139
+
140
+ x = x + self.down(x0)
141
+ x = self.relu(x)
142
+ return x
143
+
144
+
145
+ class TCN_GCN_unit(nn.Module):
146
+ def __init__(self, in_channels, out_channels, A, stride=1, residual=True):
147
+ super(TCN_GCN_unit, self).__init__()
148
+ self.gcn1 = Shift_gcn(in_channels, out_channels, A)
149
+ self.tcn1 = Shift_tcn(out_channels, out_channels, stride=stride)
150
+ self.relu = nn.ReLU()
151
+
152
+ if not residual:
153
+ self.residual = lambda x: 0
154
+
155
+ elif (in_channels == out_channels) and (stride == 1):
156
+ self.residual = lambda x: x
157
+ else:
158
+ self.residual = tcn(in_channels, out_channels, kernel_size=1, stride=stride)
159
+
160
+ def forward(self, x):
161
+ x = self.tcn1(self.gcn1(x)) + self.residual(x)
162
+ return self.relu(x)
163
+
164
+
165
+ class Model(nn.Module):
166
+ def __init__(self, num_class=60, num_point=25, num_person=2, graph=None, graph_args=dict(), in_channels=3):
167
+ super(Model, self).__init__()
168
+
169
+ if graph is None:
170
+ raise ValueError()
171
+ else:
172
+ Graph = import_class(graph)
173
+ self.graph = Graph(**graph_args)
174
+
175
+ A = self.graph.A
176
+ self.data_bn = nn.BatchNorm1d(num_person * in_channels * num_point)
177
+
178
+ self.l1 = TCN_GCN_unit(3, 64, A, residual=False)
179
+ self.l2 = TCN_GCN_unit(64, 64, A)
180
+ self.l3 = TCN_GCN_unit(64, 64, A)
181
+ self.l4 = TCN_GCN_unit(64, 64, A)
182
+ self.l5 = TCN_GCN_unit(64, 128, A, stride=2)
183
+ self.l6 = TCN_GCN_unit(128, 128, A)
184
+ self.l7 = TCN_GCN_unit(128, 128, A)
185
+ self.l8 = TCN_GCN_unit(128, 256, A, stride=2)
186
+ self.l9 = TCN_GCN_unit(256, 256, A)
187
+ self.l10 = TCN_GCN_unit(256, 256, A)
188
+
189
+ self.fc = nn.Linear(256, num_class)
190
+ nn.init.normal_(self.fc.weight, 0, math.sqrt(2. / num_class))
191
+ bn_init(self.data_bn, 1)
192
+
193
+ def forward(self, x):
194
+ N, C, T, V, M = x.size()
195
+
196
+ x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
197
+ x = self.data_bn(x)
198
+ x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
199
+
200
+ x = self.l1(x)
201
+ x = self.l2(x)
202
+ x = self.l3(x)
203
+ x = self.l4(x)
204
+ x = self.l5(x)
205
+ x = self.l6(x)
206
+ x = self.l7(x)
207
+ x = self.l8(x)
208
+ x = self.l9(x)
209
+ x = self.l10(x)
210
+
211
+ # N*M,C,T,V
212
+ c_new = x.size(1)
213
+ x = x.view(N, M, c_new, -1)
214
+ x = x.mean(3).mean(1)
215
+
216
+ return self.fc(x)
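In the Shift_gcn module above, the graph convolution is replaced by a fixed channel-wise spatial shift: the precomputed shift_in and shift_out index arrays gather, for output joint i, channel j from joint (i + j) mod 25 before and after the pointwise linear layer. The toy sketch below reproduces that indexing on a small feature map so the permutation is easy to inspect; the sizes V=3 and C=4 (instead of 25 joints) and the variable names are illustrative only.

import numpy as np
import torch

V, C = 3, 4  # toy joint/channel counts; the model above uses V=25
idx = np.empty(V * C, dtype=np.int64)
for i in range(V):
    for j in range(C):
        # same formula as shift_in above: output channel j at joint i
        # is read from channel j of joint (i + j) mod V
        idx[i * C + j] = (i * C + j + j * C) % (C * V)

x = torch.arange(V * C).float().view(1, V * C)  # one frame, flattened joint-major as in forward()
shifted = torch.index_select(x, 1, torch.from_numpy(idx)).view(V, C)
print(shifted)  # row i holds channel j gathered from joint (i + j) mod V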
ckpt/Others/Shift-GCN/ntu120_xset/ntu120_bone_xset/config.yaml ADDED
@@ -0,0 +1,56 @@
1
+ Experiment_name: ntu120_bone_xset
2
+ base_lr: 0.1
3
+ batch_size: 64
4
+ config: ./config/ntu120_xset/train_bone.yaml
5
+ device:
6
+ - 0
7
+ - 1
8
+ eval_interval: 5
9
+ feeder: feeders.feeder.Feeder
10
+ ignore_weights: []
11
+ log_interval: 100
12
+ model: model.shift_gcn.Model
13
+ model_args:
14
+ graph: graph.ntu_rgb_d.Graph
15
+ graph_args:
16
+ labeling_mode: spatial
17
+ num_class: 120
18
+ num_person: 2
19
+ num_point: 25
20
+ model_saved_name: ./save_models/ntu120_bone_xset
21
+ nesterov: true
22
+ num_epoch: 140
23
+ num_worker: 32
24
+ only_train_epoch: 1
25
+ only_train_part: true
26
+ optimizer: SGD
27
+ phase: train
28
+ print_log: true
29
+ save_interval: 2
30
+ save_score: false
31
+ seed: 1
32
+ show_topk:
33
+ - 1
34
+ - 5
35
+ start_epoch: 0
36
+ step:
37
+ - 60
38
+ - 80
39
+ - 100
40
+ test_batch_size: 64
41
+ test_feeder_args:
42
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_bone.npy
43
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl
44
+ train_feeder_args:
45
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_bone.npy
46
+ debug: false
47
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl
48
+ normalization: false
49
+ random_choose: false
50
+ random_move: false
51
+ random_shift: false
52
+ window_size: -1
53
+ warm_up_epoch: 0
54
+ weight_decay: 0.0001
55
+ weights: null
56
+ work_dir: ./work_dir/ntu120_bone_xset
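The YAML above is the training configuration recorded alongside this checkpoint. Below is a minimal sketch of how such a config maps onto the Model class from shift_gcn.py earlier in this commit; it assumes the file has been saved locally as config.yaml, that PyYAML is installed, and that the repository's model and graph packages (plus the CUDA Shift op) are importable. The import_class helper simply mirrors the one in shift_gcn.py.

import yaml

def import_class(name):
    # same dotted-path loader as in shift_gcn.py above
    components = name.split('.')
    mod = __import__(components[0])
    for comp in components[1:]:
        mod = getattr(mod, comp)
    return mod

with open("config.yaml") as f:
    cfg = yaml.safe_load(f)

Model = import_class(cfg['model'])   # 'model.shift_gcn.Model'
net = Model(**cfg['model_args'])     # num_class=120, num_point=25, num_person=2, graph=..., graph_args=...
print(cfg['Experiment_name'], cfg['base_lr'], cfg['step'], cfg['num_epoch'])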
ckpt/Others/Shift-GCN/ntu120_xset/ntu120_bone_xset/eval_results/best_acc.pkl ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3f590d1de630391236ee05c1526ce6353bd0452854b5fdfb3a58563be9687d31
3
+ size 34946665
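The best_acc.pkl entry above is a Git LFS pointer (only the oid and size appear in the diff), so the pickle itself has to be fetched, for example with git lfs pull, before it can be read. Here is a minimal sketch for inspecting it afterwards; the relative path and the assumption that it loads with the standard pickle module are illustrative, and its internal structure is not documented in this commit.

import pickle

# Path relative to the checkpoint directory (assumed layout from the diff above).
with open("eval_results/best_acc.pkl", "rb") as f:
    obj = pickle.load(f)

print(type(obj))
print(len(obj) if hasattr(obj, "__len__") else obj)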
ckpt/Others/Shift-GCN/ntu120_xset/ntu120_bone_xset/log.txt ADDED
@@ -0,0 +1,929 @@
1
+ [ Thu Sep 15 20:53:09 2022 ] Parameters:
2
+ {'work_dir': './work_dir/ntu120_bone_xset', 'model_saved_name': './save_models/ntu120_bone_xset', 'Experiment_name': 'ntu120_bone_xset', 'config': './config/ntu120_xset/train_bone.yaml', 'phase': 'train', 'save_score': False, 'seed': 1, 'log_interval': 100, 'save_interval': 2, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_bone.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_bone.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl'}, 'model': 'model.shift_gcn.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.1, 'step': [60, 80, 100], 'device': [0, 1], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 64, 'test_batch_size': 64, 'start_epoch': 0, 'num_epoch': 140, 'weight_decay': 0.0001, 'only_train_part': True, 'only_train_epoch': 1, 'warm_up_epoch': 0}
3
+
4
+ [ Thu Sep 15 20:53:09 2022 ] Training epoch: 1
5
+ [ Thu Sep 15 20:54:27 2022 ] Batch(99/162) done. Loss: 3.1745 lr:0.100000 network_time: 0.0269
6
+ [ Thu Sep 15 20:55:12 2022 ] Eval epoch: 1
7
+ [ Thu Sep 15 20:57:02 2022 ] Mean test loss of 930 batches: 5.054730415344238.
8
+ [ Thu Sep 15 20:57:03 2022 ] Top1: 7.17%
9
+ [ Thu Sep 15 20:57:03 2022 ] Top5: 25.35%
10
+ [ Thu Sep 15 20:57:03 2022 ] Training epoch: 2
11
+ [ Thu Sep 15 20:57:34 2022 ] Batch(37/162) done. Loss: 2.2364 lr:0.100000 network_time: 0.0296
12
+ [ Thu Sep 15 20:58:47 2022 ] Batch(137/162) done. Loss: 2.4230 lr:0.100000 network_time: 0.0264
13
+ [ Thu Sep 15 20:59:04 2022 ] Eval epoch: 2
14
+ [ Thu Sep 15 21:00:55 2022 ] Mean test loss of 930 batches: 4.307687282562256.
15
+ [ Thu Sep 15 21:00:55 2022 ] Top1: 13.88%
16
+ [ Thu Sep 15 21:00:56 2022 ] Top5: 35.46%
17
+ [ Thu Sep 15 21:00:56 2022 ] Training epoch: 3
18
+ [ Thu Sep 15 21:01:54 2022 ] Batch(75/162) done. Loss: 2.3808 lr:0.100000 network_time: 0.0324
19
+ [ Thu Sep 15 21:02:57 2022 ] Eval epoch: 3
20
+ [ Thu Sep 15 21:04:47 2022 ] Mean test loss of 930 batches: 3.7918362617492676.
21
+ [ Thu Sep 15 21:04:47 2022 ] Top1: 19.66%
22
+ [ Thu Sep 15 21:04:48 2022 ] Top5: 44.34%
23
+ [ Thu Sep 15 21:04:48 2022 ] Training epoch: 4
24
+ [ Thu Sep 15 21:05:01 2022 ] Batch(13/162) done. Loss: 1.9126 lr:0.100000 network_time: 0.0397
25
+ [ Thu Sep 15 21:06:14 2022 ] Batch(113/162) done. Loss: 1.7151 lr:0.100000 network_time: 0.0259
26
+ [ Thu Sep 15 21:06:49 2022 ] Eval epoch: 4
27
+ [ Thu Sep 15 21:08:38 2022 ] Mean test loss of 930 batches: 3.3427066802978516.
28
+ [ Thu Sep 15 21:08:39 2022 ] Top1: 23.91%
29
+ [ Thu Sep 15 21:08:39 2022 ] Top5: 49.61%
30
+ [ Thu Sep 15 21:08:40 2022 ] Training epoch: 5
31
+ [ Thu Sep 15 21:09:20 2022 ] Batch(51/162) done. Loss: 1.5963 lr:0.100000 network_time: 0.0277
32
+ [ Thu Sep 15 21:10:33 2022 ] Batch(151/162) done. Loss: 2.0971 lr:0.100000 network_time: 0.0262
33
+ [ Thu Sep 15 21:10:40 2022 ] Eval epoch: 5
34
+ [ Thu Sep 15 21:12:30 2022 ] Mean test loss of 930 batches: 3.069018602371216.
35
+ [ Thu Sep 15 21:12:31 2022 ] Top1: 28.82%
36
+ [ Thu Sep 15 21:12:31 2022 ] Top5: 57.04%
37
+ [ Thu Sep 15 21:12:31 2022 ] Training epoch: 6
38
+ [ Thu Sep 15 21:13:40 2022 ] Batch(89/162) done. Loss: 1.6986 lr:0.100000 network_time: 0.0322
39
+ [ Thu Sep 15 21:14:32 2022 ] Eval epoch: 6
40
+ [ Thu Sep 15 21:16:23 2022 ] Mean test loss of 930 batches: 2.9965245723724365.
41
+ [ Thu Sep 15 21:16:23 2022 ] Top1: 29.52%
42
+ [ Thu Sep 15 21:16:24 2022 ] Top5: 59.43%
43
+ [ Thu Sep 15 21:16:24 2022 ] Training epoch: 7
44
+ [ Thu Sep 15 21:16:47 2022 ] Batch(27/162) done. Loss: 1.3754 lr:0.100000 network_time: 0.0283
45
+ [ Thu Sep 15 21:18:00 2022 ] Batch(127/162) done. Loss: 1.2434 lr:0.100000 network_time: 0.0266
46
+ [ Thu Sep 15 21:18:25 2022 ] Eval epoch: 7
47
+ [ Thu Sep 15 21:20:14 2022 ] Mean test loss of 930 batches: 2.6982879638671875.
48
+ [ Thu Sep 15 21:20:15 2022 ] Top1: 35.94%
49
+ [ Thu Sep 15 21:20:15 2022 ] Top5: 66.52%
50
+ [ Thu Sep 15 21:20:15 2022 ] Training epoch: 8
51
+ [ Thu Sep 15 21:21:06 2022 ] Batch(65/162) done. Loss: 1.2703 lr:0.100000 network_time: 0.0313
52
+ [ Thu Sep 15 21:22:16 2022 ] Eval epoch: 8
53
+ [ Thu Sep 15 21:24:05 2022 ] Mean test loss of 930 batches: 2.8924400806427.
54
+ [ Thu Sep 15 21:24:06 2022 ] Top1: 34.11%
55
+ [ Thu Sep 15 21:24:06 2022 ] Top5: 63.32%
56
+ [ Thu Sep 15 21:24:06 2022 ] Training epoch: 9
57
+ [ Thu Sep 15 21:24:12 2022 ] Batch(3/162) done. Loss: 0.9692 lr:0.100000 network_time: 0.0265
58
+ [ Thu Sep 15 21:25:25 2022 ] Batch(103/162) done. Loss: 1.3851 lr:0.100000 network_time: 0.0268
59
+ [ Thu Sep 15 21:26:07 2022 ] Eval epoch: 9
60
+ [ Thu Sep 15 21:27:58 2022 ] Mean test loss of 930 batches: 3.032758951187134.
61
+ [ Thu Sep 15 21:27:58 2022 ] Top1: 35.23%
62
+ [ Thu Sep 15 21:27:59 2022 ] Top5: 64.33%
63
+ [ Thu Sep 15 21:27:59 2022 ] Training epoch: 10
64
+ [ Thu Sep 15 21:28:32 2022 ] Batch(41/162) done. Loss: 0.9587 lr:0.100000 network_time: 0.0267
65
+ [ Thu Sep 15 21:29:45 2022 ] Batch(141/162) done. Loss: 0.9386 lr:0.100000 network_time: 0.0262
66
+ [ Thu Sep 15 21:29:59 2022 ] Eval epoch: 10
67
+ [ Thu Sep 15 21:31:49 2022 ] Mean test loss of 930 batches: 2.911442995071411.
68
+ [ Thu Sep 15 21:31:50 2022 ] Top1: 37.85%
69
+ [ Thu Sep 15 21:31:50 2022 ] Top5: 66.94%
70
+ [ Thu Sep 15 21:31:51 2022 ] Training epoch: 11
71
+ [ Thu Sep 15 21:32:52 2022 ] Batch(79/162) done. Loss: 1.0747 lr:0.100000 network_time: 0.0276
72
+ [ Thu Sep 15 21:33:51 2022 ] Eval epoch: 11
73
+ [ Thu Sep 15 21:35:41 2022 ] Mean test loss of 930 batches: 2.7861416339874268.
74
+ [ Thu Sep 15 21:35:41 2022 ] Top1: 37.24%
75
+ [ Thu Sep 15 21:35:42 2022 ] Top5: 68.24%
76
+ [ Thu Sep 15 21:35:42 2022 ] Training epoch: 12
77
+ [ Thu Sep 15 21:35:58 2022 ] Batch(17/162) done. Loss: 0.8764 lr:0.100000 network_time: 0.0337
78
+ [ Thu Sep 15 21:37:10 2022 ] Batch(117/162) done. Loss: 1.0292 lr:0.100000 network_time: 0.0271
79
+ [ Thu Sep 15 21:37:43 2022 ] Eval epoch: 12
80
+ [ Thu Sep 15 21:39:32 2022 ] Mean test loss of 930 batches: 2.67501163482666.
81
+ [ Thu Sep 15 21:39:33 2022 ] Top1: 38.34%
82
+ [ Thu Sep 15 21:39:33 2022 ] Top5: 71.56%
83
+ [ Thu Sep 15 21:39:33 2022 ] Training epoch: 13
84
+ [ Thu Sep 15 21:40:16 2022 ] Batch(55/162) done. Loss: 0.7406 lr:0.100000 network_time: 0.0259
85
+ [ Thu Sep 15 21:41:29 2022 ] Batch(155/162) done. Loss: 1.0139 lr:0.100000 network_time: 0.0253
86
+ [ Thu Sep 15 21:41:33 2022 ] Eval epoch: 13
87
+ [ Thu Sep 15 21:43:24 2022 ] Mean test loss of 930 batches: 2.403536558151245.
88
+ [ Thu Sep 15 21:43:24 2022 ] Top1: 40.37%
89
+ [ Thu Sep 15 21:43:25 2022 ] Top5: 72.13%
90
+ [ Thu Sep 15 21:43:25 2022 ] Training epoch: 14
91
+ [ Thu Sep 15 21:44:36 2022 ] Batch(93/162) done. Loss: 1.0867 lr:0.100000 network_time: 0.0265
92
+ [ Thu Sep 15 21:45:26 2022 ] Eval epoch: 14
93
+ [ Thu Sep 15 21:47:15 2022 ] Mean test loss of 930 batches: 2.493147373199463.
94
+ [ Thu Sep 15 21:47:15 2022 ] Top1: 39.78%
95
+ [ Thu Sep 15 21:47:15 2022 ] Top5: 72.48%
96
+ [ Thu Sep 15 21:47:16 2022 ] Training epoch: 15
97
+ [ Thu Sep 15 21:47:42 2022 ] Batch(31/162) done. Loss: 0.8762 lr:0.100000 network_time: 0.0296
98
+ [ Thu Sep 15 21:48:54 2022 ] Batch(131/162) done. Loss: 0.5847 lr:0.100000 network_time: 0.0270
99
+ [ Thu Sep 15 21:49:16 2022 ] Eval epoch: 15
100
+ [ Thu Sep 15 21:51:06 2022 ] Mean test loss of 930 batches: 2.6337034702301025.
101
+ [ Thu Sep 15 21:51:06 2022 ] Top1: 40.75%
102
+ [ Thu Sep 15 21:51:07 2022 ] Top5: 72.22%
103
+ [ Thu Sep 15 21:51:07 2022 ] Training epoch: 16
104
+ [ Thu Sep 15 21:52:01 2022 ] Batch(69/162) done. Loss: 0.6017 lr:0.100000 network_time: 0.0269
105
+ [ Thu Sep 15 21:53:08 2022 ] Eval epoch: 16
106
+ [ Thu Sep 15 21:54:58 2022 ] Mean test loss of 930 batches: 2.993208885192871.
107
+ [ Thu Sep 15 21:54:58 2022 ] Top1: 39.27%
108
+ [ Thu Sep 15 21:54:58 2022 ] Top5: 71.64%
109
+ [ Thu Sep 15 21:54:59 2022 ] Training epoch: 17
110
+ [ Thu Sep 15 21:55:07 2022 ] Batch(7/162) done. Loss: 0.6349 lr:0.100000 network_time: 0.0286
111
+ [ Thu Sep 15 21:56:20 2022 ] Batch(107/162) done. Loss: 0.8137 lr:0.100000 network_time: 0.0272
112
+ [ Thu Sep 15 21:56:59 2022 ] Eval epoch: 17
113
+ [ Thu Sep 15 21:58:50 2022 ] Mean test loss of 930 batches: 2.7564895153045654.
114
+ [ Thu Sep 15 21:58:50 2022 ] Top1: 38.92%
115
+ [ Thu Sep 15 21:58:51 2022 ] Top5: 71.24%
116
+ [ Thu Sep 15 21:58:51 2022 ] Training epoch: 18
117
+ [ Thu Sep 15 21:59:27 2022 ] Batch(45/162) done. Loss: 0.7027 lr:0.100000 network_time: 0.0281
118
+ [ Thu Sep 15 22:00:39 2022 ] Batch(145/162) done. Loss: 0.6394 lr:0.100000 network_time: 0.0250
119
+ [ Thu Sep 15 22:00:51 2022 ] Eval epoch: 18
120
+ [ Thu Sep 15 22:02:41 2022 ] Mean test loss of 930 batches: 2.665600299835205.
121
+ [ Thu Sep 15 22:02:41 2022 ] Top1: 41.95%
122
+ [ Thu Sep 15 22:02:42 2022 ] Top5: 72.42%
123
+ [ Thu Sep 15 22:02:42 2022 ] Training epoch: 19
124
+ [ Thu Sep 15 22:03:46 2022 ] Batch(83/162) done. Loss: 0.4636 lr:0.100000 network_time: 0.0268
125
+ [ Thu Sep 15 22:04:43 2022 ] Eval epoch: 19
126
+ [ Thu Sep 15 22:06:32 2022 ] Mean test loss of 930 batches: 2.7386324405670166.
127
+ [ Thu Sep 15 22:06:33 2022 ] Top1: 41.85%
128
+ [ Thu Sep 15 22:06:33 2022 ] Top5: 72.58%
129
+ [ Thu Sep 15 22:06:33 2022 ] Training epoch: 20
130
+ [ Thu Sep 15 22:06:53 2022 ] Batch(21/162) done. Loss: 0.6220 lr:0.100000 network_time: 0.0266
131
+ [ Thu Sep 15 22:08:06 2022 ] Batch(121/162) done. Loss: 0.8370 lr:0.100000 network_time: 0.0304
132
+ [ Thu Sep 15 22:08:35 2022 ] Eval epoch: 20
133
+ [ Thu Sep 15 22:10:24 2022 ] Mean test loss of 930 batches: 3.217198610305786.
134
+ [ Thu Sep 15 22:10:25 2022 ] Top1: 39.58%
135
+ [ Thu Sep 15 22:10:25 2022 ] Top5: 71.58%
136
+ [ Thu Sep 15 22:10:25 2022 ] Training epoch: 21
137
+ [ Thu Sep 15 22:11:12 2022 ] Batch(59/162) done. Loss: 0.5685 lr:0.100000 network_time: 0.0261
138
+ [ Thu Sep 15 22:12:26 2022 ] Batch(159/162) done. Loss: 0.4968 lr:0.100000 network_time: 0.0279
139
+ [ Thu Sep 15 22:12:28 2022 ] Eval epoch: 21
140
+ [ Thu Sep 15 22:14:18 2022 ] Mean test loss of 930 batches: 2.607473850250244.
141
+ [ Thu Sep 15 22:14:18 2022 ] Top1: 45.13%
142
+ [ Thu Sep 15 22:14:19 2022 ] Top5: 74.89%
143
+ [ Thu Sep 15 22:14:19 2022 ] Training epoch: 22
144
+ [ Thu Sep 15 22:15:34 2022 ] Batch(97/162) done. Loss: 0.6277 lr:0.100000 network_time: 0.0301
145
+ [ Thu Sep 15 22:16:21 2022 ] Eval epoch: 22
146
+ [ Thu Sep 15 22:18:10 2022 ] Mean test loss of 930 batches: 3.202122688293457.
147
+ [ Thu Sep 15 22:18:11 2022 ] Top1: 41.16%
148
+ [ Thu Sep 15 22:18:11 2022 ] Top5: 70.16%
149
+ [ Thu Sep 15 22:18:12 2022 ] Training epoch: 23
150
+ [ Thu Sep 15 22:18:41 2022 ] Batch(35/162) done. Loss: 0.4671 lr:0.100000 network_time: 0.0306
151
+ [ Thu Sep 15 22:19:53 2022 ] Batch(135/162) done. Loss: 0.2605 lr:0.100000 network_time: 0.0295
152
+ [ Thu Sep 15 22:20:12 2022 ] Eval epoch: 23
153
+ [ Thu Sep 15 22:22:01 2022 ] Mean test loss of 930 batches: 2.9939019680023193.
154
+ [ Thu Sep 15 22:22:02 2022 ] Top1: 39.67%
155
+ [ Thu Sep 15 22:22:02 2022 ] Top5: 71.05%
156
+ [ Thu Sep 15 22:22:03 2022 ] Training epoch: 24
157
+ [ Thu Sep 15 22:23:00 2022 ] Batch(73/162) done. Loss: 0.3403 lr:0.100000 network_time: 0.0303
158
+ [ Thu Sep 15 22:24:04 2022 ] Eval epoch: 24
159
+ [ Thu Sep 15 22:25:54 2022 ] Mean test loss of 930 batches: 2.6512110233306885.
160
+ [ Thu Sep 15 22:25:55 2022 ] Top1: 43.70%
161
+ [ Thu Sep 15 22:25:55 2022 ] Top5: 74.94%
162
+ [ Thu Sep 15 22:25:56 2022 ] Training epoch: 25
163
+ [ Thu Sep 15 22:26:08 2022 ] Batch(11/162) done. Loss: 0.3482 lr:0.100000 network_time: 0.0267
164
+ [ Thu Sep 15 22:27:20 2022 ] Batch(111/162) done. Loss: 0.5646 lr:0.100000 network_time: 0.0248
165
+ [ Thu Sep 15 22:27:57 2022 ] Eval epoch: 25
166
+ [ Thu Sep 15 22:29:47 2022 ] Mean test loss of 930 batches: 2.80203914642334.
167
+ [ Thu Sep 15 22:29:48 2022 ] Top1: 43.78%
168
+ [ Thu Sep 15 22:29:48 2022 ] Top5: 75.42%
169
+ [ Thu Sep 15 22:29:48 2022 ] Training epoch: 26
170
+ [ Thu Sep 15 22:30:28 2022 ] Batch(49/162) done. Loss: 0.4110 lr:0.100000 network_time: 0.0359
171
+ [ Thu Sep 15 22:31:41 2022 ] Batch(149/162) done. Loss: 0.6011 lr:0.100000 network_time: 0.0454
172
+ [ Thu Sep 15 22:31:50 2022 ] Eval epoch: 26
173
+ [ Thu Sep 15 22:33:40 2022 ] Mean test loss of 930 batches: 3.0699634552001953.
174
+ [ Thu Sep 15 22:33:40 2022 ] Top1: 42.35%
175
+ [ Thu Sep 15 22:33:41 2022 ] Top5: 72.71%
176
+ [ Thu Sep 15 22:33:41 2022 ] Training epoch: 27
177
+ [ Thu Sep 15 22:34:49 2022 ] Batch(87/162) done. Loss: 0.3504 lr:0.100000 network_time: 0.0268
178
+ [ Thu Sep 15 22:35:43 2022 ] Eval epoch: 27
179
+ [ Thu Sep 15 22:37:33 2022 ] Mean test loss of 930 batches: 3.0952885150909424.
180
+ [ Thu Sep 15 22:37:34 2022 ] Top1: 42.13%
181
+ [ Thu Sep 15 22:37:34 2022 ] Top5: 71.48%
182
+ [ Thu Sep 15 22:37:35 2022 ] Training epoch: 28
183
+ [ Thu Sep 15 22:37:57 2022 ] Batch(25/162) done. Loss: 0.2971 lr:0.100000 network_time: 0.0287
184
+ [ Thu Sep 15 22:39:10 2022 ] Batch(125/162) done. Loss: 0.4361 lr:0.100000 network_time: 0.0265
185
+ [ Thu Sep 15 22:39:36 2022 ] Eval epoch: 28
186
+ [ Thu Sep 15 22:41:27 2022 ] Mean test loss of 930 batches: 2.9900245666503906.
187
+ [ Thu Sep 15 22:41:27 2022 ] Top1: 43.13%
188
+ [ Thu Sep 15 22:41:28 2022 ] Top5: 73.28%
189
+ [ Thu Sep 15 22:41:28 2022 ] Training epoch: 29
190
+ [ Thu Sep 15 22:42:18 2022 ] Batch(63/162) done. Loss: 0.3050 lr:0.100000 network_time: 0.0258
191
+ [ Thu Sep 15 22:43:30 2022 ] Eval epoch: 29
192
+ [ Thu Sep 15 22:45:21 2022 ] Mean test loss of 930 batches: 2.6688146591186523.
193
+ [ Thu Sep 15 22:45:21 2022 ] Top1: 47.25%
194
+ [ Thu Sep 15 22:45:22 2022 ] Top5: 76.30%
195
+ [ Thu Sep 15 22:45:22 2022 ] Training epoch: 30
196
+ [ Thu Sep 15 22:45:26 2022 ] Batch(1/162) done. Loss: 0.2491 lr:0.100000 network_time: 0.0287
197
+ [ Thu Sep 15 22:46:39 2022 ] Batch(101/162) done. Loss: 0.2432 lr:0.100000 network_time: 0.0323
198
+ [ Thu Sep 15 22:47:23 2022 ] Eval epoch: 30
199
+ [ Thu Sep 15 22:49:12 2022 ] Mean test loss of 930 batches: 2.776120901107788.
200
+ [ Thu Sep 15 22:49:12 2022 ] Top1: 44.79%
201
+ [ Thu Sep 15 22:49:13 2022 ] Top5: 74.90%
202
+ [ Thu Sep 15 22:49:13 2022 ] Training epoch: 31
203
+ [ Thu Sep 15 22:49:45 2022 ] Batch(39/162) done. Loss: 0.3644 lr:0.100000 network_time: 0.0311
204
+ [ Thu Sep 15 22:50:57 2022 ] Batch(139/162) done. Loss: 0.6405 lr:0.100000 network_time: 0.0285
205
+ [ Thu Sep 15 22:51:14 2022 ] Eval epoch: 31
206
+ [ Thu Sep 15 22:53:03 2022 ] Mean test loss of 930 batches: 2.716003179550171.
207
+ [ Thu Sep 15 22:53:04 2022 ] Top1: 44.46%
208
+ [ Thu Sep 15 22:53:04 2022 ] Top5: 74.83%
209
+ [ Thu Sep 15 22:53:05 2022 ] Training epoch: 32
210
+ [ Thu Sep 15 22:54:04 2022 ] Batch(77/162) done. Loss: 0.2649 lr:0.100000 network_time: 0.0276
211
+ [ Thu Sep 15 22:55:05 2022 ] Eval epoch: 32
212
+ [ Thu Sep 15 22:56:56 2022 ] Mean test loss of 930 batches: 2.863656997680664.
213
+ [ Thu Sep 15 22:56:56 2022 ] Top1: 45.39%
214
+ [ Thu Sep 15 22:56:57 2022 ] Top5: 74.88%
215
+ [ Thu Sep 15 22:56:57 2022 ] Training epoch: 33
216
+ [ Thu Sep 15 22:57:12 2022 ] Batch(15/162) done. Loss: 0.3665 lr:0.100000 network_time: 0.0302
217
+ [ Thu Sep 15 22:58:24 2022 ] Batch(115/162) done. Loss: 0.7653 lr:0.100000 network_time: 0.0272
218
+ [ Thu Sep 15 22:58:58 2022 ] Eval epoch: 33
219
+ [ Thu Sep 15 23:00:47 2022 ] Mean test loss of 930 batches: 2.677206039428711.
220
+ [ Thu Sep 15 23:00:48 2022 ] Top1: 45.82%
221
+ [ Thu Sep 15 23:00:48 2022 ] Top5: 75.90%
222
+ [ Thu Sep 15 23:00:48 2022 ] Training epoch: 34
223
+ [ Thu Sep 15 23:01:30 2022 ] Batch(53/162) done. Loss: 0.3284 lr:0.100000 network_time: 0.0271
224
+ [ Thu Sep 15 23:02:43 2022 ] Batch(153/162) done. Loss: 0.4470 lr:0.100000 network_time: 0.0269
225
+ [ Thu Sep 15 23:02:49 2022 ] Eval epoch: 34
226
+ [ Thu Sep 15 23:04:38 2022 ] Mean test loss of 930 batches: 2.884477138519287.
227
+ [ Thu Sep 15 23:04:38 2022 ] Top1: 43.68%
228
+ [ Thu Sep 15 23:04:39 2022 ] Top5: 74.68%
229
+ [ Thu Sep 15 23:04:39 2022 ] Training epoch: 35
230
+ [ Thu Sep 15 23:05:48 2022 ] Batch(91/162) done. Loss: 0.4457 lr:0.100000 network_time: 0.0276
231
+ [ Thu Sep 15 23:06:39 2022 ] Eval epoch: 35
232
+ [ Thu Sep 15 23:08:29 2022 ] Mean test loss of 930 batches: 3.4506053924560547.
233
+ [ Thu Sep 15 23:08:29 2022 ] Top1: 42.19%
234
+ [ Thu Sep 15 23:08:30 2022 ] Top5: 71.98%
235
+ [ Thu Sep 15 23:08:30 2022 ] Training epoch: 36
236
+ [ Thu Sep 15 23:08:54 2022 ] Batch(29/162) done. Loss: 0.2119 lr:0.100000 network_time: 0.0262
237
+ [ Thu Sep 15 23:10:07 2022 ] Batch(129/162) done. Loss: 0.4515 lr:0.100000 network_time: 0.0281
238
+ [ Thu Sep 15 23:10:31 2022 ] Eval epoch: 36
239
+ [ Thu Sep 15 23:12:20 2022 ] Mean test loss of 930 batches: 2.7693991661071777.
240
+ [ Thu Sep 15 23:12:20 2022 ] Top1: 45.62%
241
+ [ Thu Sep 15 23:12:20 2022 ] Top5: 76.00%
242
+ [ Thu Sep 15 23:12:21 2022 ] Training epoch: 37
243
+ [ Thu Sep 15 23:13:13 2022 ] Batch(67/162) done. Loss: 0.2028 lr:0.100000 network_time: 0.0270
244
+ [ Thu Sep 15 23:14:22 2022 ] Eval epoch: 37
245
+ [ Thu Sep 15 23:16:11 2022 ] Mean test loss of 930 batches: 3.095299005508423.
246
+ [ Thu Sep 15 23:16:12 2022 ] Top1: 42.58%
247
+ [ Thu Sep 15 23:16:12 2022 ] Top5: 73.84%
248
+ [ Thu Sep 15 23:16:12 2022 ] Training epoch: 38
249
+ [ Thu Sep 15 23:16:20 2022 ] Batch(5/162) done. Loss: 0.4349 lr:0.100000 network_time: 0.0282
250
+ [ Thu Sep 15 23:17:32 2022 ] Batch(105/162) done. Loss: 0.2566 lr:0.100000 network_time: 0.0271
251
+ [ Thu Sep 15 23:18:13 2022 ] Eval epoch: 38
252
+ [ Thu Sep 15 23:20:03 2022 ] Mean test loss of 930 batches: 3.1413280963897705.
253
+ [ Thu Sep 15 23:20:04 2022 ] Top1: 43.90%
254
+ [ Thu Sep 15 23:20:04 2022 ] Top5: 74.12%
255
+ [ Thu Sep 15 23:20:04 2022 ] Training epoch: 39
256
+ [ Thu Sep 15 23:20:39 2022 ] Batch(43/162) done. Loss: 0.2047 lr:0.100000 network_time: 0.0348
257
+ [ Thu Sep 15 23:21:52 2022 ] Batch(143/162) done. Loss: 0.3025 lr:0.100000 network_time: 0.0266
258
+ [ Thu Sep 15 23:22:05 2022 ] Eval epoch: 39
259
+ [ Thu Sep 15 23:23:54 2022 ] Mean test loss of 930 batches: 3.481278419494629.
260
+ [ Thu Sep 15 23:23:54 2022 ] Top1: 41.59%
261
+ [ Thu Sep 15 23:23:55 2022 ] Top5: 72.09%
262
+ [ Thu Sep 15 23:23:55 2022 ] Training epoch: 40
263
+ [ Thu Sep 15 23:24:57 2022 ] Batch(81/162) done. Loss: 0.3147 lr:0.100000 network_time: 0.0350
264
+ [ Thu Sep 15 23:25:56 2022 ] Eval epoch: 40
265
+ [ Thu Sep 15 23:27:45 2022 ] Mean test loss of 930 batches: 2.9946095943450928.
266
+ [ Thu Sep 15 23:27:46 2022 ] Top1: 44.16%
267
+ [ Thu Sep 15 23:27:46 2022 ] Top5: 74.71%
268
+ [ Thu Sep 15 23:27:46 2022 ] Training epoch: 41
269
+ [ Thu Sep 15 23:28:03 2022 ] Batch(19/162) done. Loss: 0.3987 lr:0.100000 network_time: 0.0277
270
+ [ Thu Sep 15 23:29:16 2022 ] Batch(119/162) done. Loss: 0.4457 lr:0.100000 network_time: 0.0273
271
+ [ Thu Sep 15 23:29:47 2022 ] Eval epoch: 41
272
+ [ Thu Sep 15 23:31:36 2022 ] Mean test loss of 930 batches: 3.4053616523742676.
273
+ [ Thu Sep 15 23:31:37 2022 ] Top1: 43.50%
274
+ [ Thu Sep 15 23:31:37 2022 ] Top5: 73.65%
275
+ [ Thu Sep 15 23:31:38 2022 ] Training epoch: 42
276
+ [ Thu Sep 15 23:32:22 2022 ] Batch(57/162) done. Loss: 0.2492 lr:0.100000 network_time: 0.0278
277
+ [ Thu Sep 15 23:33:35 2022 ] Batch(157/162) done. Loss: 0.4862 lr:0.100000 network_time: 0.0267
278
+ [ Thu Sep 15 23:33:38 2022 ] Eval epoch: 42
279
+ [ Thu Sep 15 23:35:28 2022 ] Mean test loss of 930 batches: 2.9815728664398193.
280
+ [ Thu Sep 15 23:35:28 2022 ] Top1: 46.10%
281
+ [ Thu Sep 15 23:35:28 2022 ] Top5: 74.05%
282
+ [ Thu Sep 15 23:35:29 2022 ] Training epoch: 43
283
+ [ Thu Sep 15 23:36:41 2022 ] Batch(95/162) done. Loss: 0.4019 lr:0.100000 network_time: 0.0485
284
+ [ Thu Sep 15 23:37:29 2022 ] Eval epoch: 43
285
+ [ Thu Sep 15 23:39:19 2022 ] Mean test loss of 930 batches: 2.9869003295898438.
286
+ [ Thu Sep 15 23:39:20 2022 ] Top1: 46.54%
287
+ [ Thu Sep 15 23:39:20 2022 ] Top5: 77.24%
288
+ [ Thu Sep 15 23:39:20 2022 ] Training epoch: 44
289
+ [ Thu Sep 15 23:39:48 2022 ] Batch(33/162) done. Loss: 0.2617 lr:0.100000 network_time: 0.0308
290
+ [ Thu Sep 15 23:41:00 2022 ] Batch(133/162) done. Loss: 0.3671 lr:0.100000 network_time: 0.0278
291
+ [ Thu Sep 15 23:41:21 2022 ] Eval epoch: 44
292
+ [ Thu Sep 15 23:43:10 2022 ] Mean test loss of 930 batches: 3.2402682304382324.
293
+ [ Thu Sep 15 23:43:11 2022 ] Top1: 41.60%
294
+ [ Thu Sep 15 23:43:11 2022 ] Top5: 72.04%
295
+ [ Thu Sep 15 23:43:11 2022 ] Training epoch: 45
296
+ [ Thu Sep 15 23:44:06 2022 ] Batch(71/162) done. Loss: 0.3470 lr:0.100000 network_time: 0.0251
297
+ [ Thu Sep 15 23:45:12 2022 ] Eval epoch: 45
298
+ [ Thu Sep 15 23:47:01 2022 ] Mean test loss of 930 batches: 2.804935932159424.
299
+ [ Thu Sep 15 23:47:01 2022 ] Top1: 46.84%
300
+ [ Thu Sep 15 23:47:02 2022 ] Top5: 76.32%
301
+ [ Thu Sep 15 23:47:02 2022 ] Training epoch: 46
302
+ [ Thu Sep 15 23:47:12 2022 ] Batch(9/162) done. Loss: 0.1822 lr:0.100000 network_time: 0.0267
303
+ [ Thu Sep 15 23:48:24 2022 ] Batch(109/162) done. Loss: 0.1606 lr:0.100000 network_time: 0.0268
304
+ [ Thu Sep 15 23:49:02 2022 ] Eval epoch: 46
305
+ [ Thu Sep 15 23:50:53 2022 ] Mean test loss of 930 batches: 2.79233980178833.
306
+ [ Thu Sep 15 23:50:53 2022 ] Top1: 46.51%
307
+ [ Thu Sep 15 23:50:54 2022 ] Top5: 77.12%
308
+ [ Thu Sep 15 23:50:54 2022 ] Training epoch: 47
309
+ [ Thu Sep 15 23:51:32 2022 ] Batch(47/162) done. Loss: 0.2231 lr:0.100000 network_time: 0.0254
310
+ [ Thu Sep 15 23:52:45 2022 ] Batch(147/162) done. Loss: 0.2199 lr:0.100000 network_time: 0.0330
311
+ [ Thu Sep 15 23:52:55 2022 ] Eval epoch: 47
312
+ [ Thu Sep 15 23:54:45 2022 ] Mean test loss of 930 batches: 2.7845840454101562.
313
+ [ Thu Sep 15 23:54:45 2022 ] Top1: 46.73%
314
+ [ Thu Sep 15 23:54:46 2022 ] Top5: 76.58%
315
+ [ Thu Sep 15 23:54:46 2022 ] Training epoch: 48
316
+ [ Thu Sep 15 23:55:51 2022 ] Batch(85/162) done. Loss: 0.1455 lr:0.100000 network_time: 0.0265
317
+ [ Thu Sep 15 23:56:47 2022 ] Eval epoch: 48
318
+ [ Thu Sep 15 23:58:36 2022 ] Mean test loss of 930 batches: 2.9818427562713623.
319
+ [ Thu Sep 15 23:58:37 2022 ] Top1: 44.06%
320
+ [ Thu Sep 15 23:58:37 2022 ] Top5: 74.52%
321
+ [ Thu Sep 15 23:58:37 2022 ] Training epoch: 49
322
+ [ Thu Sep 15 23:58:58 2022 ] Batch(23/162) done. Loss: 0.1679 lr:0.100000 network_time: 0.0315
323
+ [ Fri Sep 16 00:00:10 2022 ] Batch(123/162) done. Loss: 0.2418 lr:0.100000 network_time: 0.0252
324
+ [ Fri Sep 16 00:00:38 2022 ] Eval epoch: 49
325
+ [ Fri Sep 16 00:02:28 2022 ] Mean test loss of 930 batches: 3.2953529357910156.
326
+ [ Fri Sep 16 00:02:28 2022 ] Top1: 44.71%
327
+ [ Fri Sep 16 00:02:28 2022 ] Top5: 73.97%
328
+ [ Fri Sep 16 00:02:29 2022 ] Training epoch: 50
329
+ [ Fri Sep 16 00:03:17 2022 ] Batch(61/162) done. Loss: 0.2752 lr:0.100000 network_time: 0.0311
330
+ [ Fri Sep 16 00:04:29 2022 ] Batch(161/162) done. Loss: 0.2480 lr:0.100000 network_time: 0.0275
331
+ [ Fri Sep 16 00:04:29 2022 ] Eval epoch: 50
332
+ [ Fri Sep 16 00:06:19 2022 ] Mean test loss of 930 batches: 3.178257942199707.
333
+ [ Fri Sep 16 00:06:20 2022 ] Top1: 46.33%
334
+ [ Fri Sep 16 00:06:20 2022 ] Top5: 75.49%
335
+ [ Fri Sep 16 00:06:21 2022 ] Training epoch: 51
336
+ [ Fri Sep 16 00:07:36 2022 ] Batch(99/162) done. Loss: 0.2603 lr:0.100000 network_time: 0.0318
337
+ [ Fri Sep 16 00:08:21 2022 ] Eval epoch: 51
338
+ [ Fri Sep 16 00:10:11 2022 ] Mean test loss of 930 batches: 2.8996267318725586.
339
+ [ Fri Sep 16 00:10:11 2022 ] Top1: 45.21%
340
+ [ Fri Sep 16 00:10:12 2022 ] Top5: 75.58%
341
+ [ Fri Sep 16 00:10:12 2022 ] Training epoch: 52
342
+ [ Fri Sep 16 00:10:43 2022 ] Batch(37/162) done. Loss: 0.2263 lr:0.100000 network_time: 0.0314
343
+ [ Fri Sep 16 00:11:55 2022 ] Batch(137/162) done. Loss: 0.2389 lr:0.100000 network_time: 0.0305
344
+ [ Fri Sep 16 00:12:13 2022 ] Eval epoch: 52
345
+ [ Fri Sep 16 00:14:03 2022 ] Mean test loss of 930 batches: 3.1332056522369385.
346
+ [ Fri Sep 16 00:14:04 2022 ] Top1: 43.39%
347
+ [ Fri Sep 16 00:14:04 2022 ] Top5: 74.20%
348
+ [ Fri Sep 16 00:14:05 2022 ] Training epoch: 53
349
+ [ Fri Sep 16 00:15:02 2022 ] Batch(75/162) done. Loss: 0.2281 lr:0.100000 network_time: 0.0278
350
+ [ Fri Sep 16 00:16:05 2022 ] Eval epoch: 53
351
+ [ Fri Sep 16 00:17:55 2022 ] Mean test loss of 930 batches: 3.1676747798919678.
352
+ [ Fri Sep 16 00:17:55 2022 ] Top1: 45.65%
353
+ [ Fri Sep 16 00:17:56 2022 ] Top5: 74.51%
354
+ [ Fri Sep 16 00:17:56 2022 ] Training epoch: 54
355
+ [ Fri Sep 16 00:18:09 2022 ] Batch(13/162) done. Loss: 0.2242 lr:0.100000 network_time: 0.0284
356
+ [ Fri Sep 16 00:19:21 2022 ] Batch(113/162) done. Loss: 0.3021 lr:0.100000 network_time: 0.0271
357
+ [ Fri Sep 16 00:19:56 2022 ] Eval epoch: 54
358
+ [ Fri Sep 16 00:21:46 2022 ] Mean test loss of 930 batches: 2.7405104637145996.
359
+ [ Fri Sep 16 00:21:47 2022 ] Top1: 47.49%
360
+ [ Fri Sep 16 00:21:47 2022 ] Top5: 77.36%
361
+ [ Fri Sep 16 00:21:47 2022 ] Training epoch: 55
362
+ [ Fri Sep 16 00:22:28 2022 ] Batch(51/162) done. Loss: 0.0692 lr:0.100000 network_time: 0.0263
363
+ [ Fri Sep 16 00:23:40 2022 ] Batch(151/162) done. Loss: 0.1702 lr:0.100000 network_time: 0.0274
364
+ [ Fri Sep 16 00:23:47 2022 ] Eval epoch: 55
365
+ [ Fri Sep 16 00:25:37 2022 ] Mean test loss of 930 batches: 2.920945167541504.
366
+ [ Fri Sep 16 00:25:38 2022 ] Top1: 45.22%
367
+ [ Fri Sep 16 00:25:38 2022 ] Top5: 74.90%
368
+ [ Fri Sep 16 00:25:38 2022 ] Training epoch: 56
369
+ [ Fri Sep 16 00:26:46 2022 ] Batch(89/162) done. Loss: 0.2224 lr:0.100000 network_time: 0.0254
370
+ [ Fri Sep 16 00:27:39 2022 ] Eval epoch: 56
371
+ [ Fri Sep 16 00:29:29 2022 ] Mean test loss of 930 batches: 2.671355724334717.
372
+ [ Fri Sep 16 00:29:29 2022 ] Top1: 47.52%
373
+ [ Fri Sep 16 00:29:30 2022 ] Top5: 77.24%
374
+ [ Fri Sep 16 00:29:30 2022 ] Training epoch: 57
375
+ [ Fri Sep 16 00:29:53 2022 ] Batch(27/162) done. Loss: 0.2738 lr:0.100000 network_time: 0.0271
376
+ [ Fri Sep 16 00:31:05 2022 ] Batch(127/162) done. Loss: 0.2604 lr:0.100000 network_time: 0.0356
377
+ [ Fri Sep 16 00:31:30 2022 ] Eval epoch: 57
378
+ [ Fri Sep 16 00:33:20 2022 ] Mean test loss of 930 batches: 2.9581689834594727.
379
+ [ Fri Sep 16 00:33:20 2022 ] Top1: 46.12%
380
+ [ Fri Sep 16 00:33:21 2022 ] Top5: 75.99%
381
+ [ Fri Sep 16 00:33:21 2022 ] Training epoch: 58
382
+ [ Fri Sep 16 00:34:11 2022 ] Batch(65/162) done. Loss: 0.1503 lr:0.100000 network_time: 0.0274
383
+ [ Fri Sep 16 00:35:21 2022 ] Eval epoch: 58
384
+ [ Fri Sep 16 00:37:11 2022 ] Mean test loss of 930 batches: 2.9120888710021973.
385
+ [ Fri Sep 16 00:37:11 2022 ] Top1: 47.65%
386
+ [ Fri Sep 16 00:37:12 2022 ] Top5: 77.31%
387
+ [ Fri Sep 16 00:37:12 2022 ] Training epoch: 59
388
+ [ Fri Sep 16 00:37:17 2022 ] Batch(3/162) done. Loss: 0.2663 lr:0.100000 network_time: 0.0256
389
+ [ Fri Sep 16 00:38:30 2022 ] Batch(103/162) done. Loss: 0.1600 lr:0.100000 network_time: 0.0275
390
+ [ Fri Sep 16 00:39:12 2022 ] Eval epoch: 59
391
+ [ Fri Sep 16 00:41:02 2022 ] Mean test loss of 930 batches: 2.8816802501678467.
392
+ [ Fri Sep 16 00:41:02 2022 ] Top1: 46.62%
393
+ [ Fri Sep 16 00:41:03 2022 ] Top5: 75.71%
394
+ [ Fri Sep 16 00:41:03 2022 ] Training epoch: 60
395
+ [ Fri Sep 16 00:41:36 2022 ] Batch(41/162) done. Loss: 0.3349 lr:0.100000 network_time: 0.0259
396
+ [ Fri Sep 16 00:42:49 2022 ] Batch(141/162) done. Loss: 0.2002 lr:0.100000 network_time: 0.0256
397
+ [ Fri Sep 16 00:43:04 2022 ] Eval epoch: 60
398
+ [ Fri Sep 16 00:44:53 2022 ] Mean test loss of 930 batches: 3.1312150955200195.
399
+ [ Fri Sep 16 00:44:54 2022 ] Top1: 46.17%
400
+ [ Fri Sep 16 00:44:54 2022 ] Top5: 74.38%
401
+ [ Fri Sep 16 00:44:54 2022 ] Training epoch: 61
402
+ [ Fri Sep 16 00:45:55 2022 ] Batch(79/162) done. Loss: 0.1466 lr:0.010000 network_time: 0.0264
403
+ [ Fri Sep 16 00:46:55 2022 ] Eval epoch: 61
404
+ [ Fri Sep 16 00:48:45 2022 ] Mean test loss of 930 batches: 2.4748167991638184.
405
+ [ Fri Sep 16 00:48:45 2022 ] Top1: 53.49%
406
+ [ Fri Sep 16 00:48:46 2022 ] Top5: 80.49%
407
+ [ Fri Sep 16 00:48:46 2022 ] Training epoch: 62
408
+ [ Fri Sep 16 00:49:01 2022 ] Batch(17/162) done. Loss: 0.0275 lr:0.010000 network_time: 0.0258
409
+ [ Fri Sep 16 00:50:14 2022 ] Batch(117/162) done. Loss: 0.0287 lr:0.010000 network_time: 0.0290
410
+ [ Fri Sep 16 00:50:46 2022 ] Eval epoch: 62
411
+ [ Fri Sep 16 00:52:36 2022 ] Mean test loss of 930 batches: 2.490679979324341.
412
+ [ Fri Sep 16 00:52:36 2022 ] Top1: 53.89%
413
+ [ Fri Sep 16 00:52:37 2022 ] Top5: 80.59%
414
+ [ Fri Sep 16 00:52:37 2022 ] Training epoch: 63
415
+ [ Fri Sep 16 00:53:20 2022 ] Batch(55/162) done. Loss: 0.0312 lr:0.010000 network_time: 0.0266
416
+ [ Fri Sep 16 00:54:33 2022 ] Batch(155/162) done. Loss: 0.0125 lr:0.010000 network_time: 0.0260
417
+ [ Fri Sep 16 00:54:37 2022 ] Eval epoch: 63
418
+ [ Fri Sep 16 00:56:27 2022 ] Mean test loss of 930 batches: 2.4980976581573486.
419
+ [ Fri Sep 16 00:56:27 2022 ] Top1: 54.03%
420
+ [ Fri Sep 16 00:56:28 2022 ] Top5: 80.86%
421
+ [ Fri Sep 16 00:56:28 2022 ] Training epoch: 64
422
+ [ Fri Sep 16 00:57:39 2022 ] Batch(93/162) done. Loss: 0.0411 lr:0.010000 network_time: 0.0260
423
+ [ Fri Sep 16 00:58:29 2022 ] Eval epoch: 64
424
+ [ Fri Sep 16 01:00:18 2022 ] Mean test loss of 930 batches: 2.519536018371582.
425
+ [ Fri Sep 16 01:00:18 2022 ] Top1: 54.18%
426
+ [ Fri Sep 16 01:00:19 2022 ] Top5: 80.82%
427
+ [ Fri Sep 16 01:00:19 2022 ] Training epoch: 65
428
+ [ Fri Sep 16 01:00:45 2022 ] Batch(31/162) done. Loss: 0.0299 lr:0.010000 network_time: 0.0276
429
+ [ Fri Sep 16 01:01:58 2022 ] Batch(131/162) done. Loss: 0.0315 lr:0.010000 network_time: 0.0299
430
+ [ Fri Sep 16 01:02:20 2022 ] Eval epoch: 65
431
+ [ Fri Sep 16 01:04:09 2022 ] Mean test loss of 930 batches: 2.5533487796783447.
432
+ [ Fri Sep 16 01:04:09 2022 ] Top1: 53.81%
433
+ [ Fri Sep 16 01:04:10 2022 ] Top5: 80.72%
434
+ [ Fri Sep 16 01:04:10 2022 ] Training epoch: 66
435
+ [ Fri Sep 16 01:05:04 2022 ] Batch(69/162) done. Loss: 0.0256 lr:0.010000 network_time: 0.0266
436
+ [ Fri Sep 16 01:06:11 2022 ] Eval epoch: 66
437
+ [ Fri Sep 16 01:08:01 2022 ] Mean test loss of 930 batches: 2.5415453910827637.
438
+ [ Fri Sep 16 01:08:01 2022 ] Top1: 54.21%
439
+ [ Fri Sep 16 01:08:01 2022 ] Top5: 80.87%
440
+ [ Fri Sep 16 01:08:02 2022 ] Training epoch: 67
441
+ [ Fri Sep 16 01:08:11 2022 ] Batch(7/162) done. Loss: 0.0065 lr:0.010000 network_time: 0.0315
442
+ [ Fri Sep 16 01:09:23 2022 ] Batch(107/162) done. Loss: 0.0125 lr:0.010000 network_time: 0.0663
443
+ [ Fri Sep 16 01:10:03 2022 ] Eval epoch: 67
444
+ [ Fri Sep 16 01:11:52 2022 ] Mean test loss of 930 batches: 2.536372661590576.
445
+ [ Fri Sep 16 01:11:53 2022 ] Top1: 54.25%
446
+ [ Fri Sep 16 01:11:53 2022 ] Top5: 80.92%
447
+ [ Fri Sep 16 01:11:53 2022 ] Training epoch: 68
448
+ [ Fri Sep 16 01:12:29 2022 ] Batch(45/162) done. Loss: 0.0395 lr:0.010000 network_time: 0.0299
449
+ [ Fri Sep 16 01:13:42 2022 ] Batch(145/162) done. Loss: 0.0242 lr:0.010000 network_time: 0.0269
450
+ [ Fri Sep 16 01:13:54 2022 ] Eval epoch: 68
451
+ [ Fri Sep 16 01:15:43 2022 ] Mean test loss of 930 batches: 2.539608955383301.
452
+ [ Fri Sep 16 01:15:43 2022 ] Top1: 54.20%
453
+ [ Fri Sep 16 01:15:44 2022 ] Top5: 81.04%
454
+ [ Fri Sep 16 01:15:44 2022 ] Training epoch: 69
455
+ [ Fri Sep 16 01:16:48 2022 ] Batch(83/162) done. Loss: 0.0367 lr:0.010000 network_time: 0.0266
456
+ [ Fri Sep 16 01:17:44 2022 ] Eval epoch: 69
457
+ [ Fri Sep 16 01:19:34 2022 ] Mean test loss of 930 batches: 2.5502073764801025.
458
+ [ Fri Sep 16 01:19:35 2022 ] Top1: 54.33%
459
+ [ Fri Sep 16 01:19:35 2022 ] Top5: 80.96%
460
+ [ Fri Sep 16 01:19:35 2022 ] Training epoch: 70
461
+ [ Fri Sep 16 01:19:55 2022 ] Batch(21/162) done. Loss: 0.0162 lr:0.010000 network_time: 0.0308
462
+ [ Fri Sep 16 01:21:07 2022 ] Batch(121/162) done. Loss: 0.0184 lr:0.010000 network_time: 0.0257
463
+ [ Fri Sep 16 01:21:36 2022 ] Eval epoch: 70
464
+ [ Fri Sep 16 01:23:26 2022 ] Mean test loss of 930 batches: 2.5347869396209717.
465
+ [ Fri Sep 16 01:23:27 2022 ] Top1: 54.52%
466
+ [ Fri Sep 16 01:23:27 2022 ] Top5: 81.08%
467
+ [ Fri Sep 16 01:23:27 2022 ] Training epoch: 71
468
+ [ Fri Sep 16 01:24:14 2022 ] Batch(59/162) done. Loss: 0.0151 lr:0.010000 network_time: 0.0277
469
+ [ Fri Sep 16 01:25:26 2022 ] Batch(159/162) done. Loss: 0.0078 lr:0.010000 network_time: 0.0268
470
+ [ Fri Sep 16 01:25:28 2022 ] Eval epoch: 71
471
+ [ Fri Sep 16 01:27:18 2022 ] Mean test loss of 930 batches: 2.5435900688171387.
472
+ [ Fri Sep 16 01:27:18 2022 ] Top1: 54.45%
473
+ [ Fri Sep 16 01:27:18 2022 ] Top5: 81.15%
474
+ [ Fri Sep 16 01:27:19 2022 ] Training epoch: 72
475
+ [ Fri Sep 16 01:28:33 2022 ] Batch(97/162) done. Loss: 0.0244 lr:0.010000 network_time: 0.0289
476
+ [ Fri Sep 16 01:29:19 2022 ] Eval epoch: 72
477
+ [ Fri Sep 16 01:31:09 2022 ] Mean test loss of 930 batches: 2.6118814945220947.
478
+ [ Fri Sep 16 01:31:09 2022 ] Top1: 54.16%
479
+ [ Fri Sep 16 01:31:10 2022 ] Top5: 80.86%
480
+ [ Fri Sep 16 01:31:10 2022 ] Training epoch: 73
481
+ [ Fri Sep 16 01:31:39 2022 ] Batch(35/162) done. Loss: 0.0033 lr:0.010000 network_time: 0.0262
482
+ [ Fri Sep 16 01:32:52 2022 ] Batch(135/162) done. Loss: 0.0129 lr:0.010000 network_time: 0.0261
483
+ [ Fri Sep 16 01:33:11 2022 ] Eval epoch: 73
484
+ [ Fri Sep 16 01:35:00 2022 ] Mean test loss of 930 batches: 2.569013833999634.
485
+ [ Fri Sep 16 01:35:01 2022 ] Top1: 54.44%
486
+ [ Fri Sep 16 01:35:01 2022 ] Top5: 81.23%
487
+ [ Fri Sep 16 01:35:01 2022 ] Training epoch: 74
488
+ [ Fri Sep 16 01:35:58 2022 ] Batch(73/162) done. Loss: 0.0043 lr:0.010000 network_time: 0.0440
489
+ [ Fri Sep 16 01:37:02 2022 ] Eval epoch: 74
490
+ [ Fri Sep 16 01:38:51 2022 ] Mean test loss of 930 batches: 2.572516679763794.
491
+ [ Fri Sep 16 01:38:52 2022 ] Top1: 54.38%
492
+ [ Fri Sep 16 01:38:52 2022 ] Top5: 81.14%
493
+ [ Fri Sep 16 01:38:52 2022 ] Training epoch: 75
494
+ [ Fri Sep 16 01:39:04 2022 ] Batch(11/162) done. Loss: 0.0047 lr:0.010000 network_time: 0.0298
495
+ [ Fri Sep 16 01:40:16 2022 ] Batch(111/162) done. Loss: 0.0327 lr:0.010000 network_time: 0.0266
496
+ [ Fri Sep 16 01:40:53 2022 ] Eval epoch: 75
497
+ [ Fri Sep 16 01:42:43 2022 ] Mean test loss of 930 batches: 2.5794084072113037.
498
+ [ Fri Sep 16 01:42:43 2022 ] Top1: 54.24%
499
+ [ Fri Sep 16 01:42:43 2022 ] Top5: 80.90%
500
+ [ Fri Sep 16 01:42:44 2022 ] Training epoch: 76
501
+ [ Fri Sep 16 01:43:23 2022 ] Batch(49/162) done. Loss: 0.0127 lr:0.010000 network_time: 0.0267
502
+ [ Fri Sep 16 01:44:35 2022 ] Batch(149/162) done. Loss: 0.0227 lr:0.010000 network_time: 0.0269
503
+ [ Fri Sep 16 01:44:44 2022 ] Eval epoch: 76
504
+ [ Fri Sep 16 01:46:34 2022 ] Mean test loss of 930 batches: 2.6028902530670166.
505
+ [ Fri Sep 16 01:46:34 2022 ] Top1: 54.26%
506
+ [ Fri Sep 16 01:46:35 2022 ] Top5: 80.83%
507
+ [ Fri Sep 16 01:46:35 2022 ] Training epoch: 77
508
+ [ Fri Sep 16 01:47:42 2022 ] Batch(87/162) done. Loss: 0.0140 lr:0.010000 network_time: 0.0266
509
+ [ Fri Sep 16 01:48:36 2022 ] Eval epoch: 77
510
+ [ Fri Sep 16 01:50:25 2022 ] Mean test loss of 930 batches: 2.587498903274536.
511
+ [ Fri Sep 16 01:50:25 2022 ] Top1: 54.37%
512
+ [ Fri Sep 16 01:50:26 2022 ] Top5: 80.98%
513
+ [ Fri Sep 16 01:50:26 2022 ] Training epoch: 78
514
+ [ Fri Sep 16 01:50:48 2022 ] Batch(25/162) done. Loss: 0.0059 lr:0.010000 network_time: 0.0265
515
+ [ Fri Sep 16 01:52:00 2022 ] Batch(125/162) done. Loss: 0.0070 lr:0.010000 network_time: 0.0260
516
+ [ Fri Sep 16 01:52:27 2022 ] Eval epoch: 78
517
+ [ Fri Sep 16 01:54:17 2022 ] Mean test loss of 930 batches: 2.579465866088867.
518
+ [ Fri Sep 16 01:54:17 2022 ] Top1: 54.72%
519
+ [ Fri Sep 16 01:54:17 2022 ] Top5: 81.03%
520
+ [ Fri Sep 16 01:54:18 2022 ] Training epoch: 79
521
+ [ Fri Sep 16 01:55:07 2022 ] Batch(63/162) done. Loss: 0.0047 lr:0.010000 network_time: 0.0279
522
+ [ Fri Sep 16 01:56:18 2022 ] Eval epoch: 79
523
+ [ Fri Sep 16 01:58:08 2022 ] Mean test loss of 930 batches: 2.603970527648926.
524
+ [ Fri Sep 16 01:58:08 2022 ] Top1: 54.30%
525
+ [ Fri Sep 16 01:58:09 2022 ] Top5: 80.87%
526
+ [ Fri Sep 16 01:58:09 2022 ] Training epoch: 80
527
+ [ Fri Sep 16 01:58:13 2022 ] Batch(1/162) done. Loss: 0.0055 lr:0.010000 network_time: 0.0293
528
+ [ Fri Sep 16 01:59:26 2022 ] Batch(101/162) done. Loss: 0.0043 lr:0.010000 network_time: 0.0263
529
+ [ Fri Sep 16 02:00:10 2022 ] Eval epoch: 80
530
+ [ Fri Sep 16 02:01:59 2022 ] Mean test loss of 930 batches: 2.5931570529937744.
531
+ [ Fri Sep 16 02:01:59 2022 ] Top1: 54.43%
532
+ [ Fri Sep 16 02:02:00 2022 ] Top5: 81.05%
533
+ [ Fri Sep 16 02:02:00 2022 ] Training epoch: 81
534
+ [ Fri Sep 16 02:02:32 2022 ] Batch(39/162) done. Loss: 0.0105 lr:0.001000 network_time: 0.0305
535
+ [ Fri Sep 16 02:03:44 2022 ] Batch(139/162) done. Loss: 0.0017 lr:0.001000 network_time: 0.0307
536
+ [ Fri Sep 16 02:04:00 2022 ] Eval epoch: 81
537
+ [ Fri Sep 16 02:05:50 2022 ] Mean test loss of 930 batches: 2.6085758209228516.
538
+ [ Fri Sep 16 02:05:51 2022 ] Top1: 54.33%
539
+ [ Fri Sep 16 02:05:51 2022 ] Top5: 80.87%
540
+ [ Fri Sep 16 02:05:51 2022 ] Training epoch: 82
541
+ [ Fri Sep 16 02:06:51 2022 ] Batch(77/162) done. Loss: 0.0111 lr:0.001000 network_time: 0.0309
542
+ [ Fri Sep 16 02:07:52 2022 ] Eval epoch: 82
543
+ [ Fri Sep 16 02:09:42 2022 ] Mean test loss of 930 batches: 2.5661566257476807.
544
+ [ Fri Sep 16 02:09:42 2022 ] Top1: 54.70%
545
+ [ Fri Sep 16 02:09:43 2022 ] Top5: 81.25%
546
+ [ Fri Sep 16 02:09:43 2022 ] Training epoch: 83
547
+ [ Fri Sep 16 02:09:57 2022 ] Batch(15/162) done. Loss: 0.0092 lr:0.001000 network_time: 0.0292
548
+ [ Fri Sep 16 02:11:10 2022 ] Batch(115/162) done. Loss: 0.0125 lr:0.001000 network_time: 0.0315
549
+ [ Fri Sep 16 02:11:44 2022 ] Eval epoch: 83
550
+ [ Fri Sep 16 02:13:33 2022 ] Mean test loss of 930 batches: 2.590534210205078.
551
+ [ Fri Sep 16 02:13:33 2022 ] Top1: 54.68%
552
+ [ Fri Sep 16 02:13:34 2022 ] Top5: 81.02%
553
+ [ Fri Sep 16 02:13:34 2022 ] Training epoch: 84
554
+ [ Fri Sep 16 02:14:16 2022 ] Batch(53/162) done. Loss: 0.0171 lr:0.001000 network_time: 0.0291
555
+ [ Fri Sep 16 02:15:29 2022 ] Batch(153/162) done. Loss: 0.0115 lr:0.001000 network_time: 0.0257
556
+ [ Fri Sep 16 02:15:35 2022 ] Eval epoch: 84
557
+ [ Fri Sep 16 02:17:25 2022 ] Mean test loss of 930 batches: 2.5949807167053223.
558
+ [ Fri Sep 16 02:17:25 2022 ] Top1: 54.61%
559
+ [ Fri Sep 16 02:17:25 2022 ] Top5: 80.92%
560
+ [ Fri Sep 16 02:17:26 2022 ] Training epoch: 85
561
+ [ Fri Sep 16 02:18:36 2022 ] Batch(91/162) done. Loss: 0.0127 lr:0.001000 network_time: 0.0315
562
+ [ Fri Sep 16 02:19:27 2022 ] Eval epoch: 85
563
+ [ Fri Sep 16 02:21:17 2022 ] Mean test loss of 930 batches: 2.608656406402588.
564
+ [ Fri Sep 16 02:21:17 2022 ] Top1: 54.46%
565
+ [ Fri Sep 16 02:21:18 2022 ] Top5: 81.11%
566
+ [ Fri Sep 16 02:21:18 2022 ] Training epoch: 86
567
+ [ Fri Sep 16 02:21:42 2022 ] Batch(29/162) done. Loss: 0.0120 lr:0.001000 network_time: 0.0271
568
+ [ Fri Sep 16 02:22:55 2022 ] Batch(129/162) done. Loss: 0.0073 lr:0.001000 network_time: 0.0270
569
+ [ Fri Sep 16 02:23:18 2022 ] Eval epoch: 86
570
+ [ Fri Sep 16 02:25:08 2022 ] Mean test loss of 930 batches: 2.586932420730591.
571
+ [ Fri Sep 16 02:25:08 2022 ] Top1: 54.47%
572
+ [ Fri Sep 16 02:25:09 2022 ] Top5: 81.06%
573
+ [ Fri Sep 16 02:25:09 2022 ] Training epoch: 87
574
+ [ Fri Sep 16 02:26:01 2022 ] Batch(67/162) done. Loss: 0.0127 lr:0.001000 network_time: 0.0279
575
+ [ Fri Sep 16 02:27:09 2022 ] Eval epoch: 87
576
+ [ Fri Sep 16 02:28:58 2022 ] Mean test loss of 930 batches: 2.5606770515441895.
577
+ [ Fri Sep 16 02:28:59 2022 ] Top1: 54.67%
578
+ [ Fri Sep 16 02:28:59 2022 ] Top5: 81.27%
579
+ [ Fri Sep 16 02:28:59 2022 ] Training epoch: 88
580
+ [ Fri Sep 16 02:29:07 2022 ] Batch(5/162) done. Loss: 0.0105 lr:0.001000 network_time: 0.0260
581
+ [ Fri Sep 16 02:30:19 2022 ] Batch(105/162) done. Loss: 0.0090 lr:0.001000 network_time: 0.0265
582
+ [ Fri Sep 16 02:31:00 2022 ] Eval epoch: 88
583
+ [ Fri Sep 16 02:32:49 2022 ] Mean test loss of 930 batches: 2.5782501697540283.
584
+ [ Fri Sep 16 02:32:49 2022 ] Top1: 54.59%
585
+ [ Fri Sep 16 02:32:50 2022 ] Top5: 81.03%
586
+ [ Fri Sep 16 02:32:50 2022 ] Training epoch: 89
587
+ [ Fri Sep 16 02:33:25 2022 ] Batch(43/162) done. Loss: 0.0029 lr:0.001000 network_time: 0.0306
588
+ [ Fri Sep 16 02:34:38 2022 ] Batch(143/162) done. Loss: 0.0068 lr:0.001000 network_time: 0.0255
589
+ [ Fri Sep 16 02:34:51 2022 ] Eval epoch: 89
590
+ [ Fri Sep 16 02:36:40 2022 ] Mean test loss of 930 batches: 2.583160161972046.
591
+ [ Fri Sep 16 02:36:41 2022 ] Top1: 54.79%
592
+ [ Fri Sep 16 02:36:41 2022 ] Top5: 81.18%
593
+ [ Fri Sep 16 02:36:41 2022 ] Training epoch: 90
594
+ [ Fri Sep 16 02:37:43 2022 ] Batch(81/162) done. Loss: 0.0046 lr:0.001000 network_time: 0.0283
595
+ [ Fri Sep 16 02:38:42 2022 ] Eval epoch: 90
596
+ [ Fri Sep 16 02:40:31 2022 ] Mean test loss of 930 batches: 2.5848512649536133.
597
+ [ Fri Sep 16 02:40:31 2022 ] Top1: 54.56%
598
+ [ Fri Sep 16 02:40:32 2022 ] Top5: 81.17%
599
+ [ Fri Sep 16 02:40:32 2022 ] Training epoch: 91
600
+ [ Fri Sep 16 02:40:49 2022 ] Batch(19/162) done. Loss: 0.0071 lr:0.001000 network_time: 0.0335
601
+ [ Fri Sep 16 02:42:01 2022 ] Batch(119/162) done. Loss: 0.0055 lr:0.001000 network_time: 0.0258
602
+ [ Fri Sep 16 02:42:32 2022 ] Eval epoch: 91
603
+ [ Fri Sep 16 02:44:21 2022 ] Mean test loss of 930 batches: 2.625767707824707.
604
+ [ Fri Sep 16 02:44:22 2022 ] Top1: 54.39%
605
+ [ Fri Sep 16 02:44:22 2022 ] Top5: 81.06%
606
+ [ Fri Sep 16 02:44:22 2022 ] Training epoch: 92
607
+ [ Fri Sep 16 02:45:07 2022 ] Batch(57/162) done. Loss: 0.0181 lr:0.001000 network_time: 0.0260
608
+ [ Fri Sep 16 02:46:20 2022 ] Batch(157/162) done. Loss: 0.0126 lr:0.001000 network_time: 0.0266
609
+ [ Fri Sep 16 02:46:23 2022 ] Eval epoch: 92
610
+ [ Fri Sep 16 02:48:12 2022 ] Mean test loss of 930 batches: 2.5934081077575684.
611
+ [ Fri Sep 16 02:48:13 2022 ] Top1: 54.54%
612
+ [ Fri Sep 16 02:48:13 2022 ] Top5: 81.03%
613
+ [ Fri Sep 16 02:48:13 2022 ] Training epoch: 93
614
+ [ Fri Sep 16 02:49:26 2022 ] Batch(95/162) done. Loss: 0.0061 lr:0.001000 network_time: 0.0678
615
+ [ Fri Sep 16 02:50:14 2022 ] Eval epoch: 93
616
+ [ Fri Sep 16 02:52:03 2022 ] Mean test loss of 930 batches: 2.5692086219787598.
617
+ [ Fri Sep 16 02:52:04 2022 ] Top1: 55.00%
618
+ [ Fri Sep 16 02:52:04 2022 ] Top5: 81.34%
619
+ [ Fri Sep 16 02:52:04 2022 ] Training epoch: 94
620
+ [ Fri Sep 16 02:52:32 2022 ] Batch(33/162) done. Loss: 0.0225 lr:0.001000 network_time: 0.0273
621
+ [ Fri Sep 16 02:53:45 2022 ] Batch(133/162) done. Loss: 0.0196 lr:0.001000 network_time: 0.0302
622
+ [ Fri Sep 16 02:54:05 2022 ] Eval epoch: 94
623
+ [ Fri Sep 16 02:55:55 2022 ] Mean test loss of 930 batches: 2.561540126800537.
624
+ [ Fri Sep 16 02:55:55 2022 ] Top1: 54.92%
625
+ [ Fri Sep 16 02:55:56 2022 ] Top5: 81.30%
626
+ [ Fri Sep 16 02:55:56 2022 ] Training epoch: 95
627
+ [ Fri Sep 16 02:56:51 2022 ] Batch(71/162) done. Loss: 0.0138 lr:0.001000 network_time: 0.0272
628
+ [ Fri Sep 16 02:57:56 2022 ] Eval epoch: 95
629
+ [ Fri Sep 16 02:59:46 2022 ] Mean test loss of 930 batches: 2.600646495819092.
630
+ [ Fri Sep 16 02:59:47 2022 ] Top1: 54.43%
631
+ [ Fri Sep 16 02:59:47 2022 ] Top5: 80.92%
632
+ [ Fri Sep 16 02:59:47 2022 ] Training epoch: 96
633
+ [ Fri Sep 16 02:59:58 2022 ] Batch(9/162) done. Loss: 0.0091 lr:0.001000 network_time: 0.0254
634
+ [ Fri Sep 16 03:01:10 2022 ] Batch(109/162) done. Loss: 0.0059 lr:0.001000 network_time: 0.0275
635
+ [ Fri Sep 16 03:01:48 2022 ] Eval epoch: 96
636
+ [ Fri Sep 16 03:03:38 2022 ] Mean test loss of 930 batches: 2.594008207321167.
637
+ [ Fri Sep 16 03:03:38 2022 ] Top1: 54.50%
638
+ [ Fri Sep 16 03:03:39 2022 ] Top5: 81.09%
639
+ [ Fri Sep 16 03:03:39 2022 ] Training epoch: 97
640
+ [ Fri Sep 16 03:04:17 2022 ] Batch(47/162) done. Loss: 0.0162 lr:0.001000 network_time: 0.0313
641
+ [ Fri Sep 16 03:05:29 2022 ] Batch(147/162) done. Loss: 0.0074 lr:0.001000 network_time: 0.0307
642
+ [ Fri Sep 16 03:05:40 2022 ] Eval epoch: 97
643
+ [ Fri Sep 16 03:07:29 2022 ] Mean test loss of 930 batches: 2.56474232673645.
644
+ [ Fri Sep 16 03:07:30 2022 ] Top1: 54.66%
645
+ [ Fri Sep 16 03:07:30 2022 ] Top5: 81.15%
646
+ [ Fri Sep 16 03:07:30 2022 ] Training epoch: 98
647
+ [ Fri Sep 16 03:08:36 2022 ] Batch(85/162) done. Loss: 0.0135 lr:0.001000 network_time: 0.0295
648
+ [ Fri Sep 16 03:09:31 2022 ] Eval epoch: 98
649
+ [ Fri Sep 16 03:11:21 2022 ] Mean test loss of 930 batches: 2.5852057933807373.
650
+ [ Fri Sep 16 03:11:21 2022 ] Top1: 54.71%
651
+ [ Fri Sep 16 03:11:21 2022 ] Top5: 81.15%
652
+ [ Fri Sep 16 03:11:22 2022 ] Training epoch: 99
653
+ [ Fri Sep 16 03:11:42 2022 ] Batch(23/162) done. Loss: 0.0056 lr:0.001000 network_time: 0.0263
654
+ [ Fri Sep 16 03:12:55 2022 ] Batch(123/162) done. Loss: 0.0053 lr:0.001000 network_time: 0.0262
655
+ [ Fri Sep 16 03:13:22 2022 ] Eval epoch: 99
656
+ [ Fri Sep 16 03:15:11 2022 ] Mean test loss of 930 batches: 2.5815749168395996.
657
+ [ Fri Sep 16 03:15:12 2022 ] Top1: 54.67%
658
+ [ Fri Sep 16 03:15:12 2022 ] Top5: 81.28%
659
+ [ Fri Sep 16 03:15:13 2022 ] Training epoch: 100
660
+ [ Fri Sep 16 03:16:00 2022 ] Batch(61/162) done. Loss: 0.0195 lr:0.001000 network_time: 0.0220
661
+ [ Fri Sep 16 03:17:13 2022 ] Batch(161/162) done. Loss: 0.0064 lr:0.001000 network_time: 0.0269
662
+ [ Fri Sep 16 03:17:13 2022 ] Eval epoch: 100
663
+ [ Fri Sep 16 03:19:02 2022 ] Mean test loss of 930 batches: 2.606626510620117.
664
+ [ Fri Sep 16 03:19:03 2022 ] Top1: 54.28%
665
+ [ Fri Sep 16 03:19:03 2022 ] Top5: 80.92%
666
+ [ Fri Sep 16 03:19:04 2022 ] Training epoch: 101
667
+ [ Fri Sep 16 03:20:19 2022 ] Batch(99/162) done. Loss: 0.0070 lr:0.000100 network_time: 0.0313
668
+ [ Fri Sep 16 03:21:04 2022 ] Eval epoch: 101
669
+ [ Fri Sep 16 03:22:53 2022 ] Mean test loss of 930 batches: 2.575133800506592.
670
+ [ Fri Sep 16 03:22:54 2022 ] Top1: 54.92%
671
+ [ Fri Sep 16 03:22:54 2022 ] Top5: 81.40%
672
+ [ Fri Sep 16 03:22:54 2022 ] Training epoch: 102
673
+ [ Fri Sep 16 03:23:25 2022 ] Batch(37/162) done. Loss: 0.0085 lr:0.000100 network_time: 0.0315
674
+ [ Fri Sep 16 03:24:37 2022 ] Batch(137/162) done. Loss: 0.0057 lr:0.000100 network_time: 0.0299
675
+ [ Fri Sep 16 03:24:55 2022 ] Eval epoch: 102
676
+ [ Fri Sep 16 03:26:45 2022 ] Mean test loss of 930 batches: 2.5923445224761963.
677
+ [ Fri Sep 16 03:26:45 2022 ] Top1: 54.70%
678
+ [ Fri Sep 16 03:26:46 2022 ] Top5: 81.13%
679
+ [ Fri Sep 16 03:26:46 2022 ] Training epoch: 103
680
+ [ Fri Sep 16 03:27:44 2022 ] Batch(75/162) done. Loss: 0.0136 lr:0.000100 network_time: 0.0268
681
+ [ Fri Sep 16 03:28:47 2022 ] Eval epoch: 103
682
+ [ Fri Sep 16 03:30:36 2022 ] Mean test loss of 930 batches: 2.6140177249908447.
683
+ [ Fri Sep 16 03:30:37 2022 ] Top1: 54.28%
684
+ [ Fri Sep 16 03:30:37 2022 ] Top5: 81.05%
685
+ [ Fri Sep 16 03:30:38 2022 ] Training epoch: 104
686
+ [ Fri Sep 16 03:30:50 2022 ] Batch(13/162) done. Loss: 0.0077 lr:0.000100 network_time: 0.0275
687
+ [ Fri Sep 16 03:32:03 2022 ] Batch(113/162) done. Loss: 0.0058 lr:0.000100 network_time: 0.0257
688
+ [ Fri Sep 16 03:32:38 2022 ] Eval epoch: 104
689
+ [ Fri Sep 16 03:34:27 2022 ] Mean test loss of 930 batches: 2.602022647857666.
690
+ [ Fri Sep 16 03:34:27 2022 ] Top1: 54.47%
691
+ [ Fri Sep 16 03:34:28 2022 ] Top5: 81.10%
692
+ [ Fri Sep 16 03:34:28 2022 ] Training epoch: 105
693
+ [ Fri Sep 16 03:35:09 2022 ] Batch(51/162) done. Loss: 0.0099 lr:0.000100 network_time: 0.0324
694
+ [ Fri Sep 16 03:36:21 2022 ] Batch(151/162) done. Loss: 0.0118 lr:0.000100 network_time: 0.0259
695
+ [ Fri Sep 16 03:36:29 2022 ] Eval epoch: 105
696
+ [ Fri Sep 16 03:38:18 2022 ] Mean test loss of 930 batches: 2.5884015560150146.
697
+ [ Fri Sep 16 03:38:19 2022 ] Top1: 54.51%
698
+ [ Fri Sep 16 03:38:19 2022 ] Top5: 81.17%
699
+ [ Fri Sep 16 03:38:19 2022 ] Training epoch: 106
700
+ [ Fri Sep 16 03:39:27 2022 ] Batch(89/162) done. Loss: 0.0040 lr:0.000100 network_time: 0.0231
701
+ [ Fri Sep 16 03:40:20 2022 ] Eval epoch: 106
702
+ [ Fri Sep 16 03:42:09 2022 ] Mean test loss of 930 batches: 2.61933970451355.
703
+ [ Fri Sep 16 03:42:09 2022 ] Top1: 54.38%
704
+ [ Fri Sep 16 03:42:10 2022 ] Top5: 80.87%
705
+ [ Fri Sep 16 03:42:10 2022 ] Training epoch: 107
706
+ [ Fri Sep 16 03:42:33 2022 ] Batch(27/162) done. Loss: 0.0046 lr:0.000100 network_time: 0.0302
707
+ [ Fri Sep 16 03:43:45 2022 ] Batch(127/162) done. Loss: 0.0163 lr:0.000100 network_time: 0.0320
708
+ [ Fri Sep 16 03:44:10 2022 ] Eval epoch: 107
709
+ [ Fri Sep 16 03:45:59 2022 ] Mean test loss of 930 batches: 2.5805156230926514.
710
+ [ Fri Sep 16 03:46:00 2022 ] Top1: 54.50%
711
+ [ Fri Sep 16 03:46:00 2022 ] Top5: 81.09%
712
+ [ Fri Sep 16 03:46:01 2022 ] Training epoch: 108
713
+ [ Fri Sep 16 03:46:51 2022 ] Batch(65/162) done. Loss: 0.0039 lr:0.000100 network_time: 0.0311
714
+ [ Fri Sep 16 03:48:01 2022 ] Eval epoch: 108
715
+ [ Fri Sep 16 03:49:50 2022 ] Mean test loss of 930 batches: 2.622124433517456.
716
+ [ Fri Sep 16 03:49:51 2022 ] Top1: 54.37%
717
+ [ Fri Sep 16 03:49:51 2022 ] Top5: 81.01%
718
+ [ Fri Sep 16 03:49:51 2022 ] Training epoch: 109
719
+ [ Fri Sep 16 03:49:57 2022 ] Batch(3/162) done. Loss: 0.0086 lr:0.000100 network_time: 0.0315
720
+ [ Fri Sep 16 03:51:10 2022 ] Batch(103/162) done. Loss: 0.0456 lr:0.000100 network_time: 0.0265
721
+ [ Fri Sep 16 03:51:52 2022 ] Eval epoch: 109
722
+ [ Fri Sep 16 03:53:41 2022 ] Mean test loss of 930 batches: 2.5847461223602295.
723
+ [ Fri Sep 16 03:53:42 2022 ] Top1: 54.65%
724
+ [ Fri Sep 16 03:53:42 2022 ] Top5: 81.22%
725
+ [ Fri Sep 16 03:53:42 2022 ] Training epoch: 110
726
+ [ Fri Sep 16 03:54:16 2022 ] Batch(41/162) done. Loss: 0.0170 lr:0.000100 network_time: 0.0301
727
+ [ Fri Sep 16 03:55:28 2022 ] Batch(141/162) done. Loss: 0.0042 lr:0.000100 network_time: 0.0270
728
+ [ Fri Sep 16 03:55:43 2022 ] Eval epoch: 110
729
+ [ Fri Sep 16 03:57:32 2022 ] Mean test loss of 930 batches: 2.6004409790039062.
730
+ [ Fri Sep 16 03:57:33 2022 ] Top1: 54.47%
731
+ [ Fri Sep 16 03:57:33 2022 ] Top5: 80.99%
732
+ [ Fri Sep 16 03:57:33 2022 ] Training epoch: 111
733
+ [ Fri Sep 16 03:58:34 2022 ] Batch(79/162) done. Loss: 0.0052 lr:0.000100 network_time: 0.0327
734
+ [ Fri Sep 16 03:59:34 2022 ] Eval epoch: 111
735
+ [ Fri Sep 16 04:01:24 2022 ] Mean test loss of 930 batches: 2.621633291244507.
736
+ [ Fri Sep 16 04:01:24 2022 ] Top1: 54.11%
737
+ [ Fri Sep 16 04:01:25 2022 ] Top5: 80.90%
738
+ [ Fri Sep 16 04:01:25 2022 ] Training epoch: 112
739
+ [ Fri Sep 16 04:01:41 2022 ] Batch(17/162) done. Loss: 0.0093 lr:0.000100 network_time: 0.0266
740
+ [ Fri Sep 16 04:02:54 2022 ] Batch(117/162) done. Loss: 0.0104 lr:0.000100 network_time: 0.0268
741
+ [ Fri Sep 16 04:03:26 2022 ] Eval epoch: 112
742
+ [ Fri Sep 16 04:05:15 2022 ] Mean test loss of 930 batches: 2.5725698471069336.
743
+ [ Fri Sep 16 04:05:15 2022 ] Top1: 54.85%
744
+ [ Fri Sep 16 04:05:16 2022 ] Top5: 81.15%
745
+ [ Fri Sep 16 04:05:16 2022 ] Training epoch: 113
746
+ [ Fri Sep 16 04:05:59 2022 ] Batch(55/162) done. Loss: 0.0106 lr:0.000100 network_time: 0.0316
747
+ [ Fri Sep 16 04:07:12 2022 ] Batch(155/162) done. Loss: 0.0043 lr:0.000100 network_time: 0.0264
748
+ [ Fri Sep 16 04:07:16 2022 ] Eval epoch: 113
749
+ [ Fri Sep 16 04:09:06 2022 ] Mean test loss of 930 batches: 2.610018730163574.
750
+ [ Fri Sep 16 04:09:06 2022 ] Top1: 54.21%
751
+ [ Fri Sep 16 04:09:06 2022 ] Top5: 80.90%
752
+ [ Fri Sep 16 04:09:07 2022 ] Training epoch: 114
753
+ [ Fri Sep 16 04:10:18 2022 ] Batch(93/162) done. Loss: 0.0059 lr:0.000100 network_time: 0.0265
754
+ [ Fri Sep 16 04:11:07 2022 ] Eval epoch: 114
755
+ [ Fri Sep 16 04:12:57 2022 ] Mean test loss of 930 batches: 2.5909993648529053.
756
+ [ Fri Sep 16 04:12:57 2022 ] Top1: 54.70%
757
+ [ Fri Sep 16 04:12:58 2022 ] Top5: 81.17%
758
+ [ Fri Sep 16 04:12:58 2022 ] Training epoch: 115
759
+ [ Fri Sep 16 04:13:24 2022 ] Batch(31/162) done. Loss: 0.0038 lr:0.000100 network_time: 0.0312
760
+ [ Fri Sep 16 04:14:37 2022 ] Batch(131/162) done. Loss: 0.0082 lr:0.000100 network_time: 0.0268
761
+ [ Fri Sep 16 04:14:59 2022 ] Eval epoch: 115
762
+ [ Fri Sep 16 04:16:48 2022 ] Mean test loss of 930 batches: 2.6113758087158203.
763
+ [ Fri Sep 16 04:16:48 2022 ] Top1: 54.65%
764
+ [ Fri Sep 16 04:16:49 2022 ] Top5: 80.93%
765
+ [ Fri Sep 16 04:16:49 2022 ] Training epoch: 116
766
+ [ Fri Sep 16 04:17:42 2022 ] Batch(69/162) done. Loss: 0.0166 lr:0.000100 network_time: 0.0276
767
+ [ Fri Sep 16 04:18:49 2022 ] Eval epoch: 116
768
+ [ Fri Sep 16 04:20:38 2022 ] Mean test loss of 930 batches: 2.5807549953460693.
769
+ [ Fri Sep 16 04:20:39 2022 ] Top1: 54.80%
770
+ [ Fri Sep 16 04:20:39 2022 ] Top5: 81.31%
771
+ [ Fri Sep 16 04:20:39 2022 ] Training epoch: 117
772
+ [ Fri Sep 16 04:20:48 2022 ] Batch(7/162) done. Loss: 0.0119 lr:0.000100 network_time: 0.0555
773
+ [ Fri Sep 16 04:22:01 2022 ] Batch(107/162) done. Loss: 0.0034 lr:0.000100 network_time: 0.0301
774
+ [ Fri Sep 16 04:22:40 2022 ] Eval epoch: 117
775
+ [ Fri Sep 16 04:24:29 2022 ] Mean test loss of 930 batches: 2.5913569927215576.
776
+ [ Fri Sep 16 04:24:30 2022 ] Top1: 54.78%
777
+ [ Fri Sep 16 04:24:30 2022 ] Top5: 81.19%
778
+ [ Fri Sep 16 04:24:30 2022 ] Training epoch: 118
779
+ [ Fri Sep 16 04:25:07 2022 ] Batch(45/162) done. Loss: 0.0136 lr:0.000100 network_time: 0.0284
780
+ [ Fri Sep 16 04:26:19 2022 ] Batch(145/162) done. Loss: 0.0099 lr:0.000100 network_time: 0.0270
781
+ [ Fri Sep 16 04:26:31 2022 ] Eval epoch: 118
782
+ [ Fri Sep 16 04:28:20 2022 ] Mean test loss of 930 batches: 2.5927035808563232.
783
+ [ Fri Sep 16 04:28:21 2022 ] Top1: 54.50%
784
+ [ Fri Sep 16 04:28:21 2022 ] Top5: 81.17%
785
+ [ Fri Sep 16 04:28:21 2022 ] Training epoch: 119
786
+ [ Fri Sep 16 04:29:25 2022 ] Batch(83/162) done. Loss: 0.0105 lr:0.000100 network_time: 0.0313
787
+ [ Fri Sep 16 04:30:22 2022 ] Eval epoch: 119
788
+ [ Fri Sep 16 04:32:12 2022 ] Mean test loss of 930 batches: 2.582989454269409.
789
+ [ Fri Sep 16 04:32:12 2022 ] Top1: 54.89%
790
+ [ Fri Sep 16 04:32:12 2022 ] Top5: 81.16%
791
+ [ Fri Sep 16 04:32:13 2022 ] Training epoch: 120
792
+ [ Fri Sep 16 04:32:32 2022 ] Batch(21/162) done. Loss: 0.0411 lr:0.000100 network_time: 0.0279
793
+ [ Fri Sep 16 04:33:44 2022 ] Batch(121/162) done. Loss: 0.0056 lr:0.000100 network_time: 0.0327
794
+ [ Fri Sep 16 04:34:13 2022 ] Eval epoch: 120
795
+ [ Fri Sep 16 04:36:03 2022 ] Mean test loss of 930 batches: 2.5880775451660156.
796
+ [ Fri Sep 16 04:36:03 2022 ] Top1: 54.77%
797
+ [ Fri Sep 16 04:36:04 2022 ] Top5: 81.30%
798
+ [ Fri Sep 16 04:36:04 2022 ] Training epoch: 121
799
+ [ Fri Sep 16 04:36:50 2022 ] Batch(59/162) done. Loss: 0.0113 lr:0.000100 network_time: 0.0280
800
+ [ Fri Sep 16 04:38:03 2022 ] Batch(159/162) done. Loss: 0.0105 lr:0.000100 network_time: 0.0266
801
+ [ Fri Sep 16 04:38:04 2022 ] Eval epoch: 121
802
+ [ Fri Sep 16 04:39:53 2022 ] Mean test loss of 930 batches: 2.565481185913086.
803
+ [ Fri Sep 16 04:39:54 2022 ] Top1: 54.65%
804
+ [ Fri Sep 16 04:39:54 2022 ] Top5: 81.13%
805
+ [ Fri Sep 16 04:39:55 2022 ] Training epoch: 122
806
+ [ Fri Sep 16 04:41:08 2022 ] Batch(97/162) done. Loss: 0.0066 lr:0.000100 network_time: 0.0375
807
+ [ Fri Sep 16 04:41:55 2022 ] Eval epoch: 122
808
+ [ Fri Sep 16 04:43:44 2022 ] Mean test loss of 930 batches: 2.5854990482330322.
809
+ [ Fri Sep 16 04:43:44 2022 ] Top1: 54.53%
810
+ [ Fri Sep 16 04:43:45 2022 ] Top5: 81.14%
811
+ [ Fri Sep 16 04:43:45 2022 ] Training epoch: 123
812
+ [ Fri Sep 16 04:44:14 2022 ] Batch(35/162) done. Loss: 0.0107 lr:0.000100 network_time: 0.0266
813
+ [ Fri Sep 16 04:45:27 2022 ] Batch(135/162) done. Loss: 0.0059 lr:0.000100 network_time: 0.0329
814
+ [ Fri Sep 16 04:45:46 2022 ] Eval epoch: 123
815
+ [ Fri Sep 16 04:47:35 2022 ] Mean test loss of 930 batches: 2.5903828144073486.
816
+ [ Fri Sep 16 04:47:35 2022 ] Top1: 54.24%
817
+ [ Fri Sep 16 04:47:36 2022 ] Top5: 80.99%
818
+ [ Fri Sep 16 04:47:36 2022 ] Training epoch: 124
819
+ [ Fri Sep 16 04:48:32 2022 ] Batch(73/162) done. Loss: 0.0074 lr:0.000100 network_time: 0.0268
820
+ [ Fri Sep 16 04:49:36 2022 ] Eval epoch: 124
821
+ [ Fri Sep 16 04:51:26 2022 ] Mean test loss of 930 batches: 2.5589847564697266.
822
+ [ Fri Sep 16 04:51:26 2022 ] Top1: 55.00%
823
+ [ Fri Sep 16 04:51:26 2022 ] Top5: 81.28%
824
+ [ Fri Sep 16 04:51:27 2022 ] Training epoch: 125
825
+ [ Fri Sep 16 04:51:38 2022 ] Batch(11/162) done. Loss: 0.0028 lr:0.000100 network_time: 0.0296
826
+ [ Fri Sep 16 04:52:51 2022 ] Batch(111/162) done. Loss: 0.0088 lr:0.000100 network_time: 0.0270
827
+ [ Fri Sep 16 04:53:27 2022 ] Eval epoch: 125
828
+ [ Fri Sep 16 04:55:16 2022 ] Mean test loss of 930 batches: 2.583209991455078.
829
+ [ Fri Sep 16 04:55:16 2022 ] Top1: 54.80%
830
+ [ Fri Sep 16 04:55:17 2022 ] Top5: 81.28%
831
+ [ Fri Sep 16 04:55:17 2022 ] Training epoch: 126
832
+ [ Fri Sep 16 04:55:56 2022 ] Batch(49/162) done. Loss: 0.0033 lr:0.000100 network_time: 0.0266
833
+ [ Fri Sep 16 04:57:08 2022 ] Batch(149/162) done. Loss: 0.0096 lr:0.000100 network_time: 0.0266
834
+ [ Fri Sep 16 04:57:17 2022 ] Eval epoch: 126
835
+ [ Fri Sep 16 04:59:07 2022 ] Mean test loss of 930 batches: 2.584449052810669.
836
+ [ Fri Sep 16 04:59:08 2022 ] Top1: 54.66%
837
+ [ Fri Sep 16 04:59:08 2022 ] Top5: 81.07%
838
+ [ Fri Sep 16 04:59:08 2022 ] Training epoch: 127
839
+ [ Fri Sep 16 05:00:15 2022 ] Batch(87/162) done. Loss: 0.0107 lr:0.000100 network_time: 0.0302
840
+ [ Fri Sep 16 05:01:08 2022 ] Eval epoch: 127
841
+ [ Fri Sep 16 05:02:58 2022 ] Mean test loss of 930 batches: 2.5924336910247803.
842
+ [ Fri Sep 16 05:02:58 2022 ] Top1: 54.68%
843
+ [ Fri Sep 16 05:02:58 2022 ] Top5: 81.16%
844
+ [ Fri Sep 16 05:02:59 2022 ] Training epoch: 128
845
+ [ Fri Sep 16 05:03:21 2022 ] Batch(25/162) done. Loss: 0.0049 lr:0.000100 network_time: 0.0275
846
+ [ Fri Sep 16 05:04:33 2022 ] Batch(125/162) done. Loss: 0.0092 lr:0.000100 network_time: 0.0316
847
+ [ Fri Sep 16 05:04:59 2022 ] Eval epoch: 128
848
+ [ Fri Sep 16 05:06:48 2022 ] Mean test loss of 930 batches: 2.6165969371795654.
849
+ [ Fri Sep 16 05:06:49 2022 ] Top1: 54.43%
850
+ [ Fri Sep 16 05:06:49 2022 ] Top5: 80.89%
851
+ [ Fri Sep 16 05:06:49 2022 ] Training epoch: 129
852
+ [ Fri Sep 16 05:07:39 2022 ] Batch(63/162) done. Loss: 0.0040 lr:0.000100 network_time: 0.0267
853
+ [ Fri Sep 16 05:08:50 2022 ] Eval epoch: 129
854
+ [ Fri Sep 16 05:10:39 2022 ] Mean test loss of 930 batches: 2.5882484912872314.
855
+ [ Fri Sep 16 05:10:39 2022 ] Top1: 54.59%
856
+ [ Fri Sep 16 05:10:39 2022 ] Top5: 80.97%
857
+ [ Fri Sep 16 05:10:40 2022 ] Training epoch: 130
858
+ [ Fri Sep 16 05:10:44 2022 ] Batch(1/162) done. Loss: 0.0038 lr:0.000100 network_time: 0.0279
859
+ [ Fri Sep 16 05:11:56 2022 ] Batch(101/162) done. Loss: 0.0132 lr:0.000100 network_time: 0.0285
860
+ [ Fri Sep 16 05:12:40 2022 ] Eval epoch: 130
861
+ [ Fri Sep 16 05:14:29 2022 ] Mean test loss of 930 batches: 2.577380657196045.
862
+ [ Fri Sep 16 05:14:30 2022 ] Top1: 54.80%
863
+ [ Fri Sep 16 05:14:30 2022 ] Top5: 81.19%
864
+ [ Fri Sep 16 05:14:30 2022 ] Training epoch: 131
865
+ [ Fri Sep 16 05:15:02 2022 ] Batch(39/162) done. Loss: 0.0056 lr:0.000100 network_time: 0.0264
866
+ [ Fri Sep 16 05:16:15 2022 ] Batch(139/162) done. Loss: 0.0055 lr:0.000100 network_time: 0.0304
867
+ [ Fri Sep 16 05:16:31 2022 ] Eval epoch: 131
868
+ [ Fri Sep 16 05:18:19 2022 ] Mean test loss of 930 batches: 2.5679023265838623.
869
+ [ Fri Sep 16 05:18:20 2022 ] Top1: 54.80%
870
+ [ Fri Sep 16 05:18:20 2022 ] Top5: 81.27%
871
+ [ Fri Sep 16 05:18:21 2022 ] Training epoch: 132
872
+ [ Fri Sep 16 05:19:20 2022 ] Batch(77/162) done. Loss: 0.0100 lr:0.000100 network_time: 0.0267
873
+ [ Fri Sep 16 05:20:21 2022 ] Eval epoch: 132
874
+ [ Fri Sep 16 05:22:10 2022 ] Mean test loss of 930 batches: 2.5951454639434814.
875
+ [ Fri Sep 16 05:22:10 2022 ] Top1: 54.70%
876
+ [ Fri Sep 16 05:22:11 2022 ] Top5: 81.24%
877
+ [ Fri Sep 16 05:22:11 2022 ] Training epoch: 133
878
+ [ Fri Sep 16 05:22:26 2022 ] Batch(15/162) done. Loss: 0.0056 lr:0.000100 network_time: 0.0269
879
+ [ Fri Sep 16 05:23:38 2022 ] Batch(115/162) done. Loss: 0.0043 lr:0.000100 network_time: 0.0427
880
+ [ Fri Sep 16 05:24:12 2022 ] Eval epoch: 133
881
+ [ Fri Sep 16 05:26:00 2022 ] Mean test loss of 930 batches: 2.583064556121826.
882
+ [ Fri Sep 16 05:26:01 2022 ] Top1: 54.66%
883
+ [ Fri Sep 16 05:26:01 2022 ] Top5: 81.21%
884
+ [ Fri Sep 16 05:26:02 2022 ] Training epoch: 134
885
+ [ Fri Sep 16 05:26:43 2022 ] Batch(53/162) done. Loss: 0.0039 lr:0.000100 network_time: 0.0267
886
+ [ Fri Sep 16 05:27:56 2022 ] Batch(153/162) done. Loss: 0.0109 lr:0.000100 network_time: 0.0470
887
+ [ Fri Sep 16 05:28:02 2022 ] Eval epoch: 134
888
+ [ Fri Sep 16 05:29:51 2022 ] Mean test loss of 930 batches: 2.6534018516540527.
889
+ [ Fri Sep 16 05:29:52 2022 ] Top1: 53.97%
890
+ [ Fri Sep 16 05:29:52 2022 ] Top5: 80.83%
891
+ [ Fri Sep 16 05:29:52 2022 ] Training epoch: 135
892
+ [ Fri Sep 16 05:31:02 2022 ] Batch(91/162) done. Loss: 0.0041 lr:0.000100 network_time: 0.0265
893
+ [ Fri Sep 16 05:31:53 2022 ] Eval epoch: 135
894
+ [ Fri Sep 16 05:33:42 2022 ] Mean test loss of 930 batches: 2.5870189666748047.
895
+ [ Fri Sep 16 05:33:42 2022 ] Top1: 54.72%
896
+ [ Fri Sep 16 05:33:43 2022 ] Top5: 81.20%
897
+ [ Fri Sep 16 05:33:43 2022 ] Training epoch: 136
898
+ [ Fri Sep 16 05:34:08 2022 ] Batch(29/162) done. Loss: 0.0045 lr:0.000100 network_time: 0.0276
899
+ [ Fri Sep 16 05:35:20 2022 ] Batch(129/162) done. Loss: 0.0047 lr:0.000100 network_time: 0.0276
900
+ [ Fri Sep 16 05:35:44 2022 ] Eval epoch: 136
901
+ [ Fri Sep 16 05:37:33 2022 ] Mean test loss of 930 batches: 2.5728559494018555.
902
+ [ Fri Sep 16 05:37:33 2022 ] Top1: 54.68%
903
+ [ Fri Sep 16 05:37:33 2022 ] Top5: 81.26%
904
+ [ Fri Sep 16 05:37:34 2022 ] Training epoch: 137
905
+ [ Fri Sep 16 05:38:26 2022 ] Batch(67/162) done. Loss: 0.0056 lr:0.000100 network_time: 0.0306
906
+ [ Fri Sep 16 05:39:35 2022 ] Eval epoch: 137
907
+ [ Fri Sep 16 05:41:24 2022 ] Mean test loss of 930 batches: 2.589679002761841.
908
+ [ Fri Sep 16 05:41:24 2022 ] Top1: 54.66%
909
+ [ Fri Sep 16 05:41:25 2022 ] Top5: 81.09%
910
+ [ Fri Sep 16 05:41:25 2022 ] Training epoch: 138
911
+ [ Fri Sep 16 05:41:33 2022 ] Batch(5/162) done. Loss: 0.0089 lr:0.000100 network_time: 0.0311
912
+ [ Fri Sep 16 05:42:45 2022 ] Batch(105/162) done. Loss: 0.0056 lr:0.000100 network_time: 0.0315
913
+ [ Fri Sep 16 05:43:26 2022 ] Eval epoch: 138
914
+ [ Fri Sep 16 05:45:15 2022 ] Mean test loss of 930 batches: 2.5958192348480225.
915
+ [ Fri Sep 16 05:45:16 2022 ] Top1: 54.56%
916
+ [ Fri Sep 16 05:45:16 2022 ] Top5: 81.23%
917
+ [ Fri Sep 16 05:45:16 2022 ] Training epoch: 139
918
+ [ Fri Sep 16 05:45:52 2022 ] Batch(43/162) done. Loss: 0.0162 lr:0.000100 network_time: 0.0276
919
+ [ Fri Sep 16 05:47:04 2022 ] Batch(143/162) done. Loss: 0.0039 lr:0.000100 network_time: 0.0267
920
+ [ Fri Sep 16 05:47:18 2022 ] Eval epoch: 139
921
+ [ Fri Sep 16 05:49:07 2022 ] Mean test loss of 930 batches: 2.6090312004089355.
922
+ [ Fri Sep 16 05:49:07 2022 ] Top1: 54.48%
923
+ [ Fri Sep 16 05:49:08 2022 ] Top5: 81.00%
924
+ [ Fri Sep 16 05:49:08 2022 ] Training epoch: 140
925
+ [ Fri Sep 16 05:50:11 2022 ] Batch(81/162) done. Loss: 0.0042 lr:0.000100 network_time: 0.0277
926
+ [ Fri Sep 16 05:51:09 2022 ] Eval epoch: 140
927
+ [ Fri Sep 16 05:52:58 2022 ] Mean test loss of 930 batches: 2.596174716949463.
928
+ [ Fri Sep 16 05:52:59 2022 ] Top1: 54.44%
929
+ [ Fri Sep 16 05:52:59 2022 ] Top5: 81.09%
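The tail of this log shows the usual Shift-GCN schedule: Top1 sits around 46-48% while the learning rate is 0.1, jumps to ~53-55% once the rate drops to 0.01 at epoch 61, and flattens out near 54.5-55.0% Top1 / ~81% Top5 after the later drops at epochs 81 and 101. Below is a minimal parsing sketch for pulling those per-epoch accuracies out of a log.txt of this format; the path is just an example pointing at the log above, and the regexes only assume the `Eval epoch:` / `Top1:` / `Top5:` lines shown here.

```python
import re

# Minimal sketch: extract (epoch, Top1, Top5) from a Shift-GCN log.txt of the
# format above ("Eval epoch: N", then "Top1: x%", then "Top5: y%").
LOG_PATH = "ckpt/Others/Shift-GCN/ntu120_xset/ntu120_bone_xset/log.txt"  # example path

results = []
epoch = None
with open(LOG_PATH) as f:
    for line in f:
        m = re.search(r"Eval epoch: (\d+)", line)
        if m:
            epoch = int(m.group(1))
        m = re.search(r"Top1: ([\d.]+)%", line)
        if m and epoch is not None:
            results.append({"epoch": epoch, "top1": float(m.group(1))})
        m = re.search(r"Top5: ([\d.]+)%", line)
        if m and results and results[-1]["epoch"] == epoch:
            results[-1]["top5"] = float(m.group(1))

best = max(results, key=lambda r: r["top1"])
print(f"best Top1 {best['top1']:.2f}% (Top5 {best.get('top5', float('nan')):.2f}%) at epoch {best['epoch']}")
```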
ckpt/Others/Shift-GCN/ntu120_xset/ntu120_bone_xset/shift_gcn.py ADDED
@@ -0,0 +1,216 @@
+ import torch
+ import torch.nn as nn
+ import torch.nn.functional as F
+ from torch.autograd import Variable
+ import numpy as np
+ import math
+
+ import sys
+ sys.path.append("./model/Temporal_shift/")
+
+ from cuda.shift import Shift
+
+
+ def import_class(name):
+     components = name.split('.')
+     mod = __import__(components[0])
+     for comp in components[1:]:
+         mod = getattr(mod, comp)
+     return mod
+
+ def conv_init(conv):
+     nn.init.kaiming_normal(conv.weight, mode='fan_out')
+     nn.init.constant(conv.bias, 0)
+
+
+ def bn_init(bn, scale):
+     nn.init.constant(bn.weight, scale)
+     nn.init.constant(bn.bias, 0)
+
+
+ class tcn(nn.Module):
+     def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
+         super(tcn, self).__init__()
+         pad = int((kernel_size - 1) / 2)
+         self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
+                               stride=(stride, 1))
+
+         self.bn = nn.BatchNorm2d(out_channels)
+         self.relu = nn.ReLU()
+         conv_init(self.conv)
+         bn_init(self.bn, 1)
+
+     def forward(self, x):
+         x = self.bn(self.conv(x))
+         return x
+
+
+ class Shift_tcn(nn.Module):
+     def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
+         super(Shift_tcn, self).__init__()
+
+         self.in_channels = in_channels
+         self.out_channels = out_channels
+
+         self.bn = nn.BatchNorm2d(in_channels)
+         self.bn2 = nn.BatchNorm2d(in_channels)
+         bn_init(self.bn2, 1)
+         self.relu = nn.ReLU(inplace=True)
+         self.shift_in = Shift(channel=in_channels, stride=1, init_scale=1)
+         self.shift_out = Shift(channel=out_channels, stride=stride, init_scale=1)
+
+         self.temporal_linear = nn.Conv2d(in_channels, out_channels, 1)
+         nn.init.kaiming_normal(self.temporal_linear.weight, mode='fan_out')
+
+     def forward(self, x):
+         x = self.bn(x)
+         # shift1
+         x = self.shift_in(x)
+         x = self.temporal_linear(x)
+         x = self.relu(x)
+         # shift2
+         x = self.shift_out(x)
+         x = self.bn2(x)
+         return x
+
+
+ class Shift_gcn(nn.Module):
+     def __init__(self, in_channels, out_channels, A, coff_embedding=4, num_subset=3):
+         super(Shift_gcn, self).__init__()
+         self.in_channels = in_channels
+         self.out_channels = out_channels
+         if in_channels != out_channels:
+             self.down = nn.Sequential(
+                 nn.Conv2d(in_channels, out_channels, 1),
+                 nn.BatchNorm2d(out_channels)
+             )
+         else:
+             self.down = lambda x: x
+
+         self.Linear_weight = nn.Parameter(torch.zeros(in_channels, out_channels, requires_grad=True, device='cuda'), requires_grad=True)
+         nn.init.normal_(self.Linear_weight, 0,math.sqrt(1.0/out_channels))
+
+         self.Linear_bias = nn.Parameter(torch.zeros(1,1,out_channels,requires_grad=True,device='cuda'),requires_grad=True)
+         nn.init.constant(self.Linear_bias, 0)
+
+         self.Feature_Mask = nn.Parameter(torch.ones(1,25,in_channels, requires_grad=True,device='cuda'),requires_grad=True)
+         nn.init.constant(self.Feature_Mask, 0)
+
+         self.bn = nn.BatchNorm1d(25*out_channels)
+         self.relu = nn.ReLU()
+
+         for m in self.modules():
+             if isinstance(m, nn.Conv2d):
+                 conv_init(m)
+             elif isinstance(m, nn.BatchNorm2d):
+                 bn_init(m, 1)
+
+         index_array = np.empty(25*in_channels).astype(np.int)
+         for i in range(25):
+             for j in range(in_channels):
+                 index_array[i*in_channels + j] = (i*in_channels + j + j*in_channels)%(in_channels*25)
+         self.shift_in = nn.Parameter(torch.from_numpy(index_array),requires_grad=False)
+
+         index_array = np.empty(25*out_channels).astype(np.int)
+         for i in range(25):
+             for j in range(out_channels):
+                 index_array[i*out_channels + j] = (i*out_channels + j - j*out_channels)%(out_channels*25)
+         self.shift_out = nn.Parameter(torch.from_numpy(index_array),requires_grad=False)
+
+
+     def forward(self, x0):
+         n, c, t, v = x0.size()
+         x = x0.permute(0,2,3,1).contiguous()
+
+         # shift1
+         x = x.view(n*t,v*c)
+         x = torch.index_select(x, 1, self.shift_in)
+         x = x.view(n*t,v,c)
+         x = x * (torch.tanh(self.Feature_Mask)+1)
+
+         x = torch.einsum('nwc,cd->nwd', (x, self.Linear_weight)).contiguous() # nt,v,c
+         x = x + self.Linear_bias
+
+         # shift2
+         x = x.view(n*t,-1)
+         x = torch.index_select(x, 1, self.shift_out)
+         x = self.bn(x)
+         x = x.view(n,t,v,self.out_channels).permute(0,3,1,2) # n,c,t,v
+
+         x = x + self.down(x0)
+         x = self.relu(x)
+         return x
+
+
+ class TCN_GCN_unit(nn.Module):
+     def __init__(self, in_channels, out_channels, A, stride=1, residual=True):
+         super(TCN_GCN_unit, self).__init__()
+         self.gcn1 = Shift_gcn(in_channels, out_channels, A)
+         self.tcn1 = Shift_tcn(out_channels, out_channels, stride=stride)
+         self.relu = nn.ReLU()
+
+         if not residual:
+             self.residual = lambda x: 0
+
+         elif (in_channels == out_channels) and (stride == 1):
+             self.residual = lambda x: x
+         else:
+             self.residual = tcn(in_channels, out_channels, kernel_size=1, stride=stride)
+
+     def forward(self, x):
+         x = self.tcn1(self.gcn1(x)) + self.residual(x)
+         return self.relu(x)
+
+
+ class Model(nn.Module):
+     def __init__(self, num_class=60, num_point=25, num_person=2, graph=None, graph_args=dict(), in_channels=3):
+         super(Model, self).__init__()
+
+         if graph is None:
+             raise ValueError()
+         else:
+             Graph = import_class(graph)
+             self.graph = Graph(**graph_args)
+
+         A = self.graph.A
+         self.data_bn = nn.BatchNorm1d(num_person * in_channels * num_point)
+
+         self.l1 = TCN_GCN_unit(3, 64, A, residual=False)
+         self.l2 = TCN_GCN_unit(64, 64, A)
+         self.l3 = TCN_GCN_unit(64, 64, A)
+         self.l4 = TCN_GCN_unit(64, 64, A)
+         self.l5 = TCN_GCN_unit(64, 128, A, stride=2)
+         self.l6 = TCN_GCN_unit(128, 128, A)
+         self.l7 = TCN_GCN_unit(128, 128, A)
+         self.l8 = TCN_GCN_unit(128, 256, A, stride=2)
+         self.l9 = TCN_GCN_unit(256, 256, A)
+         self.l10 = TCN_GCN_unit(256, 256, A)
+
+         self.fc = nn.Linear(256, num_class)
+         nn.init.normal(self.fc.weight, 0, math.sqrt(2. / num_class))
+         bn_init(self.data_bn, 1)
+
+     def forward(self, x):
+         N, C, T, V, M = x.size()
+
+         x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
+         x = self.data_bn(x)
+         x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
+
+         x = self.l1(x)
+         x = self.l2(x)
+         x = self.l3(x)
+         x = self.l4(x)
+         x = self.l5(x)
+         x = self.l6(x)
+         x = self.l7(x)
+         x = self.l8(x)
+         x = self.l9(x)
+         x = self.l10(x)
+
+         # N*M,C,T,V
+         c_new = x.size(1)
+         x = x.view(N, M, c_new, -1)
+         x = x.mean(3).mean(1)
+
+         return self.fc(x)
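For reference, the core trick in `Shift_gcn` above is that the spatial "graph" operation is not a graph convolution at all: it is a fixed permutation of the flattened joint-channel axis (`shift_in` / `shift_out`) applied with `torch.index_select`, followed by a per-joint linear layer and a learned feature mask. The snippet below is a minimal, self-contained sketch of that permutation only; it is not part of the uploaded file. It uses `np.int64` because the bare `np.int` in the code above only exists on the older NumPy this checkpoint targets, and the non-underscore `nn.init.*` calls likewise assume an older PyTorch.

```python
import numpy as np
import torch

# Minimal sketch (not part of the checkpoint): the spatial shift in Shift_gcn is a
# fixed permutation of the flattened (V*C) axis, applied with torch.index_select.
V, C = 25, 4                      # 25 joints, small channel count for display
idx = np.empty(V * C, dtype=np.int64)
for i in range(V):
    for j in range(C):
        # same formula as Shift_gcn.shift_in: joint i takes channel j
        # from joint (i + j) mod V
        idx[i * C + j] = (i * C + j + j * C) % (C * V)

x = torch.arange(V * C, dtype=torch.float32).view(1, V * C)   # one (n*t) row
shifted = torch.index_select(x, 1, torch.from_numpy(idx)).view(V, C)
print(shifted[:3])   # row i gathers channel j from joint i+j: [0,5,10,15], [4,9,14,19], ...
```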
ckpt/Others/Shift-GCN/ntu120_xset/ntu120_joint_motion_xset/config.yaml ADDED
@@ -0,0 +1,56 @@
+ Experiment_name: ntu120_joint_motion_xset
+ base_lr: 0.1
+ batch_size: 64
+ config: ./config/ntu120_xset/train_joint_motion.yaml
+ device:
+ - 6
+ - 7
+ eval_interval: 5
+ feeder: feeders.feeder.Feeder
+ ignore_weights: []
+ log_interval: 100
+ model: model.shift_gcn.Model
+ model_args:
+   graph: graph.ntu_rgb_d.Graph
+   graph_args:
+     labeling_mode: spatial
+   num_class: 120
+   num_person: 2
+   num_point: 25
+ model_saved_name: ./save_models/ntu120_joint_motion_xset
+ nesterov: true
+ num_epoch: 140
+ num_worker: 32
+ only_train_epoch: 1
+ only_train_part: true
+ optimizer: SGD
+ phase: train
+ print_log: true
+ save_interval: 2
+ save_score: false
+ seed: 1
+ show_topk:
+ - 1
+ - 5
+ start_epoch: 0
+ step:
+ - 60
+ - 80
+ - 100
+ test_batch_size: 64
+ test_feeder_args:
+   data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_joint_motion.npy
+   label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl
+ train_feeder_args:
+   data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_joint_motion.npy
+   debug: false
+   label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl
+   normalization: false
+   random_choose: false
+   random_move: false
+   random_shift: false
+   window_size: -1
+ warm_up_epoch: 0
+ weight_decay: 0.0001
+ weights: null
+ work_dir: ./work_dir/ntu120_joint_motion_xset
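This YAML is the argument file consumed by the Shift-GCN training script; `model_args` maps directly onto the `Model(...)` constructor in shift_gcn.py, and `step: [60, 80, 100]` matches the points in the logs where the learning rate falls from 0.1 to 0.01, 0.001 and 0.0001. A minimal loading sketch, assuming PyYAML and a local checkout of this repository:

```python
import yaml

# Read the training config above and show how model_args feeds Model(**...).
CFG = "ckpt/Others/Shift-GCN/ntu120_xset/ntu120_joint_motion_xset/config.yaml"

with open(CFG) as f:
    cfg = yaml.safe_load(f)

print(cfg["model"])        # model.shift_gcn.Model
print(cfg["model_args"])   # num_class=120, num_point=25, graph=graph.ntu_rgb_d.Graph, ...
print(cfg["step"])         # [60, 80, 100] -> epochs where the lr is divided by 10

# In the original training script the model is built roughly as:
#   Model = import_class(cfg["model"])
#   model = Model(**cfg["model_args"])
# which requires the Shift-GCN code base (graph/, model/, the CUDA Shift op) on PYTHONPATH.
```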
ckpt/Others/Shift-GCN/ntu120_xset/ntu120_joint_motion_xset/eval_results/best_acc.pkl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:58c173a511eb08702adf595bc87ee42b1d1116f33936bf7c6b79babd070abd96
+ size 34946665
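As the three lines above show, `best_acc.pkl` is stored through Git LFS, so a plain clone contains only this pointer stub rather than the ~35 MB payload (34946665 bytes). A small check, assuming a local checkout at the path below:

```python
# Minimal sketch: detect whether best_acc.pkl is still a Git LFS pointer
# (the three-line stub above) or the actual pickle payload.
PKL = "ckpt/Others/Shift-GCN/ntu120_xset/ntu120_joint_motion_xset/eval_results/best_acc.pkl"

with open(PKL, "rb") as f:
    head = f.read(64)

if head.startswith(b"version https://git-lfs.github.com/spec/v1"):
    print("LFS pointer only -- run `git lfs pull` to fetch the real file")
else:
    print("pickle payload present")
```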
ckpt/Others/Shift-GCN/ntu120_xset/ntu120_joint_motion_xset/log.txt ADDED
@@ -0,0 +1,929 @@
1
+ [ Thu Sep 15 20:53:29 2022 ] Parameters:
2
+ {'work_dir': './work_dir/ntu120_joint_motion_xset', 'model_saved_name': './save_models/ntu120_joint_motion_xset', 'Experiment_name': 'ntu120_joint_motion_xset', 'config': './config/ntu120_xset/train_joint_motion.yaml', 'phase': 'train', 'save_score': False, 'seed': 1, 'log_interval': 100, 'save_interval': 2, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_joint_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_joint_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl'}, 'model': 'model.shift_gcn.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.1, 'step': [60, 80, 100], 'device': [6, 7], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 64, 'test_batch_size': 64, 'start_epoch': 0, 'num_epoch': 140, 'weight_decay': 0.0001, 'only_train_part': True, 'only_train_epoch': 1, 'warm_up_epoch': 0}
3
+
4
+ [ Thu Sep 15 20:53:29 2022 ] Training epoch: 1
5
+ [ Thu Sep 15 20:54:48 2022 ] Batch(99/162) done. Loss: 2.8867 lr:0.100000 network_time: 0.0295
6
+ [ Thu Sep 15 20:55:33 2022 ] Eval epoch: 1
7
+ [ Thu Sep 15 20:57:27 2022 ] Mean test loss of 930 batches: 5.0145583152771.
8
+ [ Thu Sep 15 20:57:27 2022 ] Top1: 12.30%
9
+ [ Thu Sep 15 20:57:27 2022 ] Top5: 33.00%
10
+ [ Thu Sep 15 20:57:28 2022 ] Training epoch: 2
11
+ [ Thu Sep 15 20:57:58 2022 ] Batch(37/162) done. Loss: 1.8791 lr:0.100000 network_time: 0.0267
12
+ [ Thu Sep 15 20:59:11 2022 ] Batch(137/162) done. Loss: 1.9913 lr:0.100000 network_time: 0.0547
13
+ [ Thu Sep 15 20:59:29 2022 ] Eval epoch: 2
14
+ [ Thu Sep 15 21:01:19 2022 ] Mean test loss of 930 batches: 5.094688892364502.
15
+ [ Thu Sep 15 21:01:20 2022 ] Top1: 15.49%
16
+ [ Thu Sep 15 21:01:20 2022 ] Top5: 39.50%
17
+ [ Thu Sep 15 21:01:20 2022 ] Training epoch: 3
18
+ [ Thu Sep 15 21:02:19 2022 ] Batch(75/162) done. Loss: 1.8336 lr:0.100000 network_time: 0.0302
19
+ [ Thu Sep 15 21:03:22 2022 ] Eval epoch: 3
20
+ [ Thu Sep 15 21:05:11 2022 ] Mean test loss of 930 batches: 3.7378287315368652.
21
+ [ Thu Sep 15 21:05:12 2022 ] Top1: 26.10%
22
+ [ Thu Sep 15 21:05:12 2022 ] Top5: 53.90%
23
+ [ Thu Sep 15 21:05:12 2022 ] Training epoch: 4
24
+ [ Thu Sep 15 21:05:25 2022 ] Batch(13/162) done. Loss: 1.5423 lr:0.100000 network_time: 0.0266
25
+ [ Thu Sep 15 21:06:38 2022 ] Batch(113/162) done. Loss: 1.6592 lr:0.100000 network_time: 0.0271
26
+ [ Thu Sep 15 21:07:13 2022 ] Eval epoch: 4
27
+ [ Thu Sep 15 21:09:04 2022 ] Mean test loss of 930 batches: 3.334031581878662.
28
+ [ Thu Sep 15 21:09:04 2022 ] Top1: 28.04%
29
+ [ Thu Sep 15 21:09:05 2022 ] Top5: 55.90%
30
+ [ Thu Sep 15 21:09:05 2022 ] Training epoch: 5
31
+ [ Thu Sep 15 21:09:46 2022 ] Batch(51/162) done. Loss: 1.5313 lr:0.100000 network_time: 0.0272
32
+ [ Thu Sep 15 21:10:58 2022 ] Batch(151/162) done. Loss: 1.5609 lr:0.100000 network_time: 0.0266
33
+ [ Thu Sep 15 21:11:06 2022 ] Eval epoch: 5
34
+ [ Thu Sep 15 21:12:56 2022 ] Mean test loss of 930 batches: 3.3112664222717285.
35
+ [ Thu Sep 15 21:12:56 2022 ] Top1: 31.94%
36
+ [ Thu Sep 15 21:12:57 2022 ] Top5: 61.28%
37
+ [ Thu Sep 15 21:12:57 2022 ] Training epoch: 6
38
+ [ Thu Sep 15 21:14:05 2022 ] Batch(89/162) done. Loss: 1.2561 lr:0.100000 network_time: 0.0255
39
+ [ Thu Sep 15 21:14:58 2022 ] Eval epoch: 6
40
+ [ Thu Sep 15 21:16:47 2022 ] Mean test loss of 930 batches: 3.7558095455169678.
41
+ [ Thu Sep 15 21:16:48 2022 ] Top1: 29.97%
42
+ [ Thu Sep 15 21:16:48 2022 ] Top5: 60.91%
43
+ [ Thu Sep 15 21:16:48 2022 ] Training epoch: 7
44
+ [ Thu Sep 15 21:17:12 2022 ] Batch(27/162) done. Loss: 1.4390 lr:0.100000 network_time: 0.0517
45
+ [ Thu Sep 15 21:18:24 2022 ] Batch(127/162) done. Loss: 0.6995 lr:0.100000 network_time: 0.0254
46
+ [ Thu Sep 15 21:18:49 2022 ] Eval epoch: 7
47
+ [ Thu Sep 15 21:20:39 2022 ] Mean test loss of 930 batches: 3.5645387172698975.
48
+ [ Thu Sep 15 21:20:39 2022 ] Top1: 34.81%
49
+ [ Thu Sep 15 21:20:40 2022 ] Top5: 65.19%
50
+ [ Thu Sep 15 21:20:40 2022 ] Training epoch: 8
51
+ [ Thu Sep 15 21:21:31 2022 ] Batch(65/162) done. Loss: 1.2151 lr:0.100000 network_time: 0.0459
52
+ [ Thu Sep 15 21:22:41 2022 ] Eval epoch: 8
53
+ [ Thu Sep 15 21:24:32 2022 ] Mean test loss of 930 batches: 3.345634937286377.
54
+ [ Thu Sep 15 21:24:32 2022 ] Top1: 37.25%
55
+ [ Thu Sep 15 21:24:33 2022 ] Top5: 66.65%
56
+ [ Thu Sep 15 21:24:33 2022 ] Training epoch: 9
57
+ [ Thu Sep 15 21:24:39 2022 ] Batch(3/162) done. Loss: 0.8029 lr:0.100000 network_time: 0.0305
58
+ [ Thu Sep 15 21:25:51 2022 ] Batch(103/162) done. Loss: 1.1043 lr:0.100000 network_time: 0.0442
59
+ [ Thu Sep 15 21:26:34 2022 ] Eval epoch: 9
60
+ [ Thu Sep 15 21:28:23 2022 ] Mean test loss of 930 batches: 3.5088706016540527.
61
+ [ Thu Sep 15 21:28:24 2022 ] Top1: 30.93%
62
+ [ Thu Sep 15 21:28:24 2022 ] Top5: 61.77%
63
+ [ Thu Sep 15 21:28:25 2022 ] Training epoch: 10
64
+ [ Thu Sep 15 21:28:58 2022 ] Batch(41/162) done. Loss: 0.9017 lr:0.100000 network_time: 0.0274
65
+ [ Thu Sep 15 21:30:11 2022 ] Batch(141/162) done. Loss: 1.0810 lr:0.100000 network_time: 0.0301
66
+ [ Thu Sep 15 21:30:26 2022 ] Eval epoch: 10
67
+ [ Thu Sep 15 21:32:16 2022 ] Mean test loss of 930 batches: 3.7960636615753174.
68
+ [ Thu Sep 15 21:32:16 2022 ] Top1: 37.34%
69
+ [ Thu Sep 15 21:32:17 2022 ] Top5: 68.48%
70
+ [ Thu Sep 15 21:32:17 2022 ] Training epoch: 11
71
+ [ Thu Sep 15 21:33:18 2022 ] Batch(79/162) done. Loss: 0.8720 lr:0.100000 network_time: 0.0274
72
+ [ Thu Sep 15 21:34:18 2022 ] Eval epoch: 11
73
+ [ Thu Sep 15 21:36:08 2022 ] Mean test loss of 930 batches: 3.196443796157837.
74
+ [ Thu Sep 15 21:36:09 2022 ] Top1: 37.54%
75
+ [ Thu Sep 15 21:36:09 2022 ] Top5: 66.85%
76
+ [ Thu Sep 15 21:36:09 2022 ] Training epoch: 12
77
+ [ Thu Sep 15 21:36:26 2022 ] Batch(17/162) done. Loss: 0.8167 lr:0.100000 network_time: 0.0310
78
+ [ Thu Sep 15 21:37:38 2022 ] Batch(117/162) done. Loss: 1.1508 lr:0.100000 network_time: 0.0308
79
+ [ Thu Sep 15 21:38:10 2022 ] Eval epoch: 12
80
+ [ Thu Sep 15 21:40:00 2022 ] Mean test loss of 930 batches: 3.6423709392547607.
81
+ [ Thu Sep 15 21:40:01 2022 ] Top1: 38.01%
82
+ [ Thu Sep 15 21:40:01 2022 ] Top5: 68.47%
83
+ [ Thu Sep 15 21:40:01 2022 ] Training epoch: 13
84
+ [ Thu Sep 15 21:40:45 2022 ] Batch(55/162) done. Loss: 0.6677 lr:0.100000 network_time: 0.0280
85
+ [ Thu Sep 15 21:41:58 2022 ] Batch(155/162) done. Loss: 0.8657 lr:0.100000 network_time: 0.0269
86
+ [ Thu Sep 15 21:42:02 2022 ] Eval epoch: 13
87
+ [ Thu Sep 15 21:43:52 2022 ] Mean test loss of 930 batches: 2.7829337120056152.
88
+ [ Thu Sep 15 21:43:53 2022 ] Top1: 43.47%
89
+ [ Thu Sep 15 21:43:53 2022 ] Top5: 75.32%
90
+ [ Thu Sep 15 21:43:53 2022 ] Training epoch: 14
91
+ [ Thu Sep 15 21:45:05 2022 ] Batch(93/162) done. Loss: 0.9714 lr:0.100000 network_time: 0.0326
92
+ [ Thu Sep 15 21:45:54 2022 ] Eval epoch: 14
93
+ [ Thu Sep 15 21:47:44 2022 ] Mean test loss of 930 batches: 2.5059609413146973.
94
+ [ Thu Sep 15 21:47:44 2022 ] Top1: 45.03%
95
+ [ Thu Sep 15 21:47:45 2022 ] Top5: 74.04%
96
+ [ Thu Sep 15 21:47:45 2022 ] Training epoch: 15
97
+ [ Thu Sep 15 21:48:11 2022 ] Batch(31/162) done. Loss: 0.5980 lr:0.100000 network_time: 0.0296
98
+ [ Thu Sep 15 21:49:24 2022 ] Batch(131/162) done. Loss: 0.6171 lr:0.100000 network_time: 0.0279
99
+ [ Thu Sep 15 21:49:46 2022 ] Eval epoch: 15
100
+ [ Thu Sep 15 21:51:37 2022 ] Mean test loss of 930 batches: 3.646928310394287.
101
+ [ Thu Sep 15 21:51:37 2022 ] Top1: 37.05%
102
+ [ Thu Sep 15 21:51:38 2022 ] Top5: 67.63%
103
+ [ Thu Sep 15 21:51:38 2022 ] Training epoch: 16
104
+ [ Thu Sep 15 21:52:32 2022 ] Batch(69/162) done. Loss: 0.7645 lr:0.100000 network_time: 0.0272
105
+ [ Thu Sep 15 21:53:39 2022 ] Eval epoch: 16
106
+ [ Thu Sep 15 21:55:29 2022 ] Mean test loss of 930 batches: 3.679865837097168.
107
+ [ Thu Sep 15 21:55:29 2022 ] Top1: 42.20%
108
+ [ Thu Sep 15 21:55:30 2022 ] Top5: 71.60%
109
+ [ Thu Sep 15 21:55:30 2022 ] Training epoch: 17
110
+ [ Thu Sep 15 21:55:39 2022 ] Batch(7/162) done. Loss: 0.8176 lr:0.100000 network_time: 0.0273
111
+ [ Thu Sep 15 21:56:52 2022 ] Batch(107/162) done. Loss: 0.8370 lr:0.100000 network_time: 0.0282
112
+ [ Thu Sep 15 21:57:31 2022 ] Eval epoch: 17
113
+ [ Thu Sep 15 21:59:21 2022 ] Mean test loss of 930 batches: 3.4497668743133545.
114
+ [ Thu Sep 15 21:59:21 2022 ] Top1: 42.82%
115
+ [ Thu Sep 15 21:59:22 2022 ] Top5: 72.71%
116
+ [ Thu Sep 15 21:59:22 2022 ] Training epoch: 18
117
+ [ Thu Sep 15 21:59:58 2022 ] Batch(45/162) done. Loss: 0.6977 lr:0.100000 network_time: 0.0271
118
+ [ Thu Sep 15 22:01:11 2022 ] Batch(145/162) done. Loss: 0.4468 lr:0.100000 network_time: 0.0284
119
+ [ Thu Sep 15 22:01:23 2022 ] Eval epoch: 18
120
+ [ Thu Sep 15 22:03:13 2022 ] Mean test loss of 930 batches: 3.5319061279296875.
121
+ [ Thu Sep 15 22:03:13 2022 ] Top1: 37.75%
122
+ [ Thu Sep 15 22:03:13 2022 ] Top5: 69.26%
123
+ [ Thu Sep 15 22:03:14 2022 ] Training epoch: 19
124
+ [ Thu Sep 15 22:04:18 2022 ] Batch(83/162) done. Loss: 0.2785 lr:0.100000 network_time: 0.0272
125
+ [ Thu Sep 15 22:05:15 2022 ] Eval epoch: 19
126
+ [ Thu Sep 15 22:07:05 2022 ] Mean test loss of 930 batches: 2.7543692588806152.
127
+ [ Thu Sep 15 22:07:05 2022 ] Top1: 45.21%
128
+ [ Thu Sep 15 22:07:06 2022 ] Top5: 76.62%
129
+ [ Thu Sep 15 22:07:06 2022 ] Training epoch: 20
130
+ [ Thu Sep 15 22:07:25 2022 ] Batch(21/162) done. Loss: 0.7290 lr:0.100000 network_time: 0.0274
131
+ [ Thu Sep 15 22:08:38 2022 ] Batch(121/162) done. Loss: 0.6042 lr:0.100000 network_time: 0.0347
132
+ [ Thu Sep 15 22:09:07 2022 ] Eval epoch: 20
133
+ [ Thu Sep 15 22:10:57 2022 ] Mean test loss of 930 batches: 3.4957778453826904.
134
+ [ Thu Sep 15 22:10:57 2022 ] Top1: 38.53%
135
+ [ Thu Sep 15 22:10:58 2022 ] Top5: 70.23%
136
+ [ Thu Sep 15 22:10:58 2022 ] Training epoch: 21
137
+ [ Thu Sep 15 22:11:45 2022 ] Batch(59/162) done. Loss: 0.5559 lr:0.100000 network_time: 0.0251
138
+ [ Thu Sep 15 22:12:58 2022 ] Batch(159/162) done. Loss: 0.7466 lr:0.100000 network_time: 0.0319
139
+ [ Thu Sep 15 22:12:59 2022 ] Eval epoch: 21
140
+ [ Thu Sep 15 22:14:48 2022 ] Mean test loss of 930 batches: 2.8289051055908203.
141
+ [ Thu Sep 15 22:14:49 2022 ] Top1: 45.09%
142
+ [ Thu Sep 15 22:14:49 2022 ] Top5: 74.70%
143
+ [ Thu Sep 15 22:14:50 2022 ] Training epoch: 22
144
+ [ Thu Sep 15 22:16:04 2022 ] Batch(97/162) done. Loss: 0.4766 lr:0.100000 network_time: 0.0273
145
+ [ Thu Sep 15 22:16:51 2022 ] Eval epoch: 22
146
+ [ Thu Sep 15 22:18:44 2022 ] Mean test loss of 930 batches: 3.316596746444702.
147
+ [ Thu Sep 15 22:18:45 2022 ] Top1: 43.31%
148
+ [ Thu Sep 15 22:18:45 2022 ] Top5: 72.96%
149
+ [ Thu Sep 15 22:18:46 2022 ] Training epoch: 23
150
+ [ Thu Sep 15 22:19:19 2022 ] Batch(35/162) done. Loss: 0.3969 lr:0.100000 network_time: 0.0273
151
+ [ Thu Sep 15 22:20:42 2022 ] Batch(135/162) done. Loss: 0.2837 lr:0.100000 network_time: 0.0277
152
+ [ Thu Sep 15 22:21:04 2022 ] Eval epoch: 23
153
+ [ Thu Sep 15 22:23:08 2022 ] Mean test loss of 930 batches: 3.197326421737671.
154
+ [ Thu Sep 15 22:23:08 2022 ] Top1: 42.93%
155
+ [ Thu Sep 15 22:23:09 2022 ] Top5: 71.72%
156
+ [ Thu Sep 15 22:23:09 2022 ] Training epoch: 24
157
+ [ Thu Sep 15 22:24:14 2022 ] Batch(73/162) done. Loss: 0.2647 lr:0.100000 network_time: 0.0311
158
+ [ Thu Sep 15 22:25:28 2022 ] Eval epoch: 24
159
+ [ Thu Sep 15 22:27:31 2022 ] Mean test loss of 930 batches: 3.7576260566711426.
160
+ [ Thu Sep 15 22:27:32 2022 ] Top1: 33.71%
161
+ [ Thu Sep 15 22:27:32 2022 ] Top5: 64.81%
162
+ [ Thu Sep 15 22:27:32 2022 ] Training epoch: 25
163
+ [ Thu Sep 15 22:27:46 2022 ] Batch(11/162) done. Loss: 0.2705 lr:0.100000 network_time: 0.0271
164
+ [ Thu Sep 15 22:29:10 2022 ] Batch(111/162) done. Loss: 0.6018 lr:0.100000 network_time: 0.0302
165
+ [ Thu Sep 15 22:29:52 2022 ] Eval epoch: 25
166
+ [ Thu Sep 15 22:31:55 2022 ] Mean test loss of 930 batches: 3.1339001655578613.
167
+ [ Thu Sep 15 22:31:55 2022 ] Top1: 43.27%
168
+ [ Thu Sep 15 22:31:56 2022 ] Top5: 72.67%
169
+ [ Thu Sep 15 22:31:56 2022 ] Training epoch: 26
170
+ [ Thu Sep 15 22:32:41 2022 ] Batch(49/162) done. Loss: 0.3780 lr:0.100000 network_time: 0.0269
171
+ [ Thu Sep 15 22:34:05 2022 ] Batch(149/162) done. Loss: 0.4230 lr:0.100000 network_time: 0.0272
172
+ [ Thu Sep 15 22:34:15 2022 ] Eval epoch: 26
173
+ [ Thu Sep 15 22:36:19 2022 ] Mean test loss of 930 batches: 2.8259902000427246.
174
+ [ Thu Sep 15 22:36:20 2022 ] Top1: 47.37%
175
+ [ Thu Sep 15 22:36:20 2022 ] Top5: 77.19%
176
+ [ Thu Sep 15 22:36:20 2022 ] Training epoch: 27
177
+ [ Thu Sep 15 22:37:37 2022 ] Batch(87/162) done. Loss: 0.1814 lr:0.100000 network_time: 0.0356
178
+ [ Thu Sep 15 22:38:40 2022 ] Eval epoch: 27
179
+ [ Thu Sep 15 22:40:43 2022 ] Mean test loss of 930 batches: 3.3106560707092285.
180
+ [ Thu Sep 15 22:40:43 2022 ] Top1: 41.29%
181
+ [ Thu Sep 15 22:40:44 2022 ] Top5: 71.07%
182
+ [ Thu Sep 15 22:40:44 2022 ] Training epoch: 28
183
+ [ Thu Sep 15 22:41:09 2022 ] Batch(25/162) done. Loss: 0.3237 lr:0.100000 network_time: 0.0281
184
+ [ Thu Sep 15 22:42:29 2022 ] Batch(125/162) done. Loss: 0.4479 lr:0.100000 network_time: 0.0267
185
+ [ Thu Sep 15 22:42:56 2022 ] Eval epoch: 28
186
+ [ Thu Sep 15 22:44:46 2022 ] Mean test loss of 930 batches: 3.0452048778533936.
187
+ [ Thu Sep 15 22:44:47 2022 ] Top1: 44.39%
188
+ [ Thu Sep 15 22:44:47 2022 ] Top5: 73.71%
189
+ [ Thu Sep 15 22:44:47 2022 ] Training epoch: 29
190
+ [ Thu Sep 15 22:45:37 2022 ] Batch(63/162) done. Loss: 0.4210 lr:0.100000 network_time: 0.0295
191
+ [ Thu Sep 15 22:46:49 2022 ] Eval epoch: 29
192
+ [ Thu Sep 15 22:48:38 2022 ] Mean test loss of 930 batches: 3.6561460494995117.
193
+ [ Thu Sep 15 22:48:38 2022 ] Top1: 42.24%
194
+ [ Thu Sep 15 22:48:39 2022 ] Top5: 71.71%
195
+ [ Thu Sep 15 22:48:39 2022 ] Training epoch: 30
196
+ [ Thu Sep 15 22:48:44 2022 ] Batch(1/162) done. Loss: 0.1475 lr:0.100000 network_time: 0.0268
197
+ [ Thu Sep 15 22:49:56 2022 ] Batch(101/162) done. Loss: 0.3633 lr:0.100000 network_time: 0.0277
198
+ [ Thu Sep 15 22:50:40 2022 ] Eval epoch: 30
199
+ [ Thu Sep 15 22:52:30 2022 ] Mean test loss of 930 batches: 3.4392001628875732.
200
+ [ Thu Sep 15 22:52:30 2022 ] Top1: 38.44%
201
+ [ Thu Sep 15 22:52:31 2022 ] Top5: 68.96%
202
+ [ Thu Sep 15 22:52:31 2022 ] Training epoch: 31
203
+ [ Thu Sep 15 22:53:03 2022 ] Batch(39/162) done. Loss: 0.2928 lr:0.100000 network_time: 0.0318
204
+ [ Thu Sep 15 22:54:16 2022 ] Batch(139/162) done. Loss: 0.3484 lr:0.100000 network_time: 0.0273
205
+ [ Thu Sep 15 22:54:32 2022 ] Eval epoch: 31
206
+ [ Thu Sep 15 22:56:22 2022 ] Mean test loss of 930 batches: 3.405987024307251.
207
+ [ Thu Sep 15 22:56:23 2022 ] Top1: 44.97%
208
+ [ Thu Sep 15 22:56:23 2022 ] Top5: 74.00%
209
+ [ Thu Sep 15 22:56:24 2022 ] Training epoch: 32
210
+ [ Thu Sep 15 22:57:23 2022 ] Batch(77/162) done. Loss: 0.3654 lr:0.100000 network_time: 0.0306
211
+ [ Thu Sep 15 22:58:24 2022 ] Eval epoch: 32
212
+ [ Thu Sep 15 23:00:14 2022 ] Mean test loss of 930 batches: 3.289426326751709.
213
+ [ Thu Sep 15 23:00:14 2022 ] Top1: 44.54%
214
+ [ Thu Sep 15 23:00:15 2022 ] Top5: 72.69%
215
+ [ Thu Sep 15 23:00:15 2022 ] Training epoch: 33
216
+ [ Thu Sep 15 23:00:30 2022 ] Batch(15/162) done. Loss: 0.2446 lr:0.100000 network_time: 0.0309
217
+ [ Thu Sep 15 23:01:43 2022 ] Batch(115/162) done. Loss: 0.6636 lr:0.100000 network_time: 0.0308
218
+ [ Thu Sep 15 23:02:16 2022 ] Eval epoch: 33
219
+ [ Thu Sep 15 23:04:06 2022 ] Mean test loss of 930 batches: 3.5604958534240723.
220
+ [ Thu Sep 15 23:04:06 2022 ] Top1: 43.77%
221
+ [ Thu Sep 15 23:04:07 2022 ] Top5: 73.33%
222
+ [ Thu Sep 15 23:04:07 2022 ] Training epoch: 34
223
+ [ Thu Sep 15 23:04:49 2022 ] Batch(53/162) done. Loss: 0.2863 lr:0.100000 network_time: 0.0322
224
+ [ Thu Sep 15 23:06:02 2022 ] Batch(153/162) done. Loss: 0.3649 lr:0.100000 network_time: 0.0314
225
+ [ Thu Sep 15 23:06:08 2022 ] Eval epoch: 34
226
+ [ Thu Sep 15 23:07:57 2022 ] Mean test loss of 930 batches: 3.1730799674987793.
227
+ [ Thu Sep 15 23:07:58 2022 ] Top1: 45.38%
228
+ [ Thu Sep 15 23:07:58 2022 ] Top5: 75.41%
229
+ [ Thu Sep 15 23:07:58 2022 ] Training epoch: 35
230
+ [ Thu Sep 15 23:09:08 2022 ] Batch(91/162) done. Loss: 0.2954 lr:0.100000 network_time: 0.0274
231
+ [ Thu Sep 15 23:09:59 2022 ] Eval epoch: 35
232
+ [ Thu Sep 15 23:11:49 2022 ] Mean test loss of 930 batches: 4.07279634475708.
233
+ [ Thu Sep 15 23:11:49 2022 ] Top1: 35.21%
234
+ [ Thu Sep 15 23:11:50 2022 ] Top5: 66.58%
235
+ [ Thu Sep 15 23:11:50 2022 ] Training epoch: 36
236
+ [ Thu Sep 15 23:12:15 2022 ] Batch(29/162) done. Loss: 0.2734 lr:0.100000 network_time: 0.0303
237
+ [ Thu Sep 15 23:13:28 2022 ] Batch(129/162) done. Loss: 0.2994 lr:0.100000 network_time: 0.0271
238
+ [ Thu Sep 15 23:13:51 2022 ] Eval epoch: 36
239
+ [ Thu Sep 15 23:15:41 2022 ] Mean test loss of 930 batches: 3.6304514408111572.
240
+ [ Thu Sep 15 23:15:41 2022 ] Top1: 45.32%
241
+ [ Thu Sep 15 23:15:42 2022 ] Top5: 75.04%
242
+ [ Thu Sep 15 23:15:42 2022 ] Training epoch: 37
243
+ [ Thu Sep 15 23:16:35 2022 ] Batch(67/162) done. Loss: 0.2511 lr:0.100000 network_time: 0.0298
244
+ [ Thu Sep 15 23:17:43 2022 ] Eval epoch: 37
245
+ [ Thu Sep 15 23:19:33 2022 ] Mean test loss of 930 batches: 3.6294195652008057.
246
+ [ Thu Sep 15 23:19:33 2022 ] Top1: 44.20%
247
+ [ Thu Sep 15 23:19:33 2022 ] Top5: 73.36%
248
+ [ Thu Sep 15 23:19:34 2022 ] Training epoch: 38
249
+ [ Thu Sep 15 23:19:41 2022 ] Batch(5/162) done. Loss: 0.2162 lr:0.100000 network_time: 0.0284
250
+ [ Thu Sep 15 23:20:54 2022 ] Batch(105/162) done. Loss: 0.1641 lr:0.100000 network_time: 0.0281
251
+ [ Thu Sep 15 23:21:35 2022 ] Eval epoch: 38
252
+ [ Thu Sep 15 23:23:24 2022 ] Mean test loss of 930 batches: 3.811417818069458.
253
+ [ Thu Sep 15 23:23:25 2022 ] Top1: 46.29%
254
+ [ Thu Sep 15 23:23:25 2022 ] Top5: 74.94%
255
+ [ Thu Sep 15 23:23:26 2022 ] Training epoch: 39
256
+ [ Thu Sep 15 23:24:01 2022 ] Batch(43/162) done. Loss: 0.1922 lr:0.100000 network_time: 0.0274
257
+ [ Thu Sep 15 23:25:13 2022 ] Batch(143/162) done. Loss: 0.4354 lr:0.100000 network_time: 0.0301
258
+ [ Thu Sep 15 23:25:27 2022 ] Eval epoch: 39
259
+ [ Thu Sep 15 23:27:16 2022 ] Mean test loss of 930 batches: 4.040771484375.
260
+ [ Thu Sep 15 23:27:17 2022 ] Top1: 39.84%
261
+ [ Thu Sep 15 23:27:17 2022 ] Top5: 69.56%
262
+ [ Thu Sep 15 23:27:18 2022 ] Training epoch: 40
263
+ [ Thu Sep 15 23:28:20 2022 ] Batch(81/162) done. Loss: 0.4334 lr:0.100000 network_time: 0.0281
264
+ [ Thu Sep 15 23:29:19 2022 ] Eval epoch: 40
265
+ [ Thu Sep 15 23:31:10 2022 ] Mean test loss of 930 batches: 3.5953073501586914.
266
+ [ Thu Sep 15 23:31:10 2022 ] Top1: 41.93%
267
+ [ Thu Sep 15 23:31:11 2022 ] Top5: 72.07%
268
+ [ Thu Sep 15 23:31:11 2022 ] Training epoch: 41
269
+ [ Thu Sep 15 23:31:29 2022 ] Batch(19/162) done. Loss: 0.1604 lr:0.100000 network_time: 0.0274
270
+ [ Thu Sep 15 23:32:46 2022 ] Batch(119/162) done. Loss: 0.3937 lr:0.100000 network_time: 0.0284
271
+ [ Thu Sep 15 23:33:17 2022 ] Eval epoch: 41
272
+ [ Thu Sep 15 23:35:09 2022 ] Mean test loss of 930 batches: 3.520280122756958.
273
+ [ Thu Sep 15 23:35:09 2022 ] Top1: 47.59%
274
+ [ Thu Sep 15 23:35:10 2022 ] Top5: 75.64%
275
+ [ Thu Sep 15 23:35:10 2022 ] Training epoch: 42
276
+ [ Thu Sep 15 23:35:57 2022 ] Batch(57/162) done. Loss: 0.1668 lr:0.100000 network_time: 0.0282
277
+ [ Thu Sep 15 23:37:09 2022 ] Batch(157/162) done. Loss: 0.2388 lr:0.100000 network_time: 0.0286
278
+ [ Thu Sep 15 23:37:12 2022 ] Eval epoch: 42
279
+ [ Thu Sep 15 23:39:02 2022 ] Mean test loss of 930 batches: 4.183106899261475.
280
+ [ Thu Sep 15 23:39:03 2022 ] Top1: 44.28%
281
+ [ Thu Sep 15 23:39:03 2022 ] Top5: 72.06%
282
+ [ Thu Sep 15 23:39:03 2022 ] Training epoch: 43
283
+ [ Thu Sep 15 23:40:17 2022 ] Batch(95/162) done. Loss: 0.2275 lr:0.100000 network_time: 0.0315
284
+ [ Thu Sep 15 23:41:05 2022 ] Eval epoch: 43
285
+ [ Thu Sep 15 23:42:55 2022 ] Mean test loss of 930 batches: 3.718573570251465.
286
+ [ Thu Sep 15 23:42:55 2022 ] Top1: 45.26%
287
+ [ Thu Sep 15 23:42:56 2022 ] Top5: 74.70%
288
+ [ Thu Sep 15 23:42:56 2022 ] Training epoch: 44
289
+ [ Thu Sep 15 23:43:24 2022 ] Batch(33/162) done. Loss: 0.1599 lr:0.100000 network_time: 0.0259
290
+ [ Thu Sep 15 23:44:36 2022 ] Batch(133/162) done. Loss: 0.3298 lr:0.100000 network_time: 0.0262
291
+ [ Thu Sep 15 23:44:57 2022 ] Eval epoch: 44
292
+ [ Thu Sep 15 23:46:47 2022 ] Mean test loss of 930 batches: 3.5274219512939453.
293
+ [ Thu Sep 15 23:46:47 2022 ] Top1: 45.58%
294
+ [ Thu Sep 15 23:46:48 2022 ] Top5: 74.79%
295
+ [ Thu Sep 15 23:46:48 2022 ] Training epoch: 45
296
+ [ Thu Sep 15 23:47:43 2022 ] Batch(71/162) done. Loss: 0.1845 lr:0.100000 network_time: 0.0271
297
+ [ Thu Sep 15 23:48:49 2022 ] Eval epoch: 45
298
+ [ Thu Sep 15 23:50:39 2022 ] Mean test loss of 930 batches: 3.1531949043273926.
299
+ [ Thu Sep 15 23:50:39 2022 ] Top1: 46.84%
300
+ [ Thu Sep 15 23:50:39 2022 ] Top5: 76.80%
301
+ [ Thu Sep 15 23:50:40 2022 ] Training epoch: 46
302
+ [ Thu Sep 15 23:50:50 2022 ] Batch(9/162) done. Loss: 0.1575 lr:0.100000 network_time: 0.0318
303
+ [ Thu Sep 15 23:52:03 2022 ] Batch(109/162) done. Loss: 0.1927 lr:0.100000 network_time: 0.0316
304
+ [ Thu Sep 15 23:52:41 2022 ] Eval epoch: 46
305
+ [ Thu Sep 15 23:54:31 2022 ] Mean test loss of 930 batches: 3.124375104904175.
306
+ [ Thu Sep 15 23:54:32 2022 ] Top1: 48.06%
307
+ [ Thu Sep 15 23:54:32 2022 ] Top5: 75.82%
308
+ [ Thu Sep 15 23:54:32 2022 ] Training epoch: 47
309
+ [ Thu Sep 15 23:55:10 2022 ] Batch(47/162) done. Loss: 0.1494 lr:0.100000 network_time: 0.0275
310
+ [ Thu Sep 15 23:56:23 2022 ] Batch(147/162) done. Loss: 0.1077 lr:0.100000 network_time: 0.0267
311
+ [ Thu Sep 15 23:56:33 2022 ] Eval epoch: 47
312
+ [ Thu Sep 15 23:58:23 2022 ] Mean test loss of 930 batches: 3.3888652324676514.
313
+ [ Thu Sep 15 23:58:23 2022 ] Top1: 46.95%
314
+ [ Thu Sep 15 23:58:24 2022 ] Top5: 75.08%
315
+ [ Thu Sep 15 23:58:24 2022 ] Training epoch: 48
316
+ [ Thu Sep 15 23:59:30 2022 ] Batch(85/162) done. Loss: 0.3500 lr:0.100000 network_time: 0.0327
317
+ [ Fri Sep 16 00:00:25 2022 ] Eval epoch: 48
318
+ [ Fri Sep 16 00:02:15 2022 ] Mean test loss of 930 batches: 3.2960240840911865.
319
+ [ Fri Sep 16 00:02:15 2022 ] Top1: 45.77%
320
+ [ Fri Sep 16 00:02:16 2022 ] Top5: 74.82%
321
+ [ Fri Sep 16 00:02:16 2022 ] Training epoch: 49
322
+ [ Fri Sep 16 00:02:37 2022 ] Batch(23/162) done. Loss: 0.1806 lr:0.100000 network_time: 0.0323
323
+ [ Fri Sep 16 00:03:49 2022 ] Batch(123/162) done. Loss: 0.2071 lr:0.100000 network_time: 0.0339
324
+ [ Fri Sep 16 00:04:17 2022 ] Eval epoch: 49
325
+ [ Fri Sep 16 00:06:07 2022 ] Mean test loss of 930 batches: 3.379523754119873.
326
+ [ Fri Sep 16 00:06:07 2022 ] Top1: 47.97%
327
+ [ Fri Sep 16 00:06:08 2022 ] Top5: 76.24%
328
+ [ Fri Sep 16 00:06:08 2022 ] Training epoch: 50
329
+ [ Fri Sep 16 00:06:56 2022 ] Batch(61/162) done. Loss: 0.1720 lr:0.100000 network_time: 0.0305
330
+ [ Fri Sep 16 00:08:09 2022 ] Batch(161/162) done. Loss: 0.1912 lr:0.100000 network_time: 0.0259
331
+ [ Fri Sep 16 00:08:09 2022 ] Eval epoch: 50
332
+ [ Fri Sep 16 00:09:58 2022 ] Mean test loss of 930 batches: 3.817385673522949.
333
+ [ Fri Sep 16 00:09:59 2022 ] Top1: 45.53%
334
+ [ Fri Sep 16 00:09:59 2022 ] Top5: 74.42%
335
+ [ Fri Sep 16 00:09:59 2022 ] Training epoch: 51
336
+ [ Fri Sep 16 00:11:15 2022 ] Batch(99/162) done. Loss: 0.3319 lr:0.100000 network_time: 0.0281
337
+ [ Fri Sep 16 00:12:00 2022 ] Eval epoch: 51
338
+ [ Fri Sep 16 00:13:50 2022 ] Mean test loss of 930 batches: 3.0376579761505127.
339
+ [ Fri Sep 16 00:13:50 2022 ] Top1: 46.97%
340
+ [ Fri Sep 16 00:13:51 2022 ] Top5: 77.13%
341
+ [ Fri Sep 16 00:13:51 2022 ] Training epoch: 52
342
+ [ Fri Sep 16 00:14:22 2022 ] Batch(37/162) done. Loss: 0.1760 lr:0.100000 network_time: 0.0277
343
+ [ Fri Sep 16 00:15:34 2022 ] Batch(137/162) done. Loss: 0.2859 lr:0.100000 network_time: 0.0304
344
+ [ Fri Sep 16 00:15:52 2022 ] Eval epoch: 52
345
+ [ Fri Sep 16 00:17:42 2022 ] Mean test loss of 930 batches: 2.975449800491333.
346
+ [ Fri Sep 16 00:17:42 2022 ] Top1: 48.78%
347
+ [ Fri Sep 16 00:17:43 2022 ] Top5: 76.65%
348
+ [ Fri Sep 16 00:17:43 2022 ] Training epoch: 53
349
+ [ Fri Sep 16 00:18:41 2022 ] Batch(75/162) done. Loss: 0.0967 lr:0.100000 network_time: 0.0260
350
+ [ Fri Sep 16 00:19:44 2022 ] Eval epoch: 53
351
+ [ Fri Sep 16 00:21:34 2022 ] Mean test loss of 930 batches: 3.7247111797332764.
352
+ [ Fri Sep 16 00:21:34 2022 ] Top1: 45.97%
353
+ [ Fri Sep 16 00:21:34 2022 ] Top5: 74.71%
354
+ [ Fri Sep 16 00:21:35 2022 ] Training epoch: 54
355
+ [ Fri Sep 16 00:21:48 2022 ] Batch(13/162) done. Loss: 0.1032 lr:0.100000 network_time: 0.0273
356
+ [ Fri Sep 16 00:23:01 2022 ] Batch(113/162) done. Loss: 0.2584 lr:0.100000 network_time: 0.0263
357
+ [ Fri Sep 16 00:23:36 2022 ] Eval epoch: 54
358
+ [ Fri Sep 16 00:25:26 2022 ] Mean test loss of 930 batches: 3.4403839111328125.
359
+ [ Fri Sep 16 00:25:26 2022 ] Top1: 46.47%
360
+ [ Fri Sep 16 00:25:26 2022 ] Top5: 74.89%
361
+ [ Fri Sep 16 00:25:27 2022 ] Training epoch: 55
362
+ [ Fri Sep 16 00:26:07 2022 ] Batch(51/162) done. Loss: 0.1137 lr:0.100000 network_time: 0.0272
363
+ [ Fri Sep 16 00:27:20 2022 ] Batch(151/162) done. Loss: 0.1669 lr:0.100000 network_time: 0.0255
364
+ [ Fri Sep 16 00:27:28 2022 ] Eval epoch: 55
365
+ [ Fri Sep 16 00:29:17 2022 ] Mean test loss of 930 batches: 3.3242578506469727.
366
+ [ Fri Sep 16 00:29:17 2022 ] Top1: 44.78%
367
+ [ Fri Sep 16 00:29:18 2022 ] Top5: 73.90%
368
+ [ Fri Sep 16 00:29:18 2022 ] Training epoch: 56
369
+ [ Fri Sep 16 00:30:27 2022 ] Batch(89/162) done. Loss: 0.1448 lr:0.100000 network_time: 0.0310
370
+ [ Fri Sep 16 00:31:19 2022 ] Eval epoch: 56
371
+ [ Fri Sep 16 00:33:10 2022 ] Mean test loss of 930 batches: 3.144345283508301.
372
+ [ Fri Sep 16 00:33:10 2022 ] Top1: 47.82%
373
+ [ Fri Sep 16 00:33:11 2022 ] Top5: 76.10%
374
+ [ Fri Sep 16 00:33:11 2022 ] Training epoch: 57
375
+ [ Fri Sep 16 00:33:35 2022 ] Batch(27/162) done. Loss: 0.2348 lr:0.100000 network_time: 0.0274
376
+ [ Fri Sep 16 00:34:47 2022 ] Batch(127/162) done. Loss: 0.1120 lr:0.100000 network_time: 0.0275
377
+ [ Fri Sep 16 00:35:12 2022 ] Eval epoch: 57
378
+ [ Fri Sep 16 00:37:02 2022 ] Mean test loss of 930 batches: 2.920133352279663.
379
+ [ Fri Sep 16 00:37:03 2022 ] Top1: 48.81%
380
+ [ Fri Sep 16 00:37:03 2022 ] Top5: 77.31%
381
+ [ Fri Sep 16 00:37:03 2022 ] Training epoch: 58
382
+ [ Fri Sep 16 00:37:54 2022 ] Batch(65/162) done. Loss: 0.3310 lr:0.100000 network_time: 0.0559
383
+ [ Fri Sep 16 00:39:05 2022 ] Eval epoch: 58
384
+ [ Fri Sep 16 00:40:55 2022 ] Mean test loss of 930 batches: 3.7737793922424316.
385
+ [ Fri Sep 16 00:40:55 2022 ] Top1: 43.11%
386
+ [ Fri Sep 16 00:40:56 2022 ] Top5: 70.58%
387
+ [ Fri Sep 16 00:40:56 2022 ] Training epoch: 59
388
+ [ Fri Sep 16 00:41:02 2022 ] Batch(3/162) done. Loss: 0.0720 lr:0.100000 network_time: 0.0316
389
+ [ Fri Sep 16 00:42:15 2022 ] Batch(103/162) done. Loss: 0.2959 lr:0.100000 network_time: 0.0284
390
+ [ Fri Sep 16 00:42:57 2022 ] Eval epoch: 59
391
+ [ Fri Sep 16 00:44:47 2022 ] Mean test loss of 930 batches: 3.2564449310302734.
392
+ [ Fri Sep 16 00:44:48 2022 ] Top1: 48.88%
393
+ [ Fri Sep 16 00:44:48 2022 ] Top5: 77.70%
394
+ [ Fri Sep 16 00:44:48 2022 ] Training epoch: 60
395
+ [ Fri Sep 16 00:45:22 2022 ] Batch(41/162) done. Loss: 0.1657 lr:0.100000 network_time: 0.0279
396
+ [ Fri Sep 16 00:46:35 2022 ] Batch(141/162) done. Loss: 0.1678 lr:0.100000 network_time: 0.0401
397
+ [ Fri Sep 16 00:46:50 2022 ] Eval epoch: 60
398
+ [ Fri Sep 16 00:48:40 2022 ] Mean test loss of 930 batches: 3.528254747390747.
399
+ [ Fri Sep 16 00:48:40 2022 ] Top1: 48.81%
400
+ [ Fri Sep 16 00:48:41 2022 ] Top5: 76.94%
401
+ [ Fri Sep 16 00:48:41 2022 ] Training epoch: 61
402
+ [ Fri Sep 16 00:49:42 2022 ] Batch(79/162) done. Loss: 0.1274 lr:0.010000 network_time: 0.0277
403
+ [ Fri Sep 16 00:50:42 2022 ] Eval epoch: 61
404
+ [ Fri Sep 16 00:52:32 2022 ] Mean test loss of 930 batches: 3.0551934242248535.
405
+ [ Fri Sep 16 00:52:32 2022 ] Top1: 52.95%
406
+ [ Fri Sep 16 00:52:33 2022 ] Top5: 80.04%
407
+ [ Fri Sep 16 00:52:33 2022 ] Training epoch: 62
408
+ [ Fri Sep 16 00:52:49 2022 ] Batch(17/162) done. Loss: 0.0102 lr:0.010000 network_time: 0.0322
409
+ [ Fri Sep 16 00:54:02 2022 ] Batch(117/162) done. Loss: 0.0097 lr:0.010000 network_time: 0.0277
410
+ [ Fri Sep 16 00:54:34 2022 ] Eval epoch: 62
411
+ [ Fri Sep 16 00:56:24 2022 ] Mean test loss of 930 batches: 2.7636544704437256.
412
+ [ Fri Sep 16 00:56:24 2022 ] Top1: 54.60%
413
+ [ Fri Sep 16 00:56:25 2022 ] Top5: 80.99%
414
+ [ Fri Sep 16 00:56:25 2022 ] Training epoch: 63
415
+ [ Fri Sep 16 00:57:09 2022 ] Batch(55/162) done. Loss: 0.0137 lr:0.010000 network_time: 0.0319
416
+ [ Fri Sep 16 00:58:22 2022 ] Batch(155/162) done. Loss: 0.0272 lr:0.010000 network_time: 0.0269
417
+ [ Fri Sep 16 00:58:26 2022 ] Eval epoch: 63
418
+ [ Fri Sep 16 01:00:16 2022 ] Mean test loss of 930 batches: 2.9170336723327637.
419
+ [ Fri Sep 16 01:00:17 2022 ] Top1: 54.68%
420
+ [ Fri Sep 16 01:00:17 2022 ] Top5: 81.01%
421
+ [ Fri Sep 16 01:00:18 2022 ] Training epoch: 64
422
+ [ Fri Sep 16 01:01:29 2022 ] Batch(93/162) done. Loss: 0.0298 lr:0.010000 network_time: 0.0277
423
+ [ Fri Sep 16 01:02:19 2022 ] Eval epoch: 64
424
+ [ Fri Sep 16 01:04:09 2022 ] Mean test loss of 930 batches: 2.8463385105133057.
425
+ [ Fri Sep 16 01:04:09 2022 ] Top1: 53.52%
426
+ [ Fri Sep 16 01:04:09 2022 ] Top5: 80.42%
427
+ [ Fri Sep 16 01:04:10 2022 ] Training epoch: 65
428
+ [ Fri Sep 16 01:04:36 2022 ] Batch(31/162) done. Loss: 0.0082 lr:0.010000 network_time: 0.0256
429
+ [ Fri Sep 16 01:05:49 2022 ] Batch(131/162) done. Loss: 0.0156 lr:0.010000 network_time: 0.0266
430
+ [ Fri Sep 16 01:06:11 2022 ] Eval epoch: 65
431
+ [ Fri Sep 16 01:08:01 2022 ] Mean test loss of 930 batches: 2.7530834674835205.
432
+ [ Fri Sep 16 01:08:01 2022 ] Top1: 54.70%
433
+ [ Fri Sep 16 01:08:02 2022 ] Top5: 81.44%
434
+ [ Fri Sep 16 01:08:02 2022 ] Training epoch: 66
435
+ [ Fri Sep 16 01:08:56 2022 ] Batch(69/162) done. Loss: 0.0330 lr:0.010000 network_time: 0.0282
436
+ [ Fri Sep 16 01:10:03 2022 ] Eval epoch: 66
437
+ [ Fri Sep 16 01:11:52 2022 ] Mean test loss of 930 batches: 2.835975408554077.
438
+ [ Fri Sep 16 01:11:53 2022 ] Top1: 54.82%
439
+ [ Fri Sep 16 01:11:53 2022 ] Top5: 81.14%
440
+ [ Fri Sep 16 01:11:53 2022 ] Training epoch: 67
441
+ [ Fri Sep 16 01:12:02 2022 ] Batch(7/162) done. Loss: 0.0208 lr:0.010000 network_time: 0.0344
442
+ [ Fri Sep 16 01:13:15 2022 ] Batch(107/162) done. Loss: 0.0075 lr:0.010000 network_time: 0.0270
443
+ [ Fri Sep 16 01:13:55 2022 ] Eval epoch: 67
444
+ [ Fri Sep 16 01:15:44 2022 ] Mean test loss of 930 batches: 2.7904841899871826.
445
+ [ Fri Sep 16 01:15:45 2022 ] Top1: 54.78%
446
+ [ Fri Sep 16 01:15:45 2022 ] Top5: 81.29%
447
+ [ Fri Sep 16 01:15:45 2022 ] Training epoch: 68
448
+ [ Fri Sep 16 01:16:22 2022 ] Batch(45/162) done. Loss: 0.0099 lr:0.010000 network_time: 0.0278
449
+ [ Fri Sep 16 01:17:35 2022 ] Batch(145/162) done. Loss: 0.0066 lr:0.010000 network_time: 0.0277
450
+ [ Fri Sep 16 01:17:47 2022 ] Eval epoch: 68
451
+ [ Fri Sep 16 01:19:36 2022 ] Mean test loss of 930 batches: 2.7633280754089355.
452
+ [ Fri Sep 16 01:19:37 2022 ] Top1: 54.63%
453
+ [ Fri Sep 16 01:19:37 2022 ] Top5: 81.23%
454
+ [ Fri Sep 16 01:19:38 2022 ] Training epoch: 69
455
+ [ Fri Sep 16 01:20:42 2022 ] Batch(83/162) done. Loss: 0.0115 lr:0.010000 network_time: 0.0280
456
+ [ Fri Sep 16 01:21:39 2022 ] Eval epoch: 69
457
+ [ Fri Sep 16 01:23:29 2022 ] Mean test loss of 930 batches: 2.717859983444214.
458
+ [ Fri Sep 16 01:23:29 2022 ] Top1: 54.13%
459
+ [ Fri Sep 16 01:23:30 2022 ] Top5: 80.91%
460
+ [ Fri Sep 16 01:23:30 2022 ] Training epoch: 70
461
+ [ Fri Sep 16 01:23:49 2022 ] Batch(21/162) done. Loss: 0.0243 lr:0.010000 network_time: 0.0261
462
+ [ Fri Sep 16 01:25:01 2022 ] Batch(121/162) done. Loss: 0.0246 lr:0.010000 network_time: 0.0312
463
+ [ Fri Sep 16 01:25:31 2022 ] Eval epoch: 70
464
+ [ Fri Sep 16 01:27:21 2022 ] Mean test loss of 930 batches: 2.8249881267547607.
465
+ [ Fri Sep 16 01:27:21 2022 ] Top1: 55.01%
466
+ [ Fri Sep 16 01:27:22 2022 ] Top5: 81.17%
467
+ [ Fri Sep 16 01:27:22 2022 ] Training epoch: 71
468
+ [ Fri Sep 16 01:28:09 2022 ] Batch(59/162) done. Loss: 0.0115 lr:0.010000 network_time: 0.0270
469
+ [ Fri Sep 16 01:29:21 2022 ] Batch(159/162) done. Loss: 0.0081 lr:0.010000 network_time: 0.0281
470
+ [ Fri Sep 16 01:29:23 2022 ] Eval epoch: 71
471
+ [ Fri Sep 16 01:31:13 2022 ] Mean test loss of 930 batches: 2.6873366832733154.
472
+ [ Fri Sep 16 01:31:14 2022 ] Top1: 54.78%
473
+ [ Fri Sep 16 01:31:14 2022 ] Top5: 81.29%
474
+ [ Fri Sep 16 01:31:14 2022 ] Training epoch: 72
475
+ [ Fri Sep 16 01:32:28 2022 ] Batch(97/162) done. Loss: 0.0124 lr:0.010000 network_time: 0.0311
476
+ [ Fri Sep 16 01:33:15 2022 ] Eval epoch: 72
477
+ [ Fri Sep 16 01:35:05 2022 ] Mean test loss of 930 batches: 2.838772773742676.
478
+ [ Fri Sep 16 01:35:06 2022 ] Top1: 55.18%
479
+ [ Fri Sep 16 01:35:06 2022 ] Top5: 81.33%
480
+ [ Fri Sep 16 01:35:06 2022 ] Training epoch: 73
481
+ [ Fri Sep 16 01:35:35 2022 ] Batch(35/162) done. Loss: 0.0055 lr:0.010000 network_time: 0.0290
482
+ [ Fri Sep 16 01:36:48 2022 ] Batch(135/162) done. Loss: 0.0075 lr:0.010000 network_time: 0.0303
483
+ [ Fri Sep 16 01:37:07 2022 ] Eval epoch: 73
484
+ [ Fri Sep 16 01:38:57 2022 ] Mean test loss of 930 batches: 2.8878660202026367.
485
+ [ Fri Sep 16 01:38:58 2022 ] Top1: 55.20%
486
+ [ Fri Sep 16 01:38:58 2022 ] Top5: 81.34%
487
+ [ Fri Sep 16 01:38:58 2022 ] Training epoch: 74
488
+ [ Fri Sep 16 01:39:55 2022 ] Batch(73/162) done. Loss: 0.0124 lr:0.010000 network_time: 0.0287
489
+ [ Fri Sep 16 01:40:59 2022 ] Eval epoch: 74
490
+ [ Fri Sep 16 01:42:49 2022 ] Mean test loss of 930 batches: 2.664907455444336.
491
+ [ Fri Sep 16 01:42:50 2022 ] Top1: 55.28%
492
+ [ Fri Sep 16 01:42:50 2022 ] Top5: 81.36%
493
+ [ Fri Sep 16 01:42:51 2022 ] Training epoch: 75
494
+ [ Fri Sep 16 01:43:02 2022 ] Batch(11/162) done. Loss: 0.0043 lr:0.010000 network_time: 0.0273
495
+ [ Fri Sep 16 01:44:15 2022 ] Batch(111/162) done. Loss: 0.0047 lr:0.010000 network_time: 0.0333
496
+ [ Fri Sep 16 01:44:52 2022 ] Eval epoch: 75
497
+ [ Fri Sep 16 01:46:41 2022 ] Mean test loss of 930 batches: 2.6683382987976074.
498
+ [ Fri Sep 16 01:46:42 2022 ] Top1: 54.45%
499
+ [ Fri Sep 16 01:46:42 2022 ] Top5: 80.98%
500
+ [ Fri Sep 16 01:46:42 2022 ] Training epoch: 76
501
+ [ Fri Sep 16 01:47:22 2022 ] Batch(49/162) done. Loss: 0.0093 lr:0.010000 network_time: 0.0279
502
+ [ Fri Sep 16 01:48:35 2022 ] Batch(149/162) done. Loss: 0.0145 lr:0.010000 network_time: 0.0253
503
+ [ Fri Sep 16 01:48:44 2022 ] Eval epoch: 76
504
+ [ Fri Sep 16 01:50:33 2022 ] Mean test loss of 930 batches: 2.8474338054656982.
505
+ [ Fri Sep 16 01:50:34 2022 ] Top1: 55.17%
506
+ [ Fri Sep 16 01:50:34 2022 ] Top5: 81.17%
507
+ [ Fri Sep 16 01:50:34 2022 ] Training epoch: 77
508
+ [ Fri Sep 16 01:51:42 2022 ] Batch(87/162) done. Loss: 0.0102 lr:0.010000 network_time: 0.0264
509
+ [ Fri Sep 16 01:52:36 2022 ] Eval epoch: 77
510
+ [ Fri Sep 16 01:54:25 2022 ] Mean test loss of 930 batches: 2.8973159790039062.
511
+ [ Fri Sep 16 01:54:26 2022 ] Top1: 55.34%
512
+ [ Fri Sep 16 01:54:26 2022 ] Top5: 81.56%
513
+ [ Fri Sep 16 01:54:26 2022 ] Training epoch: 78
514
+ [ Fri Sep 16 01:54:48 2022 ] Batch(25/162) done. Loss: 0.0102 lr:0.010000 network_time: 0.0296
515
+ [ Fri Sep 16 01:56:01 2022 ] Batch(125/162) done. Loss: 0.0087 lr:0.010000 network_time: 0.0272
516
+ [ Fri Sep 16 01:56:28 2022 ] Eval epoch: 78
517
+ [ Fri Sep 16 01:58:17 2022 ] Mean test loss of 930 batches: 2.6789896488189697.
518
+ [ Fri Sep 16 01:58:18 2022 ] Top1: 54.80%
519
+ [ Fri Sep 16 01:58:18 2022 ] Top5: 81.20%
520
+ [ Fri Sep 16 01:58:18 2022 ] Training epoch: 79
521
+ [ Fri Sep 16 01:59:08 2022 ] Batch(63/162) done. Loss: 0.0068 lr:0.010000 network_time: 0.0500
522
+ [ Fri Sep 16 02:00:19 2022 ] Eval epoch: 79
523
+ [ Fri Sep 16 02:02:09 2022 ] Mean test loss of 930 batches: 2.7858283519744873.
524
+ [ Fri Sep 16 02:02:09 2022 ] Top1: 54.91%
525
+ [ Fri Sep 16 02:02:10 2022 ] Top5: 81.21%
526
+ [ Fri Sep 16 02:02:10 2022 ] Training epoch: 80
527
+ [ Fri Sep 16 02:02:15 2022 ] Batch(1/162) done. Loss: 0.0035 lr:0.010000 network_time: 0.0303
528
+ [ Fri Sep 16 02:03:27 2022 ] Batch(101/162) done. Loss: 0.0141 lr:0.010000 network_time: 0.0399
529
+ [ Fri Sep 16 02:04:11 2022 ] Eval epoch: 80
530
+ [ Fri Sep 16 02:06:01 2022 ] Mean test loss of 930 batches: 2.8534252643585205.
531
+ [ Fri Sep 16 02:06:02 2022 ] Top1: 55.02%
532
+ [ Fri Sep 16 02:06:02 2022 ] Top5: 81.41%
533
+ [ Fri Sep 16 02:06:02 2022 ] Training epoch: 81
534
+ [ Fri Sep 16 02:06:34 2022 ] Batch(39/162) done. Loss: 0.0035 lr:0.001000 network_time: 0.0254
535
+ [ Fri Sep 16 02:07:47 2022 ] Batch(139/162) done. Loss: 0.0039 lr:0.001000 network_time: 0.0233
536
+ [ Fri Sep 16 02:08:03 2022 ] Eval epoch: 81
537
+ [ Fri Sep 16 02:09:52 2022 ] Mean test loss of 930 batches: 2.6803336143493652.
538
+ [ Fri Sep 16 02:09:53 2022 ] Top1: 55.20%
539
+ [ Fri Sep 16 02:09:53 2022 ] Top5: 81.48%
540
+ [ Fri Sep 16 02:09:53 2022 ] Training epoch: 82
541
+ [ Fri Sep 16 02:10:53 2022 ] Batch(77/162) done. Loss: 0.0049 lr:0.001000 network_time: 0.0262
542
+ [ Fri Sep 16 02:11:54 2022 ] Eval epoch: 82
543
+ [ Fri Sep 16 02:13:44 2022 ] Mean test loss of 930 batches: 2.7156152725219727.
544
+ [ Fri Sep 16 02:13:44 2022 ] Top1: 55.67%
545
+ [ Fri Sep 16 02:13:45 2022 ] Top5: 81.66%
546
+ [ Fri Sep 16 02:13:45 2022 ] Training epoch: 83
547
+ [ Fri Sep 16 02:14:00 2022 ] Batch(15/162) done. Loss: 0.0084 lr:0.001000 network_time: 0.0341
548
+ [ Fri Sep 16 02:15:12 2022 ] Batch(115/162) done. Loss: 0.0417 lr:0.001000 network_time: 0.0321
549
+ [ Fri Sep 16 02:15:46 2022 ] Eval epoch: 83
550
+ [ Fri Sep 16 02:17:36 2022 ] Mean test loss of 930 batches: 2.685029983520508.
551
+ [ Fri Sep 16 02:17:36 2022 ] Top1: 54.72%
552
+ [ Fri Sep 16 02:17:37 2022 ] Top5: 81.19%
553
+ [ Fri Sep 16 02:17:37 2022 ] Training epoch: 84
554
+ [ Fri Sep 16 02:18:19 2022 ] Batch(53/162) done. Loss: 0.0095 lr:0.001000 network_time: 0.0280
555
+ [ Fri Sep 16 02:19:32 2022 ] Batch(153/162) done. Loss: 0.0193 lr:0.001000 network_time: 0.0274
556
+ [ Fri Sep 16 02:19:38 2022 ] Eval epoch: 84
557
+ [ Fri Sep 16 02:21:28 2022 ] Mean test loss of 930 batches: 2.905918598175049.
558
+ [ Fri Sep 16 02:21:28 2022 ] Top1: 55.23%
559
+ [ Fri Sep 16 02:21:29 2022 ] Top5: 81.10%
560
+ [ Fri Sep 16 02:21:29 2022 ] Training epoch: 85
561
+ [ Fri Sep 16 02:22:39 2022 ] Batch(91/162) done. Loss: 0.0063 lr:0.001000 network_time: 0.0309
562
+ [ Fri Sep 16 02:23:30 2022 ] Eval epoch: 85
563
+ [ Fri Sep 16 02:25:20 2022 ] Mean test loss of 930 batches: 2.808967351913452.
564
+ [ Fri Sep 16 02:25:20 2022 ] Top1: 55.04%
565
+ [ Fri Sep 16 02:25:21 2022 ] Top5: 81.41%
566
+ [ Fri Sep 16 02:25:21 2022 ] Training epoch: 86
567
+ [ Fri Sep 16 02:25:46 2022 ] Batch(29/162) done. Loss: 0.0053 lr:0.001000 network_time: 0.0268
568
+ [ Fri Sep 16 02:26:59 2022 ] Batch(129/162) done. Loss: 0.0089 lr:0.001000 network_time: 0.0285
569
+ [ Fri Sep 16 02:27:22 2022 ] Eval epoch: 86
570
+ [ Fri Sep 16 02:29:12 2022 ] Mean test loss of 930 batches: 2.6798934936523438.
571
+ [ Fri Sep 16 02:29:12 2022 ] Top1: 54.68%
572
+ [ Fri Sep 16 02:29:13 2022 ] Top5: 81.19%
573
+ [ Fri Sep 16 02:29:13 2022 ] Training epoch: 87
574
+ [ Fri Sep 16 02:30:05 2022 ] Batch(67/162) done. Loss: 0.0089 lr:0.001000 network_time: 0.0313
575
+ [ Fri Sep 16 02:31:14 2022 ] Eval epoch: 87
576
+ [ Fri Sep 16 02:33:04 2022 ] Mean test loss of 930 batches: 2.7120673656463623.
577
+ [ Fri Sep 16 02:33:04 2022 ] Top1: 55.48%
578
+ [ Fri Sep 16 02:33:04 2022 ] Top5: 81.55%
579
+ [ Fri Sep 16 02:33:05 2022 ] Training epoch: 88
580
+ [ Fri Sep 16 02:33:12 2022 ] Batch(5/162) done. Loss: 0.0106 lr:0.001000 network_time: 0.0268
581
+ [ Fri Sep 16 02:34:25 2022 ] Batch(105/162) done. Loss: 0.0082 lr:0.001000 network_time: 0.0269
582
+ [ Fri Sep 16 02:35:06 2022 ] Eval epoch: 88
583
+ [ Fri Sep 16 02:36:55 2022 ] Mean test loss of 930 batches: 2.6429519653320312.
584
+ [ Fri Sep 16 02:36:55 2022 ] Top1: 55.39%
585
+ [ Fri Sep 16 02:36:56 2022 ] Top5: 81.70%
586
+ [ Fri Sep 16 02:36:56 2022 ] Training epoch: 89
587
+ [ Fri Sep 16 02:37:31 2022 ] Batch(43/162) done. Loss: 0.0084 lr:0.001000 network_time: 0.0294
588
+ [ Fri Sep 16 02:38:44 2022 ] Batch(143/162) done. Loss: 0.0078 lr:0.001000 network_time: 0.0392
589
+ [ Fri Sep 16 02:38:57 2022 ] Eval epoch: 89
590
+ [ Fri Sep 16 02:40:47 2022 ] Mean test loss of 930 batches: 2.7270517349243164.
591
+ [ Fri Sep 16 02:40:47 2022 ] Top1: 54.51%
592
+ [ Fri Sep 16 02:40:48 2022 ] Top5: 81.10%
593
+ [ Fri Sep 16 02:40:48 2022 ] Training epoch: 90
594
+ [ Fri Sep 16 02:41:50 2022 ] Batch(81/162) done. Loss: 0.0173 lr:0.001000 network_time: 0.0349
595
+ [ Fri Sep 16 02:42:49 2022 ] Eval epoch: 90
596
+ [ Fri Sep 16 02:44:39 2022 ] Mean test loss of 930 batches: 2.8094165325164795.
597
+ [ Fri Sep 16 02:44:39 2022 ] Top1: 54.99%
598
+ [ Fri Sep 16 02:44:40 2022 ] Top5: 81.39%
599
+ [ Fri Sep 16 02:44:40 2022 ] Training epoch: 91
600
+ [ Fri Sep 16 02:44:57 2022 ] Batch(19/162) done. Loss: 0.0077 lr:0.001000 network_time: 0.0302
601
+ [ Fri Sep 16 02:46:10 2022 ] Batch(119/162) done. Loss: 0.0108 lr:0.001000 network_time: 0.0271
602
+ [ Fri Sep 16 02:46:41 2022 ] Eval epoch: 91
603
+ [ Fri Sep 16 02:48:31 2022 ] Mean test loss of 930 batches: 2.7365074157714844.
604
+ [ Fri Sep 16 02:48:31 2022 ] Top1: 55.28%
605
+ [ Fri Sep 16 02:48:32 2022 ] Top5: 81.50%
606
+ [ Fri Sep 16 02:48:32 2022 ] Training epoch: 92
607
+ [ Fri Sep 16 02:49:17 2022 ] Batch(57/162) done. Loss: 0.0151 lr:0.001000 network_time: 0.0319
608
+ [ Fri Sep 16 02:50:30 2022 ] Batch(157/162) done. Loss: 0.0383 lr:0.001000 network_time: 0.0283
609
+ [ Fri Sep 16 02:50:33 2022 ] Eval epoch: 92
610
+ [ Fri Sep 16 02:52:22 2022 ] Mean test loss of 930 batches: 2.6518261432647705.
611
+ [ Fri Sep 16 02:52:23 2022 ] Top1: 54.07%
612
+ [ Fri Sep 16 02:52:23 2022 ] Top5: 81.02%
613
+ [ Fri Sep 16 02:52:23 2022 ] Training epoch: 93
614
+ [ Fri Sep 16 02:53:36 2022 ] Batch(95/162) done. Loss: 0.0064 lr:0.001000 network_time: 0.0273
615
+ [ Fri Sep 16 02:54:24 2022 ] Eval epoch: 93
616
+ [ Fri Sep 16 02:56:14 2022 ] Mean test loss of 930 batches: 2.8818185329437256.
617
+ [ Fri Sep 16 02:56:14 2022 ] Top1: 54.94%
618
+ [ Fri Sep 16 02:56:15 2022 ] Top5: 81.20%
619
+ [ Fri Sep 16 02:56:15 2022 ] Training epoch: 94
620
+ [ Fri Sep 16 02:56:43 2022 ] Batch(33/162) done. Loss: 0.0079 lr:0.001000 network_time: 0.0273
621
+ [ Fri Sep 16 02:57:55 2022 ] Batch(133/162) done. Loss: 0.0125 lr:0.001000 network_time: 0.0282
622
+ [ Fri Sep 16 02:58:16 2022 ] Eval epoch: 94
623
+ [ Fri Sep 16 03:00:05 2022 ] Mean test loss of 930 batches: 2.7161922454833984.
624
+ [ Fri Sep 16 03:00:06 2022 ] Top1: 55.58%
625
+ [ Fri Sep 16 03:00:06 2022 ] Top5: 81.59%
626
+ [ Fri Sep 16 03:00:06 2022 ] Training epoch: 95
627
+ [ Fri Sep 16 03:01:02 2022 ] Batch(71/162) done. Loss: 0.0110 lr:0.001000 network_time: 0.0322
628
+ [ Fri Sep 16 03:02:07 2022 ] Eval epoch: 95
629
+ [ Fri Sep 16 03:03:57 2022 ] Mean test loss of 930 batches: 2.7197964191436768.
630
+ [ Fri Sep 16 03:03:57 2022 ] Top1: 55.39%
631
+ [ Fri Sep 16 03:03:58 2022 ] Top5: 81.45%
632
+ [ Fri Sep 16 03:03:58 2022 ] Training epoch: 96
633
+ [ Fri Sep 16 03:04:08 2022 ] Batch(9/162) done. Loss: 0.0097 lr:0.001000 network_time: 0.0274
634
+ [ Fri Sep 16 03:05:21 2022 ] Batch(109/162) done. Loss: 0.0032 lr:0.001000 network_time: 0.0272
635
+ [ Fri Sep 16 03:05:59 2022 ] Eval epoch: 96
636
+ [ Fri Sep 16 03:07:49 2022 ] Mean test loss of 930 batches: 2.823249578475952.
637
+ [ Fri Sep 16 03:07:49 2022 ] Top1: 53.73%
638
+ [ Fri Sep 16 03:07:50 2022 ] Top5: 80.80%
639
+ [ Fri Sep 16 03:07:50 2022 ] Training epoch: 97
640
+ [ Fri Sep 16 03:08:28 2022 ] Batch(47/162) done. Loss: 0.0206 lr:0.001000 network_time: 0.0288
641
+ [ Fri Sep 16 03:09:41 2022 ] Batch(147/162) done. Loss: 0.0045 lr:0.001000 network_time: 0.0276
642
+ [ Fri Sep 16 03:09:51 2022 ] Eval epoch: 97
643
+ [ Fri Sep 16 03:11:41 2022 ] Mean test loss of 930 batches: 2.73584246635437.
644
+ [ Fri Sep 16 03:11:41 2022 ] Top1: 55.20%
645
+ [ Fri Sep 16 03:11:41 2022 ] Top5: 81.49%
646
+ [ Fri Sep 16 03:11:42 2022 ] Training epoch: 98
647
+ [ Fri Sep 16 03:12:47 2022 ] Batch(85/162) done. Loss: 0.0076 lr:0.001000 network_time: 0.0250
648
+ [ Fri Sep 16 03:13:43 2022 ] Eval epoch: 98
649
+ [ Fri Sep 16 03:15:32 2022 ] Mean test loss of 930 batches: 2.759752035140991.
650
+ [ Fri Sep 16 03:15:33 2022 ] Top1: 54.60%
651
+ [ Fri Sep 16 03:15:33 2022 ] Top5: 81.10%
652
+ [ Fri Sep 16 03:15:33 2022 ] Training epoch: 99
653
+ [ Fri Sep 16 03:15:54 2022 ] Batch(23/162) done. Loss: 0.0042 lr:0.001000 network_time: 0.0276
654
+ [ Fri Sep 16 03:17:06 2022 ] Batch(123/162) done. Loss: 0.0093 lr:0.001000 network_time: 0.0273
655
+ [ Fri Sep 16 03:17:34 2022 ] Eval epoch: 99
656
+ [ Fri Sep 16 03:19:24 2022 ] Mean test loss of 930 batches: 2.6689939498901367.
657
+ [ Fri Sep 16 03:19:24 2022 ] Top1: 53.70%
658
+ [ Fri Sep 16 03:19:24 2022 ] Top5: 80.66%
659
+ [ Fri Sep 16 03:19:25 2022 ] Training epoch: 100
660
+ [ Fri Sep 16 03:20:13 2022 ] Batch(61/162) done. Loss: 0.0058 lr:0.001000 network_time: 0.0324
661
+ [ Fri Sep 16 03:21:25 2022 ] Batch(161/162) done. Loss: 0.0059 lr:0.001000 network_time: 0.0423
662
+ [ Fri Sep 16 03:21:26 2022 ] Eval epoch: 100
663
+ [ Fri Sep 16 03:23:15 2022 ] Mean test loss of 930 batches: 2.663466691970825.
664
+ [ Fri Sep 16 03:23:15 2022 ] Top1: 54.84%
665
+ [ Fri Sep 16 03:23:16 2022 ] Top5: 81.30%
666
+ [ Fri Sep 16 03:23:16 2022 ] Training epoch: 101
667
+ [ Fri Sep 16 03:24:31 2022 ] Batch(99/162) done. Loss: 0.0043 lr:0.000100 network_time: 0.0320
668
+ [ Fri Sep 16 03:25:17 2022 ] Eval epoch: 101
669
+ [ Fri Sep 16 03:27:06 2022 ] Mean test loss of 930 batches: 2.7768349647521973.
670
+ [ Fri Sep 16 03:27:07 2022 ] Top1: 55.20%
671
+ [ Fri Sep 16 03:27:07 2022 ] Top5: 81.58%
672
+ [ Fri Sep 16 03:27:07 2022 ] Training epoch: 102
673
+ [ Fri Sep 16 03:27:38 2022 ] Batch(37/162) done. Loss: 0.0066 lr:0.000100 network_time: 0.0269
674
+ [ Fri Sep 16 03:28:51 2022 ] Batch(137/162) done. Loss: 0.0044 lr:0.000100 network_time: 0.0440
675
+ [ Fri Sep 16 03:29:08 2022 ] Eval epoch: 102
676
+ [ Fri Sep 16 03:30:58 2022 ] Mean test loss of 930 batches: 2.6662395000457764.
677
+ [ Fri Sep 16 03:30:58 2022 ] Top1: 54.99%
678
+ [ Fri Sep 16 03:30:58 2022 ] Top5: 81.35%
679
+ [ Fri Sep 16 03:30:59 2022 ] Training epoch: 103
680
+ [ Fri Sep 16 03:31:57 2022 ] Batch(75/162) done. Loss: 0.0094 lr:0.000100 network_time: 0.0336
681
+ [ Fri Sep 16 03:32:59 2022 ] Eval epoch: 103
682
+ [ Fri Sep 16 03:34:49 2022 ] Mean test loss of 930 batches: 2.8566136360168457.
683
+ [ Fri Sep 16 03:34:50 2022 ] Top1: 55.30%
684
+ [ Fri Sep 16 03:34:50 2022 ] Top5: 81.46%
685
+ [ Fri Sep 16 03:34:50 2022 ] Training epoch: 104
686
+ [ Fri Sep 16 03:35:04 2022 ] Batch(13/162) done. Loss: 0.0033 lr:0.000100 network_time: 0.0313
687
+ [ Fri Sep 16 03:36:16 2022 ] Batch(113/162) done. Loss: 0.0040 lr:0.000100 network_time: 0.0251
688
+ [ Fri Sep 16 03:36:52 2022 ] Eval epoch: 104
689
+ [ Fri Sep 16 03:38:41 2022 ] Mean test loss of 930 batches: 2.852644681930542.
690
+ [ Fri Sep 16 03:38:41 2022 ] Top1: 55.15%
691
+ [ Fri Sep 16 03:38:42 2022 ] Top5: 81.41%
692
+ [ Fri Sep 16 03:38:42 2022 ] Training epoch: 105
693
+ [ Fri Sep 16 03:39:23 2022 ] Batch(51/162) done. Loss: 0.0089 lr:0.000100 network_time: 0.0280
694
+ [ Fri Sep 16 03:40:36 2022 ] Batch(151/162) done. Loss: 0.0130 lr:0.000100 network_time: 0.0296
695
+ [ Fri Sep 16 03:40:43 2022 ] Eval epoch: 105
696
+ [ Fri Sep 16 03:42:33 2022 ] Mean test loss of 930 batches: 2.7096853256225586.
697
+ [ Fri Sep 16 03:42:33 2022 ] Top1: 54.55%
698
+ [ Fri Sep 16 03:42:34 2022 ] Top5: 81.34%
699
+ [ Fri Sep 16 03:42:34 2022 ] Training epoch: 106
700
+ [ Fri Sep 16 03:43:42 2022 ] Batch(89/162) done. Loss: 0.0056 lr:0.000100 network_time: 0.0260
701
+ [ Fri Sep 16 03:44:35 2022 ] Eval epoch: 106
702
+ [ Fri Sep 16 03:46:24 2022 ] Mean test loss of 930 batches: 2.6621322631835938.
703
+ [ Fri Sep 16 03:46:25 2022 ] Top1: 55.51%
704
+ [ Fri Sep 16 03:46:25 2022 ] Top5: 81.55%
705
+ [ Fri Sep 16 03:46:25 2022 ] Training epoch: 107
706
+ [ Fri Sep 16 03:46:49 2022 ] Batch(27/162) done. Loss: 0.0082 lr:0.000100 network_time: 0.0310
707
+ [ Fri Sep 16 03:48:02 2022 ] Batch(127/162) done. Loss: 0.0041 lr:0.000100 network_time: 0.0255
708
+ [ Fri Sep 16 03:48:27 2022 ] Eval epoch: 107
709
+ [ Fri Sep 16 03:50:16 2022 ] Mean test loss of 930 batches: 2.7165894508361816.
710
+ [ Fri Sep 16 03:50:16 2022 ] Top1: 55.25%
711
+ [ Fri Sep 16 03:50:17 2022 ] Top5: 81.56%
712
+ [ Fri Sep 16 03:50:17 2022 ] Training epoch: 108
713
+ [ Fri Sep 16 03:51:08 2022 ] Batch(65/162) done. Loss: 0.0049 lr:0.000100 network_time: 0.0263
714
+ [ Fri Sep 16 03:52:18 2022 ] Eval epoch: 108
715
+ [ Fri Sep 16 03:54:08 2022 ] Mean test loss of 930 batches: 2.8998818397521973.
716
+ [ Fri Sep 16 03:54:08 2022 ] Top1: 55.12%
717
+ [ Fri Sep 16 03:54:08 2022 ] Top5: 81.39%
718
+ [ Fri Sep 16 03:54:09 2022 ] Training epoch: 109
719
+ [ Fri Sep 16 03:54:15 2022 ] Batch(3/162) done. Loss: 0.0049 lr:0.000100 network_time: 0.0276
720
+ [ Fri Sep 16 03:55:27 2022 ] Batch(103/162) done. Loss: 0.0049 lr:0.000100 network_time: 0.0318
721
+ [ Fri Sep 16 03:56:10 2022 ] Eval epoch: 109
722
+ [ Fri Sep 16 03:58:00 2022 ] Mean test loss of 930 batches: 2.7563014030456543.
723
+ [ Fri Sep 16 03:58:00 2022 ] Top1: 54.75%
724
+ [ Fri Sep 16 03:58:01 2022 ] Top5: 81.21%
725
+ [ Fri Sep 16 03:58:01 2022 ] Training epoch: 110
726
+ [ Fri Sep 16 03:58:34 2022 ] Batch(41/162) done. Loss: 0.0130 lr:0.000100 network_time: 0.0276
727
+ [ Fri Sep 16 03:59:47 2022 ] Batch(141/162) done. Loss: 0.0035 lr:0.000100 network_time: 0.0289
728
+ [ Fri Sep 16 04:00:02 2022 ] Eval epoch: 110
729
+ [ Fri Sep 16 04:01:51 2022 ] Mean test loss of 930 batches: 2.6887638568878174.
730
+ [ Fri Sep 16 04:01:52 2022 ] Top1: 54.75%
731
+ [ Fri Sep 16 04:01:52 2022 ] Top5: 81.20%
732
+ [ Fri Sep 16 04:01:53 2022 ] Training epoch: 111
733
+ [ Fri Sep 16 04:02:54 2022 ] Batch(79/162) done. Loss: 0.0023 lr:0.000100 network_time: 0.0279
734
+ [ Fri Sep 16 04:03:53 2022 ] Eval epoch: 111
735
+ [ Fri Sep 16 04:05:43 2022 ] Mean test loss of 930 batches: 2.7811808586120605.
736
+ [ Fri Sep 16 04:05:43 2022 ] Top1: 53.83%
737
+ [ Fri Sep 16 04:05:43 2022 ] Top5: 80.74%
738
+ [ Fri Sep 16 04:05:44 2022 ] Training epoch: 112
739
+ [ Fri Sep 16 04:05:59 2022 ] Batch(17/162) done. Loss: 0.0050 lr:0.000100 network_time: 0.0267
740
+ [ Fri Sep 16 04:07:12 2022 ] Batch(117/162) done. Loss: 0.0069 lr:0.000100 network_time: 0.0230
741
+ [ Fri Sep 16 04:07:44 2022 ] Eval epoch: 112
742
+ [ Fri Sep 16 04:09:34 2022 ] Mean test loss of 930 batches: 2.7857918739318848.
743
+ [ Fri Sep 16 04:09:34 2022 ] Top1: 55.56%
744
+ [ Fri Sep 16 04:09:35 2022 ] Top5: 81.54%
745
+ [ Fri Sep 16 04:09:35 2022 ] Training epoch: 113
746
+ [ Fri Sep 16 04:10:19 2022 ] Batch(55/162) done. Loss: 0.0080 lr:0.000100 network_time: 0.0274
747
+ [ Fri Sep 16 04:11:31 2022 ] Batch(155/162) done. Loss: 0.0046 lr:0.000100 network_time: 0.0267
748
+ [ Fri Sep 16 04:11:36 2022 ] Eval epoch: 113
749
+ [ Fri Sep 16 04:13:25 2022 ] Mean test loss of 930 batches: 2.7522940635681152.
750
+ [ Fri Sep 16 04:13:26 2022 ] Top1: 52.42%
751
+ [ Fri Sep 16 04:13:26 2022 ] Top5: 79.89%
752
+ [ Fri Sep 16 04:13:27 2022 ] Training epoch: 114
753
+ [ Fri Sep 16 04:14:38 2022 ] Batch(93/162) done. Loss: 0.0080 lr:0.000100 network_time: 0.0279
754
+ [ Fri Sep 16 04:15:28 2022 ] Eval epoch: 114
755
+ [ Fri Sep 16 04:17:17 2022 ] Mean test loss of 930 batches: 2.7601916790008545.
756
+ [ Fri Sep 16 04:17:18 2022 ] Top1: 55.44%
757
+ [ Fri Sep 16 04:17:18 2022 ] Top5: 81.62%
758
+ [ Fri Sep 16 04:17:18 2022 ] Training epoch: 115
759
+ [ Fri Sep 16 04:17:44 2022 ] Batch(31/162) done. Loss: 0.0056 lr:0.000100 network_time: 0.0279
760
+ [ Fri Sep 16 04:18:57 2022 ] Batch(131/162) done. Loss: 0.0066 lr:0.000100 network_time: 0.0255
761
+ [ Fri Sep 16 04:19:19 2022 ] Eval epoch: 115
762
+ [ Fri Sep 16 04:21:09 2022 ] Mean test loss of 930 batches: 2.7520415782928467.
763
+ [ Fri Sep 16 04:21:10 2022 ] Top1: 55.05%
764
+ [ Fri Sep 16 04:21:10 2022 ] Top5: 81.05%
765
+ [ Fri Sep 16 04:21:10 2022 ] Training epoch: 116
766
+ [ Fri Sep 16 04:22:04 2022 ] Batch(69/162) done. Loss: 0.0031 lr:0.000100 network_time: 0.0274
767
+ [ Fri Sep 16 04:23:11 2022 ] Eval epoch: 116
768
+ [ Fri Sep 16 04:25:01 2022 ] Mean test loss of 930 batches: 2.7753899097442627.
769
+ [ Fri Sep 16 04:25:01 2022 ] Top1: 55.38%
770
+ [ Fri Sep 16 04:25:02 2022 ] Top5: 81.63%
771
+ [ Fri Sep 16 04:25:02 2022 ] Training epoch: 117
772
+ [ Fri Sep 16 04:25:11 2022 ] Batch(7/162) done. Loss: 0.0121 lr:0.000100 network_time: 0.0308
773
+ [ Fri Sep 16 04:26:24 2022 ] Batch(107/162) done. Loss: 0.0023 lr:0.000100 network_time: 0.0230
774
+ [ Fri Sep 16 04:27:03 2022 ] Eval epoch: 117
775
+ [ Fri Sep 16 04:28:52 2022 ] Mean test loss of 930 batches: 2.677675724029541.
776
+ [ Fri Sep 16 04:28:53 2022 ] Top1: 55.50%
777
+ [ Fri Sep 16 04:28:53 2022 ] Top5: 81.51%
778
+ [ Fri Sep 16 04:28:53 2022 ] Training epoch: 118
779
+ [ Fri Sep 16 04:29:30 2022 ] Batch(45/162) done. Loss: 0.0061 lr:0.000100 network_time: 0.0275
780
+ [ Fri Sep 16 04:30:42 2022 ] Batch(145/162) done. Loss: 0.0047 lr:0.000100 network_time: 0.0271
781
+ [ Fri Sep 16 04:30:54 2022 ] Eval epoch: 118
782
+ [ Fri Sep 16 04:32:44 2022 ] Mean test loss of 930 batches: 2.7359731197357178.
783
+ [ Fri Sep 16 04:32:44 2022 ] Top1: 55.38%
784
+ [ Fri Sep 16 04:32:45 2022 ] Top5: 81.53%
785
+ [ Fri Sep 16 04:32:45 2022 ] Training epoch: 119
786
+ [ Fri Sep 16 04:33:49 2022 ] Batch(83/162) done. Loss: 0.0051 lr:0.000100 network_time: 0.0273
787
+ [ Fri Sep 16 04:34:46 2022 ] Eval epoch: 119
788
+ [ Fri Sep 16 04:36:36 2022 ] Mean test loss of 930 batches: 2.6921119689941406.
789
+ [ Fri Sep 16 04:36:36 2022 ] Top1: 55.72%
790
+ [ Fri Sep 16 04:36:36 2022 ] Top5: 81.66%
791
+ [ Fri Sep 16 04:36:37 2022 ] Training epoch: 120
792
+ [ Fri Sep 16 04:36:56 2022 ] Batch(21/162) done. Loss: 0.0079 lr:0.000100 network_time: 0.0326
793
+ [ Fri Sep 16 04:38:09 2022 ] Batch(121/162) done. Loss: 0.0039 lr:0.000100 network_time: 0.0281
794
+ [ Fri Sep 16 04:38:38 2022 ] Eval epoch: 120
795
+ [ Fri Sep 16 04:40:27 2022 ] Mean test loss of 930 batches: 2.6823599338531494.
796
+ [ Fri Sep 16 04:40:28 2022 ] Top1: 55.41%
797
+ [ Fri Sep 16 04:40:28 2022 ] Top5: 81.65%
798
+ [ Fri Sep 16 04:40:29 2022 ] Training epoch: 121
799
+ [ Fri Sep 16 04:41:15 2022 ] Batch(59/162) done. Loss: 0.0046 lr:0.000100 network_time: 0.0319
800
+ [ Fri Sep 16 04:42:28 2022 ] Batch(159/162) done. Loss: 0.0268 lr:0.000100 network_time: 0.0277
801
+ [ Fri Sep 16 04:42:30 2022 ] Eval epoch: 121
802
+ [ Fri Sep 16 04:44:19 2022 ] Mean test loss of 930 batches: 2.6829521656036377.
803
+ [ Fri Sep 16 04:44:20 2022 ] Top1: 55.41%
804
+ [ Fri Sep 16 04:44:20 2022 ] Top5: 81.51%
805
+ [ Fri Sep 16 04:44:20 2022 ] Training epoch: 122
806
+ [ Fri Sep 16 04:45:35 2022 ] Batch(97/162) done. Loss: 0.0045 lr:0.000100 network_time: 0.0272
807
+ [ Fri Sep 16 04:46:21 2022 ] Eval epoch: 122
808
+ [ Fri Sep 16 04:48:11 2022 ] Mean test loss of 930 batches: 2.6855432987213135.
809
+ [ Fri Sep 16 04:48:12 2022 ] Top1: 55.09%
810
+ [ Fri Sep 16 04:48:12 2022 ] Top5: 81.43%
811
+ [ Fri Sep 16 04:48:12 2022 ] Training epoch: 123
812
+ [ Fri Sep 16 04:48:41 2022 ] Batch(35/162) done. Loss: 0.0065 lr:0.000100 network_time: 0.0278
813
+ [ Fri Sep 16 04:49:54 2022 ] Batch(135/162) done. Loss: 0.0055 lr:0.000100 network_time: 0.0276
814
+ [ Fri Sep 16 04:50:13 2022 ] Eval epoch: 123
815
+ [ Fri Sep 16 04:52:03 2022 ] Mean test loss of 930 batches: 2.808767557144165.
816
+ [ Fri Sep 16 04:52:03 2022 ] Top1: 54.43%
817
+ [ Fri Sep 16 04:52:04 2022 ] Top5: 81.16%
818
+ [ Fri Sep 16 04:52:04 2022 ] Training epoch: 124
819
+ [ Fri Sep 16 04:53:01 2022 ] Batch(73/162) done. Loss: 0.0041 lr:0.000100 network_time: 0.0274
820
+ [ Fri Sep 16 04:54:05 2022 ] Eval epoch: 124
821
+ [ Fri Sep 16 04:55:55 2022 ] Mean test loss of 930 batches: 2.7997090816497803.
822
+ [ Fri Sep 16 04:55:55 2022 ] Top1: 55.73%
823
+ [ Fri Sep 16 04:55:56 2022 ] Top5: 81.60%
824
+ [ Fri Sep 16 04:55:56 2022 ] Training epoch: 125
825
+ [ Fri Sep 16 04:56:08 2022 ] Batch(11/162) done. Loss: 0.0022 lr:0.000100 network_time: 0.0291
826
+ [ Fri Sep 16 04:57:21 2022 ] Batch(111/162) done. Loss: 0.0030 lr:0.000100 network_time: 0.0370
827
+ [ Fri Sep 16 04:57:57 2022 ] Eval epoch: 125
828
+ [ Fri Sep 16 04:59:46 2022 ] Mean test loss of 930 batches: 2.8247811794281006.
829
+ [ Fri Sep 16 04:59:47 2022 ] Top1: 54.96%
830
+ [ Fri Sep 16 04:59:47 2022 ] Top5: 81.33%
831
+ [ Fri Sep 16 04:59:48 2022 ] Training epoch: 126
832
+ [ Fri Sep 16 05:00:27 2022 ] Batch(49/162) done. Loss: 0.0083 lr:0.000100 network_time: 0.0265
833
+ [ Fri Sep 16 05:01:40 2022 ] Batch(149/162) done. Loss: 0.0065 lr:0.000100 network_time: 0.0265
834
+ [ Fri Sep 16 05:01:49 2022 ] Eval epoch: 126
835
+ [ Fri Sep 16 05:03:38 2022 ] Mean test loss of 930 batches: 2.709444046020508.
836
+ [ Fri Sep 16 05:03:38 2022 ] Top1: 54.90%
837
+ [ Fri Sep 16 05:03:39 2022 ] Top5: 81.36%
838
+ [ Fri Sep 16 05:03:39 2022 ] Training epoch: 127
839
+ [ Fri Sep 16 05:04:46 2022 ] Batch(87/162) done. Loss: 0.0068 lr:0.000100 network_time: 0.0284
840
+ [ Fri Sep 16 05:05:40 2022 ] Eval epoch: 127
841
+ [ Fri Sep 16 05:07:29 2022 ] Mean test loss of 930 batches: 2.721369743347168.
842
+ [ Fri Sep 16 05:07:30 2022 ] Top1: 55.40%
843
+ [ Fri Sep 16 05:07:30 2022 ] Top5: 81.58%
844
+ [ Fri Sep 16 05:07:31 2022 ] Training epoch: 128
845
+ [ Fri Sep 16 05:07:52 2022 ] Batch(25/162) done. Loss: 0.0107 lr:0.000100 network_time: 0.0350
846
+ [ Fri Sep 16 05:09:05 2022 ] Batch(125/162) done. Loss: 0.0059 lr:0.000100 network_time: 0.0300
847
+ [ Fri Sep 16 05:09:32 2022 ] Eval epoch: 128
848
+ [ Fri Sep 16 05:11:21 2022 ] Mean test loss of 930 batches: 2.696549892425537.
849
+ [ Fri Sep 16 05:11:21 2022 ] Top1: 55.42%
850
+ [ Fri Sep 16 05:11:22 2022 ] Top5: 81.51%
851
+ [ Fri Sep 16 05:11:22 2022 ] Training epoch: 129
852
+ [ Fri Sep 16 05:12:11 2022 ] Batch(63/162) done. Loss: 0.0023 lr:0.000100 network_time: 0.0281
853
+ [ Fri Sep 16 05:13:23 2022 ] Eval epoch: 129
854
+ [ Fri Sep 16 05:15:12 2022 ] Mean test loss of 930 batches: 2.860775947570801.
855
+ [ Fri Sep 16 05:15:13 2022 ] Top1: 55.20%
856
+ [ Fri Sep 16 05:15:13 2022 ] Top5: 81.41%
857
+ [ Fri Sep 16 05:15:13 2022 ] Training epoch: 130
858
+ [ Fri Sep 16 05:15:18 2022 ] Batch(1/162) done. Loss: 0.0050 lr:0.000100 network_time: 0.0282
859
+ [ Fri Sep 16 05:16:30 2022 ] Batch(101/162) done. Loss: 0.0149 lr:0.000100 network_time: 0.0232
860
+ [ Fri Sep 16 05:17:14 2022 ] Eval epoch: 130
861
+ [ Fri Sep 16 05:19:04 2022 ] Mean test loss of 930 batches: 2.670889139175415.
862
+ [ Fri Sep 16 05:19:04 2022 ] Top1: 55.61%
863
+ [ Fri Sep 16 05:19:04 2022 ] Top5: 81.70%
864
+ [ Fri Sep 16 05:19:05 2022 ] Training epoch: 131
865
+ [ Fri Sep 16 05:19:37 2022 ] Batch(39/162) done. Loss: 0.0060 lr:0.000100 network_time: 0.0342
866
+ [ Fri Sep 16 05:20:50 2022 ] Batch(139/162) done. Loss: 0.0074 lr:0.000100 network_time: 0.0274
867
+ [ Fri Sep 16 05:21:06 2022 ] Eval epoch: 131
868
+ [ Fri Sep 16 05:22:55 2022 ] Mean test loss of 930 batches: 2.783825397491455.
869
+ [ Fri Sep 16 05:22:56 2022 ] Top1: 55.61%
870
+ [ Fri Sep 16 05:22:56 2022 ] Top5: 81.69%
871
+ [ Fri Sep 16 05:22:57 2022 ] Training epoch: 132
872
+ [ Fri Sep 16 05:23:56 2022 ] Batch(77/162) done. Loss: 0.0043 lr:0.000100 network_time: 0.0273
873
+ [ Fri Sep 16 05:24:58 2022 ] Eval epoch: 132
874
+ [ Fri Sep 16 05:26:47 2022 ] Mean test loss of 930 batches: 2.690352201461792.
875
+ [ Fri Sep 16 05:26:47 2022 ] Top1: 54.85%
876
+ [ Fri Sep 16 05:26:48 2022 ] Top5: 81.45%
877
+ [ Fri Sep 16 05:26:48 2022 ] Training epoch: 133
878
+ [ Fri Sep 16 05:27:03 2022 ] Batch(15/162) done. Loss: 0.0041 lr:0.000100 network_time: 0.0273
879
+ [ Fri Sep 16 05:28:15 2022 ] Batch(115/162) done. Loss: 0.0088 lr:0.000100 network_time: 0.0270
880
+ [ Fri Sep 16 05:28:49 2022 ] Eval epoch: 133
881
+ [ Fri Sep 16 05:30:39 2022 ] Mean test loss of 930 batches: 2.7724499702453613.
882
+ [ Fri Sep 16 05:30:39 2022 ] Top1: 53.06%
883
+ [ Fri Sep 16 05:30:39 2022 ] Top5: 80.30%
884
+ [ Fri Sep 16 05:30:40 2022 ] Training epoch: 134
885
+ [ Fri Sep 16 05:31:22 2022 ] Batch(53/162) done. Loss: 0.0106 lr:0.000100 network_time: 0.0274
886
+ [ Fri Sep 16 05:32:35 2022 ] Batch(153/162) done. Loss: 0.0159 lr:0.000100 network_time: 0.0266
887
+ [ Fri Sep 16 05:32:41 2022 ] Eval epoch: 134
888
+ [ Fri Sep 16 05:34:30 2022 ] Mean test loss of 930 batches: 2.824082136154175.
889
+ [ Fri Sep 16 05:34:31 2022 ] Top1: 53.24%
890
+ [ Fri Sep 16 05:34:31 2022 ] Top5: 80.41%
891
+ [ Fri Sep 16 05:34:31 2022 ] Training epoch: 135
892
+ [ Fri Sep 16 05:35:41 2022 ] Batch(91/162) done. Loss: 0.0025 lr:0.000100 network_time: 0.0316
893
+ [ Fri Sep 16 05:36:32 2022 ] Eval epoch: 135
894
+ [ Fri Sep 16 05:38:21 2022 ] Mean test loss of 930 batches: 2.7413904666900635.
895
+ [ Fri Sep 16 05:38:22 2022 ] Top1: 55.46%
896
+ [ Fri Sep 16 05:38:22 2022 ] Top5: 81.65%
897
+ [ Fri Sep 16 05:38:22 2022 ] Training epoch: 136
898
+ [ Fri Sep 16 05:38:47 2022 ] Batch(29/162) done. Loss: 0.0073 lr:0.000100 network_time: 0.0287
899
+ [ Fri Sep 16 05:40:00 2022 ] Batch(129/162) done. Loss: 0.0043 lr:0.000100 network_time: 0.0282
900
+ [ Fri Sep 16 05:40:23 2022 ] Eval epoch: 136
901
+ [ Fri Sep 16 05:42:13 2022 ] Mean test loss of 930 batches: 2.650205373764038.
902
+ [ Fri Sep 16 05:42:13 2022 ] Top1: 55.57%
903
+ [ Fri Sep 16 05:42:14 2022 ] Top5: 81.71%
904
+ [ Fri Sep 16 05:42:14 2022 ] Training epoch: 137
905
+ [ Fri Sep 16 05:43:07 2022 ] Batch(67/162) done. Loss: 0.0036 lr:0.000100 network_time: 0.0311
906
+ [ Fri Sep 16 05:44:16 2022 ] Eval epoch: 137
907
+ [ Fri Sep 16 05:46:05 2022 ] Mean test loss of 930 batches: 2.8283350467681885.
908
+ [ Fri Sep 16 05:46:05 2022 ] Top1: 55.00%
909
+ [ Fri Sep 16 05:46:06 2022 ] Top5: 81.16%
910
+ [ Fri Sep 16 05:46:06 2022 ] Training epoch: 138
911
+ [ Fri Sep 16 05:46:14 2022 ] Batch(5/162) done. Loss: 0.0037 lr:0.000100 network_time: 0.0520
912
+ [ Fri Sep 16 05:47:26 2022 ] Batch(105/162) done. Loss: 0.0045 lr:0.000100 network_time: 0.0287
913
+ [ Fri Sep 16 05:48:07 2022 ] Eval epoch: 138
914
+ [ Fri Sep 16 05:49:57 2022 ] Mean test loss of 930 batches: 2.746467113494873.
915
+ [ Fri Sep 16 05:49:57 2022 ] Top1: 55.47%
916
+ [ Fri Sep 16 05:49:57 2022 ] Top5: 81.55%
917
+ [ Fri Sep 16 05:49:58 2022 ] Training epoch: 139
918
+ [ Fri Sep 16 05:50:33 2022 ] Batch(43/162) done. Loss: 0.0108 lr:0.000100 network_time: 0.0343
919
+ [ Fri Sep 16 05:51:46 2022 ] Batch(143/162) done. Loss: 0.0025 lr:0.000100 network_time: 0.0269
920
+ [ Fri Sep 16 05:51:59 2022 ] Eval epoch: 139
921
+ [ Fri Sep 16 05:53:48 2022 ] Mean test loss of 930 batches: 2.8351552486419678.
922
+ [ Fri Sep 16 05:53:49 2022 ] Top1: 54.96%
923
+ [ Fri Sep 16 05:53:49 2022 ] Top5: 81.28%
924
+ [ Fri Sep 16 05:53:49 2022 ] Training epoch: 140
925
+ [ Fri Sep 16 05:54:52 2022 ] Batch(81/162) done. Loss: 0.0037 lr:0.000100 network_time: 0.0271
926
+ [ Fri Sep 16 05:55:50 2022 ] Eval epoch: 140
927
+ [ Fri Sep 16 05:57:39 2022 ] Mean test loss of 930 batches: 2.6571571826934814.
928
+ [ Fri Sep 16 05:57:39 2022 ] Top1: 54.60%
929
+ [ Fri Sep 16 05:57:40 2022 ] Top5: 81.13%
ckpt/Others/Shift-GCN/ntu120_xset/ntu120_joint_motion_xset/shift_gcn.py ADDED
@@ -0,0 +1,216 @@
+ import torch
+ import torch.nn as nn
+ import torch.nn.functional as F
+ from torch.autograd import Variable
+ import numpy as np
+ import math
+
+ import sys
+ sys.path.append("./model/Temporal_shift/")
+
+ from cuda.shift import Shift
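+ # Shift (compiled CUDA op shipped under ./model/Temporal_shift/) is the temporal shift
+ # operator used by Shift_tcn below to shift channel features along the time axis.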
+
+
+ def import_class(name):
+     components = name.split('.')
+     mod = __import__(components[0])
+     for comp in components[1:]:
+         mod = getattr(mod, comp)
+     return mod
+
+ def conv_init(conv):
+     nn.init.kaiming_normal_(conv.weight, mode='fan_out')
+     nn.init.constant_(conv.bias, 0)
+
+
+ def bn_init(bn, scale):
+     nn.init.constant_(bn.weight, scale)
+     nn.init.constant_(bn.bias, 0)
+
+
+ class tcn(nn.Module):
+     def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
+         super(tcn, self).__init__()
+         pad = int((kernel_size - 1) / 2)
+         self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
+                               stride=(stride, 1))
+
+         self.bn = nn.BatchNorm2d(out_channels)
+         self.relu = nn.ReLU()
+         conv_init(self.conv)
+         bn_init(self.bn, 1)
+
+     def forward(self, x):
+         x = self.bn(self.conv(x))
+         return x
+
+
+ class Shift_tcn(nn.Module):
+     def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
+         super(Shift_tcn, self).__init__()
+
+         self.in_channels = in_channels
+         self.out_channels = out_channels
+
+         self.bn = nn.BatchNorm2d(in_channels)
+         self.bn2 = nn.BatchNorm2d(in_channels)
+         bn_init(self.bn2, 1)
+         self.relu = nn.ReLU(inplace=True)
+         self.shift_in = Shift(channel=in_channels, stride=1, init_scale=1)
+         self.shift_out = Shift(channel=out_channels, stride=stride, init_scale=1)
+
+         self.temporal_linear = nn.Conv2d(in_channels, out_channels, 1)
+         nn.init.kaiming_normal_(self.temporal_linear.weight, mode='fan_out')
+
+     def forward(self, x):
+         x = self.bn(x)
+         # shift1
+         x = self.shift_in(x)
+         x = self.temporal_linear(x)
+         x = self.relu(x)
+         # shift2
+         x = self.shift_out(x)
+         x = self.bn2(x)
+         return x
+
+
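+ # Spatial shift graph convolution: joint features are flattened to a (V*C) vector,
+ # circularly shifted through the fixed index maps shift_in / shift_out (channel j of
+ # joint i is read from joint (i + j) % 25 on the way in and shifted back on the way out),
+ # gated by the learnable Feature_Mask, and mixed by a single per-joint linear layer.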
+ class Shift_gcn(nn.Module):
+     def __init__(self, in_channels, out_channels, A, coff_embedding=4, num_subset=3):
+         super(Shift_gcn, self).__init__()
+         self.in_channels = in_channels
+         self.out_channels = out_channels
+         if in_channels != out_channels:
+             self.down = nn.Sequential(
+                 nn.Conv2d(in_channels, out_channels, 1),
+                 nn.BatchNorm2d(out_channels)
+             )
+         else:
+             self.down = lambda x: x
+
+         self.Linear_weight = nn.Parameter(torch.zeros(in_channels, out_channels, requires_grad=True, device='cuda'), requires_grad=True)
+         nn.init.normal_(self.Linear_weight, 0, math.sqrt(1.0 / out_channels))
+
+         self.Linear_bias = nn.Parameter(torch.zeros(1, 1, out_channels, requires_grad=True, device='cuda'), requires_grad=True)
+         nn.init.constant_(self.Linear_bias, 0)
+
+         self.Feature_Mask = nn.Parameter(torch.ones(1, 25, in_channels, requires_grad=True, device='cuda'), requires_grad=True)
+         nn.init.constant_(self.Feature_Mask, 0)
+
+         self.bn = nn.BatchNorm1d(25 * out_channels)
+         self.relu = nn.ReLU()
+
+         for m in self.modules():
+             if isinstance(m, nn.Conv2d):
+                 conv_init(m)
+             elif isinstance(m, nn.BatchNorm2d):
+                 bn_init(m, 1)
+
+         index_array = np.empty(25 * in_channels).astype(np.int64)
+         for i in range(25):
+             for j in range(in_channels):
+                 index_array[i * in_channels + j] = (i * in_channels + j + j * in_channels) % (in_channels * 25)
+         self.shift_in = nn.Parameter(torch.from_numpy(index_array), requires_grad=False)
+
+         index_array = np.empty(25 * out_channels).astype(np.int64)
+         for i in range(25):
+             for j in range(out_channels):
+                 index_array[i * out_channels + j] = (i * out_channels + j - j * out_channels) % (out_channels * 25)
+         self.shift_out = nn.Parameter(torch.from_numpy(index_array), requires_grad=False)
+
+
121
+ def forward(self, x0):
122
+ n, c, t, v = x0.size()
123
+ x = x0.permute(0,2,3,1).contiguous()
124
+
125
+ # shift1
126
+ x = x.view(n*t,v*c)
127
+ x = torch.index_select(x, 1, self.shift_in)
128
+ x = x.view(n*t,v,c)
129
+ x = x * (torch.tanh(self.Feature_Mask)+1)
130
+
131
+ x = torch.einsum('nwc,cd->nwd', (x, self.Linear_weight)).contiguous() # nt,v,c
132
+ x = x + self.Linear_bias
133
+
134
+ # shift2
135
+ x = x.view(n*t,-1)
136
+ x = torch.index_select(x, 1, self.shift_out)
137
+ x = self.bn(x)
138
+ x = x.view(n,t,v,self.out_channels).permute(0,3,1,2) # n,c,t,v
139
+
140
+ x = x + self.down(x0)
141
+ x = self.relu(x)
142
+ return x
143
+
144
+
145
+ class TCN_GCN_unit(nn.Module):
146
+ def __init__(self, in_channels, out_channels, A, stride=1, residual=True):
147
+ super(TCN_GCN_unit, self).__init__()
148
+ self.gcn1 = Shift_gcn(in_channels, out_channels, A)
149
+ self.tcn1 = Shift_tcn(out_channels, out_channels, stride=stride)
150
+ self.relu = nn.ReLU()
151
+
152
+ if not residual:
153
+ self.residual = lambda x: 0
154
+
155
+ elif (in_channels == out_channels) and (stride == 1):
156
+ self.residual = lambda x: x
157
+ else:
158
+ self.residual = tcn(in_channels, out_channels, kernel_size=1, stride=stride)
159
+
160
+ def forward(self, x):
161
+ x = self.tcn1(self.gcn1(x)) + self.residual(x)
162
+ return self.relu(x)
163
+
164
+
165
+ class Model(nn.Module):
166
+ def __init__(self, num_class=60, num_point=25, num_person=2, graph=None, graph_args=dict(), in_channels=3):
167
+ super(Model, self).__init__()
168
+
169
+ if graph is None:
170
+ raise ValueError()
171
+ else:
172
+ Graph = import_class(graph)
173
+ self.graph = Graph(**graph_args)
174
+
175
+ A = self.graph.A
176
+ self.data_bn = nn.BatchNorm1d(num_person * in_channels * num_point)
177
+
178
+ self.l1 = TCN_GCN_unit(3, 64, A, residual=False)
179
+ self.l2 = TCN_GCN_unit(64, 64, A)
180
+ self.l3 = TCN_GCN_unit(64, 64, A)
181
+ self.l4 = TCN_GCN_unit(64, 64, A)
182
+ self.l5 = TCN_GCN_unit(64, 128, A, stride=2)
183
+ self.l6 = TCN_GCN_unit(128, 128, A)
184
+ self.l7 = TCN_GCN_unit(128, 128, A)
185
+ self.l8 = TCN_GCN_unit(128, 256, A, stride=2)
186
+ self.l9 = TCN_GCN_unit(256, 256, A)
187
+ self.l10 = TCN_GCN_unit(256, 256, A)
188
+
189
+ self.fc = nn.Linear(256, num_class)
190
+ nn.init.normal(self.fc.weight, 0, math.sqrt(2. / num_class))
191
+ bn_init(self.data_bn, 1)
192
+
193
+ def forward(self, x):
194
+ N, C, T, V, M = x.size()
195
+
196
+ x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
197
+ x = self.data_bn(x)
198
+ x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
199
+
200
+ x = self.l1(x)
201
+ x = self.l2(x)
202
+ x = self.l3(x)
203
+ x = self.l4(x)
204
+ x = self.l5(x)
205
+ x = self.l6(x)
206
+ x = self.l7(x)
207
+ x = self.l8(x)
208
+ x = self.l9(x)
209
+ x = self.l10(x)
210
+
211
+ # N*M,C,T,V
212
+ c_new = x.size(1)
213
+ x = x.view(N, M, c_new, -1)
214
+ x = x.mean(3).mean(1)
215
+
216
+ return self.fc(x)
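Note: for orientation only, a minimal sketch (not part of the uploaded files) of how this Model class is typically instantiated with the settings recorded in the config.yaml below. It assumes the original Shift-GCN repository layout (the model/ and graph/ packages on the Python path, plus the compiled cuda.shift extension from model/Temporal_shift) and a CUDA GPU, since the parameters above are created with device='cuda'.

# Hedged sketch: build the joint-stream Shift-GCN model used for this checkpoint.
# Assumes the Shift-GCN repo is importable and the cuda.shift extension is built.
import torch
from model.shift_gcn import Model  # the file shown above, per 'model: model.shift_gcn.Model'

model = Model(num_class=120, num_point=25, num_person=2,
              graph='graph.ntu_rgb_d.Graph',
              graph_args={'labeling_mode': 'spatial'}).cuda()

# Input layout is (N, C, T, V, M): batch, 3 coordinates, frames, 25 joints, 2 persons.
x = torch.randn(2, 3, 64, 25, 2).cuda()  # dummy skeleton batch
logits = model(x)                        # shape (2, 120)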
ckpt/Others/Shift-GCN/ntu120_xset/ntu120_joint_xset/config.yaml ADDED
@@ -0,0 +1,56 @@
+ Experiment_name: ntu120_joint_xset
+ base_lr: 0.1
+ batch_size: 64
+ config: ./config/ntu120_xset/train_joint.yaml
+ device:
+ - 4
+ - 5
+ eval_interval: 5
+ feeder: feeders.feeder.Feeder
+ ignore_weights: []
+ log_interval: 100
+ model: model.shift_gcn.Model
+ model_args:
+   graph: graph.ntu_rgb_d.Graph
+   graph_args:
+     labeling_mode: spatial
+   num_class: 120
+   num_person: 2
+   num_point: 25
+ model_saved_name: ./save_models/ntu120_joint_xset
+ nesterov: true
+ num_epoch: 140
+ num_worker: 32
+ only_train_epoch: 1
+ only_train_part: true
+ optimizer: SGD
+ phase: train
+ print_log: true
+ save_interval: 2
+ save_score: false
+ seed: 1
+ show_topk:
+ - 1
+ - 5
+ start_epoch: 0
+ step:
+ - 60
+ - 80
+ - 100
+ test_batch_size: 64
+ test_feeder_args:
+   data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_joint.npy
+   label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl
+ train_feeder_args:
+   data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_joint.npy
+   debug: false
+   label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl
+   normalization: false
+   random_choose: false
+   random_move: false
+   random_shift: false
+   window_size: -1
+ warm_up_epoch: 0
+ weight_decay: 0.0001
+ weights: null
+ work_dir: ./work_dir/ntu120_joint_xset
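For reference, a minimal sketch (assuming PyYAML; the snippet itself is not part of the upload) of reading this config to recover the model arguments and the learning-rate schedule used for the run logged below:

# Hedged sketch: load this checkpoint's training configuration with PyYAML.
import yaml

with open('ckpt/Others/Shift-GCN/ntu120_xset/ntu120_joint_xset/config.yaml') as f:
    cfg = yaml.safe_load(f)

print(cfg['model'])       # model.shift_gcn.Model
print(cfg['model_args'])  # graph, graph_args, num_class=120, num_point=25, num_person=2
print(cfg['step'])        # epochs at which the LR is decayed: [60, 80, 100]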
ckpt/Others/Shift-GCN/ntu120_xset/ntu120_joint_xset/eval_results/best_acc.pkl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:491e9c65f6add18507396ed4656525a266bcb9d27d032137752a84ee32646935
+ size 34946665
ckpt/Others/Shift-GCN/ntu120_xset/ntu120_joint_xset/log.txt ADDED
@@ -0,0 +1,929 @@
1
+ [ Thu Sep 15 20:53:21 2022 ] Parameters:
2
+ {'work_dir': './work_dir/ntu120_joint_xset', 'model_saved_name': './save_models/ntu120_joint_xset', 'Experiment_name': 'ntu120_joint_xset', 'config': './config/ntu120_xset/train_joint.yaml', 'phase': 'train', 'save_score': False, 'seed': 1, 'log_interval': 100, 'save_interval': 2, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl'}, 'model': 'model.shift_gcn.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.1, 'step': [60, 80, 100], 'device': [4, 5], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 64, 'test_batch_size': 64, 'start_epoch': 0, 'num_epoch': 140, 'weight_decay': 0.0001, 'only_train_part': True, 'only_train_epoch': 1, 'warm_up_epoch': 0}
3
+
4
+ [ Thu Sep 15 20:53:21 2022 ] Training epoch: 1
5
+ [ Thu Sep 15 20:54:39 2022 ] Batch(99/162) done. Loss: 3.0592 lr:0.100000 network_time: 0.0311
6
+ [ Thu Sep 15 20:55:24 2022 ] Eval epoch: 1
7
+ [ Thu Sep 15 20:57:13 2022 ] Mean test loss of 930 batches: 4.616232872009277.
8
+ [ Thu Sep 15 20:57:14 2022 ] Top1: 9.34%
9
+ [ Thu Sep 15 20:57:14 2022 ] Top5: 28.52%
10
+ [ Thu Sep 15 20:57:14 2022 ] Training epoch: 2
11
+ [ Thu Sep 15 20:57:45 2022 ] Batch(37/162) done. Loss: 2.1872 lr:0.100000 network_time: 0.0261
12
+ [ Thu Sep 15 20:58:58 2022 ] Batch(137/162) done. Loss: 2.5467 lr:0.100000 network_time: 0.0305
13
+ [ Thu Sep 15 20:59:16 2022 ] Eval epoch: 2
14
+ [ Thu Sep 15 21:01:04 2022 ] Mean test loss of 930 batches: 4.0941853523254395.
15
+ [ Thu Sep 15 21:01:04 2022 ] Top1: 17.66%
16
+ [ Thu Sep 15 21:01:05 2022 ] Top5: 40.94%
17
+ [ Thu Sep 15 21:01:05 2022 ] Training epoch: 3
18
+ [ Thu Sep 15 21:02:04 2022 ] Batch(75/162) done. Loss: 2.1980 lr:0.100000 network_time: 0.0310
19
+ [ Thu Sep 15 21:03:07 2022 ] Eval epoch: 3
20
+ [ Thu Sep 15 21:04:56 2022 ] Mean test loss of 930 batches: 3.8105180263519287.
21
+ [ Thu Sep 15 21:04:57 2022 ] Top1: 22.06%
22
+ [ Thu Sep 15 21:04:57 2022 ] Top5: 46.13%
23
+ [ Thu Sep 15 21:04:57 2022 ] Training epoch: 4
24
+ [ Thu Sep 15 21:05:11 2022 ] Batch(13/162) done. Loss: 2.1092 lr:0.100000 network_time: 0.0317
25
+ [ Thu Sep 15 21:06:24 2022 ] Batch(113/162) done. Loss: 2.0003 lr:0.100000 network_time: 0.0312
26
+ [ Thu Sep 15 21:06:59 2022 ] Eval epoch: 4
27
+ [ Thu Sep 15 21:08:48 2022 ] Mean test loss of 930 batches: 3.274050235748291.
28
+ [ Thu Sep 15 21:08:48 2022 ] Top1: 25.47%
29
+ [ Thu Sep 15 21:08:49 2022 ] Top5: 52.02%
30
+ [ Thu Sep 15 21:08:49 2022 ] Training epoch: 5
31
+ [ Thu Sep 15 21:09:30 2022 ] Batch(51/162) done. Loss: 1.7818 lr:0.100000 network_time: 0.0321
32
+ [ Thu Sep 15 21:10:43 2022 ] Batch(151/162) done. Loss: 2.0315 lr:0.100000 network_time: 0.0261
33
+ [ Thu Sep 15 21:10:50 2022 ] Eval epoch: 5
34
+ [ Thu Sep 15 21:12:39 2022 ] Mean test loss of 930 batches: 3.1566548347473145.
35
+ [ Thu Sep 15 21:12:39 2022 ] Top1: 30.55%
36
+ [ Thu Sep 15 21:12:40 2022 ] Top5: 57.31%
37
+ [ Thu Sep 15 21:12:40 2022 ] Training epoch: 6
38
+ [ Thu Sep 15 21:13:49 2022 ] Batch(89/162) done. Loss: 1.6567 lr:0.100000 network_time: 0.0264
39
+ [ Thu Sep 15 21:14:41 2022 ] Eval epoch: 6
40
+ [ Thu Sep 15 21:16:30 2022 ] Mean test loss of 930 batches: 3.1373534202575684.
41
+ [ Thu Sep 15 21:16:30 2022 ] Top1: 30.52%
42
+ [ Thu Sep 15 21:16:31 2022 ] Top5: 59.38%
43
+ [ Thu Sep 15 21:16:31 2022 ] Training epoch: 7
44
+ [ Thu Sep 15 21:16:55 2022 ] Batch(27/162) done. Loss: 1.5083 lr:0.100000 network_time: 0.0271
45
+ [ Thu Sep 15 21:18:07 2022 ] Batch(127/162) done. Loss: 1.1499 lr:0.100000 network_time: 0.0273
46
+ [ Thu Sep 15 21:18:32 2022 ] Eval epoch: 7
47
+ [ Thu Sep 15 21:20:21 2022 ] Mean test loss of 930 batches: 2.8039331436157227.
48
+ [ Thu Sep 15 21:20:22 2022 ] Top1: 33.04%
49
+ [ Thu Sep 15 21:20:22 2022 ] Top5: 64.04%
50
+ [ Thu Sep 15 21:20:22 2022 ] Training epoch: 8
51
+ [ Thu Sep 15 21:21:13 2022 ] Batch(65/162) done. Loss: 1.4526 lr:0.100000 network_time: 0.0333
52
+ [ Thu Sep 15 21:22:23 2022 ] Eval epoch: 8
53
+ [ Thu Sep 15 21:24:13 2022 ] Mean test loss of 930 batches: 2.763702392578125.
54
+ [ Thu Sep 15 21:24:13 2022 ] Top1: 34.54%
55
+ [ Thu Sep 15 21:24:14 2022 ] Top5: 65.50%
56
+ [ Thu Sep 15 21:24:14 2022 ] Training epoch: 9
57
+ [ Thu Sep 15 21:24:20 2022 ] Batch(3/162) done. Loss: 0.9275 lr:0.100000 network_time: 0.0312
58
+ [ Thu Sep 15 21:25:33 2022 ] Batch(103/162) done. Loss: 1.3035 lr:0.100000 network_time: 0.0286
59
+ [ Thu Sep 15 21:26:15 2022 ] Eval epoch: 9
60
+ [ Thu Sep 15 21:28:04 2022 ] Mean test loss of 930 batches: 2.8759517669677734.
61
+ [ Thu Sep 15 21:28:04 2022 ] Top1: 35.49%
62
+ [ Thu Sep 15 21:28:05 2022 ] Top5: 66.09%
63
+ [ Thu Sep 15 21:28:05 2022 ] Training epoch: 10
64
+ [ Thu Sep 15 21:28:39 2022 ] Batch(41/162) done. Loss: 1.0014 lr:0.100000 network_time: 0.0270
65
+ [ Thu Sep 15 21:29:51 2022 ] Batch(141/162) done. Loss: 1.1646 lr:0.100000 network_time: 0.0262
66
+ [ Thu Sep 15 21:30:06 2022 ] Eval epoch: 10
67
+ [ Thu Sep 15 21:31:55 2022 ] Mean test loss of 930 batches: 2.6339988708496094.
68
+ [ Thu Sep 15 21:31:55 2022 ] Top1: 38.66%
69
+ [ Thu Sep 15 21:31:56 2022 ] Top5: 68.13%
70
+ [ Thu Sep 15 21:31:56 2022 ] Training epoch: 11
71
+ [ Thu Sep 15 21:32:57 2022 ] Batch(79/162) done. Loss: 1.1789 lr:0.100000 network_time: 0.0293
72
+ [ Thu Sep 15 21:33:57 2022 ] Eval epoch: 11
73
+ [ Thu Sep 15 21:35:45 2022 ] Mean test loss of 930 batches: 3.1947619915008545.
74
+ [ Thu Sep 15 21:35:45 2022 ] Top1: 34.92%
75
+ [ Thu Sep 15 21:35:46 2022 ] Top5: 66.34%
76
+ [ Thu Sep 15 21:35:46 2022 ] Training epoch: 12
77
+ [ Thu Sep 15 21:36:02 2022 ] Batch(17/162) done. Loss: 0.9280 lr:0.100000 network_time: 0.0267
78
+ [ Thu Sep 15 21:37:15 2022 ] Batch(117/162) done. Loss: 1.1294 lr:0.100000 network_time: 0.0273
79
+ [ Thu Sep 15 21:37:47 2022 ] Eval epoch: 12
80
+ [ Thu Sep 15 21:39:35 2022 ] Mean test loss of 930 batches: 2.5820627212524414.
81
+ [ Thu Sep 15 21:39:36 2022 ] Top1: 40.86%
82
+ [ Thu Sep 15 21:39:36 2022 ] Top5: 71.07%
83
+ [ Thu Sep 15 21:39:36 2022 ] Training epoch: 13
84
+ [ Thu Sep 15 21:40:20 2022 ] Batch(55/162) done. Loss: 0.7400 lr:0.100000 network_time: 0.0305
85
+ [ Thu Sep 15 21:41:33 2022 ] Batch(155/162) done. Loss: 0.9805 lr:0.100000 network_time: 0.0278
86
+ [ Thu Sep 15 21:41:38 2022 ] Eval epoch: 13
87
+ [ Thu Sep 15 21:43:27 2022 ] Mean test loss of 930 batches: 2.6807053089141846.
88
+ [ Thu Sep 15 21:43:27 2022 ] Top1: 38.93%
89
+ [ Thu Sep 15 21:43:28 2022 ] Top5: 71.42%
90
+ [ Thu Sep 15 21:43:28 2022 ] Training epoch: 14
91
+ [ Thu Sep 15 21:44:40 2022 ] Batch(93/162) done. Loss: 1.1675 lr:0.100000 network_time: 0.0270
92
+ [ Thu Sep 15 21:45:29 2022 ] Eval epoch: 14
93
+ [ Thu Sep 15 21:47:19 2022 ] Mean test loss of 930 batches: 2.6006383895874023.
94
+ [ Thu Sep 15 21:47:19 2022 ] Top1: 41.03%
95
+ [ Thu Sep 15 21:47:20 2022 ] Top5: 71.68%
96
+ [ Thu Sep 15 21:47:20 2022 ] Training epoch: 15
97
+ [ Thu Sep 15 21:47:47 2022 ] Batch(31/162) done. Loss: 0.9053 lr:0.100000 network_time: 0.0286
98
+ [ Thu Sep 15 21:48:59 2022 ] Batch(131/162) done. Loss: 0.7565 lr:0.100000 network_time: 0.0318
99
+ [ Thu Sep 15 21:49:22 2022 ] Eval epoch: 15
100
+ [ Thu Sep 15 21:51:11 2022 ] Mean test loss of 930 batches: 2.5753092765808105.
101
+ [ Thu Sep 15 21:51:11 2022 ] Top1: 41.86%
102
+ [ Thu Sep 15 21:51:12 2022 ] Top5: 71.00%
103
+ [ Thu Sep 15 21:51:12 2022 ] Training epoch: 16
104
+ [ Thu Sep 15 21:52:06 2022 ] Batch(69/162) done. Loss: 0.9629 lr:0.100000 network_time: 0.0280
105
+ [ Thu Sep 15 21:53:13 2022 ] Eval epoch: 16
106
+ [ Thu Sep 15 21:55:02 2022 ] Mean test loss of 930 batches: 2.642726182937622.
107
+ [ Thu Sep 15 21:55:02 2022 ] Top1: 42.77%
108
+ [ Thu Sep 15 21:55:03 2022 ] Top5: 73.14%
109
+ [ Thu Sep 15 21:55:03 2022 ] Training epoch: 17
110
+ [ Thu Sep 15 21:55:12 2022 ] Batch(7/162) done. Loss: 0.7531 lr:0.100000 network_time: 0.0267
111
+ [ Thu Sep 15 21:56:25 2022 ] Batch(107/162) done. Loss: 0.9298 lr:0.100000 network_time: 0.0305
112
+ [ Thu Sep 15 21:57:04 2022 ] Eval epoch: 17
113
+ [ Thu Sep 15 21:58:52 2022 ] Mean test loss of 930 batches: 2.701462984085083.
114
+ [ Thu Sep 15 21:58:53 2022 ] Top1: 41.56%
115
+ [ Thu Sep 15 21:58:53 2022 ] Top5: 72.56%
116
+ [ Thu Sep 15 21:58:54 2022 ] Training epoch: 18
117
+ [ Thu Sep 15 21:59:30 2022 ] Batch(45/162) done. Loss: 0.9319 lr:0.100000 network_time: 0.0270
118
+ [ Thu Sep 15 22:00:43 2022 ] Batch(145/162) done. Loss: 0.5295 lr:0.100000 network_time: 0.0275
119
+ [ Thu Sep 15 22:00:55 2022 ] Eval epoch: 18
120
+ [ Thu Sep 15 22:02:43 2022 ] Mean test loss of 930 batches: 2.777517080307007.
121
+ [ Thu Sep 15 22:02:43 2022 ] Top1: 42.65%
122
+ [ Thu Sep 15 22:02:43 2022 ] Top5: 73.63%
123
+ [ Thu Sep 15 22:02:44 2022 ] Training epoch: 19
124
+ [ Thu Sep 15 22:03:48 2022 ] Batch(83/162) done. Loss: 0.5384 lr:0.100000 network_time: 0.0337
125
+ [ Thu Sep 15 22:04:45 2022 ] Eval epoch: 19
126
+ [ Thu Sep 15 22:06:33 2022 ] Mean test loss of 930 batches: 2.634265422821045.
127
+ [ Thu Sep 15 22:06:34 2022 ] Top1: 43.27%
128
+ [ Thu Sep 15 22:06:34 2022 ] Top5: 73.45%
129
+ [ Thu Sep 15 22:06:34 2022 ] Training epoch: 20
130
+ [ Thu Sep 15 22:06:54 2022 ] Batch(21/162) done. Loss: 0.9205 lr:0.100000 network_time: 0.0314
131
+ [ Thu Sep 15 22:08:07 2022 ] Batch(121/162) done. Loss: 0.5672 lr:0.100000 network_time: 0.0334
132
+ [ Thu Sep 15 22:08:36 2022 ] Eval epoch: 20
133
+ [ Thu Sep 15 22:10:24 2022 ] Mean test loss of 930 batches: 2.6961050033569336.
134
+ [ Thu Sep 15 22:10:24 2022 ] Top1: 42.92%
135
+ [ Thu Sep 15 22:10:25 2022 ] Top5: 73.06%
136
+ [ Thu Sep 15 22:10:25 2022 ] Training epoch: 21
137
+ [ Thu Sep 15 22:11:12 2022 ] Batch(59/162) done. Loss: 0.4838 lr:0.100000 network_time: 0.0336
138
+ [ Thu Sep 15 22:12:25 2022 ] Batch(159/162) done. Loss: 0.8854 lr:0.100000 network_time: 0.0309
139
+ [ Thu Sep 15 22:12:27 2022 ] Eval epoch: 21
140
+ [ Thu Sep 15 22:14:15 2022 ] Mean test loss of 930 batches: 2.5363717079162598.
141
+ [ Thu Sep 15 22:14:16 2022 ] Top1: 43.11%
142
+ [ Thu Sep 15 22:14:16 2022 ] Top5: 73.08%
143
+ [ Thu Sep 15 22:14:16 2022 ] Training epoch: 22
144
+ [ Thu Sep 15 22:15:31 2022 ] Batch(97/162) done. Loss: 0.6675 lr:0.100000 network_time: 0.0303
145
+ [ Thu Sep 15 22:16:18 2022 ] Eval epoch: 22
146
+ [ Thu Sep 15 22:18:07 2022 ] Mean test loss of 930 batches: 2.5331027507781982.
147
+ [ Thu Sep 15 22:18:07 2022 ] Top1: 45.46%
148
+ [ Thu Sep 15 22:18:07 2022 ] Top5: 73.80%
149
+ [ Thu Sep 15 22:18:08 2022 ] Training epoch: 23
150
+ [ Thu Sep 15 22:18:37 2022 ] Batch(35/162) done. Loss: 0.6239 lr:0.100000 network_time: 0.0270
151
+ [ Thu Sep 15 22:19:49 2022 ] Batch(135/162) done. Loss: 0.4228 lr:0.100000 network_time: 0.0261
152
+ [ Thu Sep 15 22:20:08 2022 ] Eval epoch: 23
153
+ [ Thu Sep 15 22:21:56 2022 ] Mean test loss of 930 batches: 2.4731662273406982.
154
+ [ Thu Sep 15 22:21:57 2022 ] Top1: 44.55%
155
+ [ Thu Sep 15 22:21:57 2022 ] Top5: 74.01%
156
+ [ Thu Sep 15 22:21:58 2022 ] Training epoch: 24
157
+ [ Thu Sep 15 22:22:55 2022 ] Batch(73/162) done. Loss: 0.4733 lr:0.100000 network_time: 0.0276
158
+ [ Thu Sep 15 22:23:59 2022 ] Eval epoch: 24
159
+ [ Thu Sep 15 22:25:47 2022 ] Mean test loss of 930 batches: 2.79464054107666.
160
+ [ Thu Sep 15 22:25:47 2022 ] Top1: 44.01%
161
+ [ Thu Sep 15 22:25:48 2022 ] Top5: 73.97%
162
+ [ Thu Sep 15 22:25:48 2022 ] Training epoch: 25
163
+ [ Thu Sep 15 22:26:00 2022 ] Batch(11/162) done. Loss: 0.4082 lr:0.100000 network_time: 0.0352
164
+ [ Thu Sep 15 22:27:13 2022 ] Batch(111/162) done. Loss: 0.5356 lr:0.100000 network_time: 0.0281
165
+ [ Thu Sep 15 22:27:49 2022 ] Eval epoch: 25
166
+ [ Thu Sep 15 22:29:37 2022 ] Mean test loss of 930 batches: 2.302302598953247.
167
+ [ Thu Sep 15 22:29:38 2022 ] Top1: 46.27%
168
+ [ Thu Sep 15 22:29:38 2022 ] Top5: 76.14%
169
+ [ Thu Sep 15 22:29:38 2022 ] Training epoch: 26
170
+ [ Thu Sep 15 22:30:18 2022 ] Batch(49/162) done. Loss: 0.4845 lr:0.100000 network_time: 0.0290
171
+ [ Thu Sep 15 22:31:30 2022 ] Batch(149/162) done. Loss: 0.7163 lr:0.100000 network_time: 0.0271
172
+ [ Thu Sep 15 22:31:39 2022 ] Eval epoch: 26
173
+ [ Thu Sep 15 22:33:27 2022 ] Mean test loss of 930 batches: 2.5283055305480957.
174
+ [ Thu Sep 15 22:33:28 2022 ] Top1: 47.49%
175
+ [ Thu Sep 15 22:33:28 2022 ] Top5: 77.84%
176
+ [ Thu Sep 15 22:33:28 2022 ] Training epoch: 27
177
+ [ Thu Sep 15 22:34:36 2022 ] Batch(87/162) done. Loss: 0.5433 lr:0.100000 network_time: 0.0330
178
+ [ Thu Sep 15 22:35:30 2022 ] Eval epoch: 27
179
+ [ Thu Sep 15 22:37:18 2022 ] Mean test loss of 930 batches: 2.756495714187622.
180
+ [ Thu Sep 15 22:37:18 2022 ] Top1: 43.80%
181
+ [ Thu Sep 15 22:37:19 2022 ] Top5: 73.11%
182
+ [ Thu Sep 15 22:37:19 2022 ] Training epoch: 28
183
+ [ Thu Sep 15 22:37:41 2022 ] Batch(25/162) done. Loss: 0.3972 lr:0.100000 network_time: 0.0268
184
+ [ Thu Sep 15 22:38:54 2022 ] Batch(125/162) done. Loss: 0.6571 lr:0.100000 network_time: 0.0271
185
+ [ Thu Sep 15 22:39:20 2022 ] Eval epoch: 28
186
+ [ Thu Sep 15 22:41:08 2022 ] Mean test loss of 930 batches: 2.6355648040771484.
187
+ [ Thu Sep 15 22:41:08 2022 ] Top1: 46.46%
188
+ [ Thu Sep 15 22:41:09 2022 ] Top5: 75.25%
189
+ [ Thu Sep 15 22:41:09 2022 ] Training epoch: 29
190
+ [ Thu Sep 15 22:41:59 2022 ] Batch(63/162) done. Loss: 0.4874 lr:0.100000 network_time: 0.0272
191
+ [ Thu Sep 15 22:43:10 2022 ] Eval epoch: 29
192
+ [ Thu Sep 15 22:44:58 2022 ] Mean test loss of 930 batches: 2.8874144554138184.
193
+ [ Thu Sep 15 22:44:58 2022 ] Top1: 44.57%
194
+ [ Thu Sep 15 22:44:59 2022 ] Top5: 74.14%
195
+ [ Thu Sep 15 22:44:59 2022 ] Training epoch: 30
196
+ [ Thu Sep 15 22:45:03 2022 ] Batch(1/162) done. Loss: 0.3427 lr:0.100000 network_time: 0.0323
197
+ [ Thu Sep 15 22:46:16 2022 ] Batch(101/162) done. Loss: 0.4298 lr:0.100000 network_time: 0.0261
198
+ [ Thu Sep 15 22:47:00 2022 ] Eval epoch: 30
199
+ [ Thu Sep 15 22:48:48 2022 ] Mean test loss of 930 batches: 2.7167603969573975.
200
+ [ Thu Sep 15 22:48:48 2022 ] Top1: 45.86%
201
+ [ Thu Sep 15 22:48:48 2022 ] Top5: 74.97%
202
+ [ Thu Sep 15 22:48:49 2022 ] Training epoch: 31
203
+ [ Thu Sep 15 22:49:21 2022 ] Batch(39/162) done. Loss: 0.3104 lr:0.100000 network_time: 0.0259
204
+ [ Thu Sep 15 22:50:34 2022 ] Batch(139/162) done. Loss: 0.5252 lr:0.100000 network_time: 0.0329
205
+ [ Thu Sep 15 22:50:50 2022 ] Eval epoch: 31
206
+ [ Thu Sep 15 22:52:38 2022 ] Mean test loss of 930 batches: 2.8605589866638184.
207
+ [ Thu Sep 15 22:52:39 2022 ] Top1: 44.77%
208
+ [ Thu Sep 15 22:52:39 2022 ] Top5: 73.49%
209
+ [ Thu Sep 15 22:52:39 2022 ] Training epoch: 32
210
+ [ Thu Sep 15 22:53:39 2022 ] Batch(77/162) done. Loss: 0.3644 lr:0.100000 network_time: 0.0301
211
+ [ Thu Sep 15 22:54:40 2022 ] Eval epoch: 32
212
+ [ Thu Sep 15 22:56:28 2022 ] Mean test loss of 930 batches: 2.744288682937622.
213
+ [ Thu Sep 15 22:56:29 2022 ] Top1: 45.83%
214
+ [ Thu Sep 15 22:56:29 2022 ] Top5: 74.21%
215
+ [ Thu Sep 15 22:56:29 2022 ] Training epoch: 33
216
+ [ Thu Sep 15 22:56:44 2022 ] Batch(15/162) done. Loss: 0.2899 lr:0.100000 network_time: 0.0267
217
+ [ Thu Sep 15 22:57:57 2022 ] Batch(115/162) done. Loss: 0.9063 lr:0.100000 network_time: 0.0266
218
+ [ Thu Sep 15 22:58:30 2022 ] Eval epoch: 33
219
+ [ Thu Sep 15 23:00:19 2022 ] Mean test loss of 930 batches: 2.8654515743255615.
220
+ [ Thu Sep 15 23:00:19 2022 ] Top1: 46.32%
221
+ [ Thu Sep 15 23:00:20 2022 ] Top5: 75.20%
222
+ [ Thu Sep 15 23:00:20 2022 ] Training epoch: 34
223
+ [ Thu Sep 15 23:01:02 2022 ] Batch(53/162) done. Loss: 0.5861 lr:0.100000 network_time: 0.0301
224
+ [ Thu Sep 15 23:02:15 2022 ] Batch(153/162) done. Loss: 0.5252 lr:0.100000 network_time: 0.0274
225
+ [ Thu Sep 15 23:02:21 2022 ] Eval epoch: 34
226
+ [ Thu Sep 15 23:04:10 2022 ] Mean test loss of 930 batches: 2.6413774490356445.
227
+ [ Thu Sep 15 23:04:10 2022 ] Top1: 48.05%
228
+ [ Thu Sep 15 23:04:10 2022 ] Top5: 76.22%
229
+ [ Thu Sep 15 23:04:11 2022 ] Training epoch: 35
230
+ [ Thu Sep 15 23:05:20 2022 ] Batch(91/162) done. Loss: 0.7987 lr:0.100000 network_time: 0.0278
231
+ [ Thu Sep 15 23:06:11 2022 ] Eval epoch: 35
232
+ [ Thu Sep 15 23:07:59 2022 ] Mean test loss of 930 batches: 3.157620906829834.
233
+ [ Thu Sep 15 23:08:00 2022 ] Top1: 44.87%
234
+ [ Thu Sep 15 23:08:00 2022 ] Top5: 74.35%
235
+ [ Thu Sep 15 23:08:01 2022 ] Training epoch: 36
236
+ [ Thu Sep 15 23:08:25 2022 ] Batch(29/162) done. Loss: 0.3202 lr:0.100000 network_time: 0.0271
237
+ [ Thu Sep 15 23:09:38 2022 ] Batch(129/162) done. Loss: 0.3796 lr:0.100000 network_time: 0.0313
238
+ [ Thu Sep 15 23:10:01 2022 ] Eval epoch: 36
239
+ [ Thu Sep 15 23:11:49 2022 ] Mean test loss of 930 batches: 2.9143733978271484.
240
+ [ Thu Sep 15 23:11:50 2022 ] Top1: 46.78%
241
+ [ Thu Sep 15 23:11:50 2022 ] Top5: 75.46%
242
+ [ Thu Sep 15 23:11:50 2022 ] Training epoch: 37
243
+ [ Thu Sep 15 23:12:43 2022 ] Batch(67/162) done. Loss: 0.3318 lr:0.100000 network_time: 0.0267
244
+ [ Thu Sep 15 23:13:52 2022 ] Eval epoch: 37
245
+ [ Thu Sep 15 23:15:40 2022 ] Mean test loss of 930 batches: 2.5134196281433105.
246
+ [ Thu Sep 15 23:15:40 2022 ] Top1: 47.01%
247
+ [ Thu Sep 15 23:15:40 2022 ] Top5: 75.64%
248
+ [ Thu Sep 15 23:15:41 2022 ] Training epoch: 38
249
+ [ Thu Sep 15 23:15:48 2022 ] Batch(5/162) done. Loss: 0.4636 lr:0.100000 network_time: 0.0310
250
+ [ Thu Sep 15 23:17:01 2022 ] Batch(105/162) done. Loss: 0.2994 lr:0.100000 network_time: 0.0606
251
+ [ Thu Sep 15 23:17:42 2022 ] Eval epoch: 38
252
+ [ Thu Sep 15 23:19:30 2022 ] Mean test loss of 930 batches: 2.8436739444732666.
253
+ [ Thu Sep 15 23:19:30 2022 ] Top1: 46.69%
254
+ [ Thu Sep 15 23:19:31 2022 ] Top5: 75.47%
255
+ [ Thu Sep 15 23:19:31 2022 ] Training epoch: 39
256
+ [ Thu Sep 15 23:20:06 2022 ] Batch(43/162) done. Loss: 0.1927 lr:0.100000 network_time: 0.0289
257
+ [ Thu Sep 15 23:21:19 2022 ] Batch(143/162) done. Loss: 0.4042 lr:0.100000 network_time: 0.0264
258
+ [ Thu Sep 15 23:21:32 2022 ] Eval epoch: 39
259
+ [ Thu Sep 15 23:23:20 2022 ] Mean test loss of 930 batches: 2.809511423110962.
260
+ [ Thu Sep 15 23:23:20 2022 ] Top1: 45.66%
261
+ [ Thu Sep 15 23:23:21 2022 ] Top5: 74.96%
262
+ [ Thu Sep 15 23:23:21 2022 ] Training epoch: 40
263
+ [ Thu Sep 15 23:24:24 2022 ] Batch(81/162) done. Loss: 0.3760 lr:0.100000 network_time: 0.0407
264
+ [ Thu Sep 15 23:25:22 2022 ] Eval epoch: 40
265
+ [ Thu Sep 15 23:27:11 2022 ] Mean test loss of 930 batches: 2.7429559230804443.
266
+ [ Thu Sep 15 23:27:11 2022 ] Top1: 45.55%
267
+ [ Thu Sep 15 23:27:12 2022 ] Top5: 74.78%
268
+ [ Thu Sep 15 23:27:12 2022 ] Training epoch: 41
269
+ [ Thu Sep 15 23:27:30 2022 ] Batch(19/162) done. Loss: 0.5173 lr:0.100000 network_time: 0.0312
270
+ [ Thu Sep 15 23:28:43 2022 ] Batch(119/162) done. Loss: 0.6124 lr:0.100000 network_time: 0.0272
271
+ [ Thu Sep 15 23:29:13 2022 ] Eval epoch: 41
272
+ [ Thu Sep 15 23:31:01 2022 ] Mean test loss of 930 batches: 2.9963934421539307.
273
+ [ Thu Sep 15 23:31:01 2022 ] Top1: 45.14%
274
+ [ Thu Sep 15 23:31:02 2022 ] Top5: 74.59%
275
+ [ Thu Sep 15 23:31:02 2022 ] Training epoch: 42
276
+ [ Thu Sep 15 23:31:47 2022 ] Batch(57/162) done. Loss: 0.4068 lr:0.100000 network_time: 0.0283
277
+ [ Thu Sep 15 23:33:00 2022 ] Batch(157/162) done. Loss: 0.3797 lr:0.100000 network_time: 0.0312
278
+ [ Thu Sep 15 23:33:03 2022 ] Eval epoch: 42
279
+ [ Thu Sep 15 23:34:51 2022 ] Mean test loss of 930 batches: 2.8139801025390625.
280
+ [ Thu Sep 15 23:34:52 2022 ] Top1: 46.93%
281
+ [ Thu Sep 15 23:34:52 2022 ] Top5: 75.50%
282
+ [ Thu Sep 15 23:34:52 2022 ] Training epoch: 43
283
+ [ Thu Sep 15 23:36:05 2022 ] Batch(95/162) done. Loss: 0.3201 lr:0.100000 network_time: 0.0307
284
+ [ Thu Sep 15 23:36:53 2022 ] Eval epoch: 43
285
+ [ Thu Sep 15 23:38:42 2022 ] Mean test loss of 930 batches: 3.052001953125.
286
+ [ Thu Sep 15 23:38:42 2022 ] Top1: 42.91%
287
+ [ Thu Sep 15 23:38:43 2022 ] Top5: 74.33%
288
+ [ Thu Sep 15 23:38:43 2022 ] Training epoch: 44
289
+ [ Thu Sep 15 23:39:11 2022 ] Batch(33/162) done. Loss: 0.1518 lr:0.100000 network_time: 0.0299
290
+ [ Thu Sep 15 23:40:24 2022 ] Batch(133/162) done. Loss: 0.3774 lr:0.100000 network_time: 0.0262
291
+ [ Thu Sep 15 23:40:44 2022 ] Eval epoch: 44
292
+ [ Thu Sep 15 23:42:33 2022 ] Mean test loss of 930 batches: 2.9345126152038574.
293
+ [ Thu Sep 15 23:42:33 2022 ] Top1: 47.30%
294
+ [ Thu Sep 15 23:42:34 2022 ] Top5: 75.75%
295
+ [ Thu Sep 15 23:42:34 2022 ] Training epoch: 45
296
+ [ Thu Sep 15 23:43:29 2022 ] Batch(71/162) done. Loss: 0.4863 lr:0.100000 network_time: 0.0283
297
+ [ Thu Sep 15 23:44:35 2022 ] Eval epoch: 45
298
+ [ Thu Sep 15 23:46:23 2022 ] Mean test loss of 930 batches: 2.7627146244049072.
299
+ [ Thu Sep 15 23:46:24 2022 ] Top1: 47.54%
300
+ [ Thu Sep 15 23:46:24 2022 ] Top5: 76.17%
301
+ [ Thu Sep 15 23:46:24 2022 ] Training epoch: 46
302
+ [ Thu Sep 15 23:46:34 2022 ] Batch(9/162) done. Loss: 0.2246 lr:0.100000 network_time: 0.0265
303
+ [ Thu Sep 15 23:47:47 2022 ] Batch(109/162) done. Loss: 0.5166 lr:0.100000 network_time: 0.0274
304
+ [ Thu Sep 15 23:48:25 2022 ] Eval epoch: 46
305
+ [ Thu Sep 15 23:50:13 2022 ] Mean test loss of 930 batches: 2.7105462551116943.
306
+ [ Thu Sep 15 23:50:14 2022 ] Top1: 47.83%
307
+ [ Thu Sep 15 23:50:14 2022 ] Top5: 77.36%
308
+ [ Thu Sep 15 23:50:15 2022 ] Training epoch: 47
309
+ [ Thu Sep 15 23:50:52 2022 ] Batch(47/162) done. Loss: 0.2201 lr:0.100000 network_time: 0.0276
310
+ [ Thu Sep 15 23:52:05 2022 ] Batch(147/162) done. Loss: 0.3085 lr:0.100000 network_time: 0.0307
311
+ [ Thu Sep 15 23:52:16 2022 ] Eval epoch: 47
312
+ [ Thu Sep 15 23:54:04 2022 ] Mean test loss of 930 batches: 2.8468735218048096.
313
+ [ Thu Sep 15 23:54:04 2022 ] Top1: 46.37%
314
+ [ Thu Sep 15 23:54:05 2022 ] Top5: 75.79%
315
+ [ Thu Sep 15 23:54:05 2022 ] Training epoch: 48
316
+ [ Thu Sep 15 23:55:11 2022 ] Batch(85/162) done. Loss: 0.4530 lr:0.100000 network_time: 0.0276
317
+ [ Thu Sep 15 23:56:06 2022 ] Eval epoch: 48
318
+ [ Thu Sep 15 23:57:54 2022 ] Mean test loss of 930 batches: 2.7094438076019287.
319
+ [ Thu Sep 15 23:57:55 2022 ] Top1: 47.79%
320
+ [ Thu Sep 15 23:57:55 2022 ] Top5: 76.91%
321
+ [ Thu Sep 15 23:57:55 2022 ] Training epoch: 49
322
+ [ Thu Sep 15 23:58:16 2022 ] Batch(23/162) done. Loss: 0.3716 lr:0.100000 network_time: 0.0382
323
+ [ Thu Sep 15 23:59:29 2022 ] Batch(123/162) done. Loss: 0.2848 lr:0.100000 network_time: 0.0328
324
+ [ Thu Sep 15 23:59:57 2022 ] Eval epoch: 49
325
+ [ Fri Sep 16 00:01:45 2022 ] Mean test loss of 930 batches: 3.2839035987854004.
326
+ [ Fri Sep 16 00:01:45 2022 ] Top1: 44.72%
327
+ [ Fri Sep 16 00:01:46 2022 ] Top5: 72.77%
328
+ [ Fri Sep 16 00:01:46 2022 ] Training epoch: 50
329
+ [ Fri Sep 16 00:02:34 2022 ] Batch(61/162) done. Loss: 0.3227 lr:0.100000 network_time: 0.0309
330
+ [ Fri Sep 16 00:03:47 2022 ] Batch(161/162) done. Loss: 0.3406 lr:0.100000 network_time: 0.0273
331
+ [ Fri Sep 16 00:03:47 2022 ] Eval epoch: 50
332
+ [ Fri Sep 16 00:05:35 2022 ] Mean test loss of 930 batches: 2.811931848526001.
333
+ [ Fri Sep 16 00:05:35 2022 ] Top1: 48.72%
334
+ [ Fri Sep 16 00:05:36 2022 ] Top5: 77.38%
335
+ [ Fri Sep 16 00:05:36 2022 ] Training epoch: 51
336
+ [ Fri Sep 16 00:06:52 2022 ] Batch(99/162) done. Loss: 0.3076 lr:0.100000 network_time: 0.0271
337
+ [ Fri Sep 16 00:07:37 2022 ] Eval epoch: 51
338
+ [ Fri Sep 16 00:09:26 2022 ] Mean test loss of 930 batches: 3.179542064666748.
339
+ [ Fri Sep 16 00:09:26 2022 ] Top1: 45.92%
340
+ [ Fri Sep 16 00:09:27 2022 ] Top5: 74.47%
341
+ [ Fri Sep 16 00:09:27 2022 ] Training epoch: 52
342
+ [ Fri Sep 16 00:09:57 2022 ] Batch(37/162) done. Loss: 0.1807 lr:0.100000 network_time: 0.0298
343
+ [ Fri Sep 16 00:11:10 2022 ] Batch(137/162) done. Loss: 0.4173 lr:0.100000 network_time: 0.0274
344
+ [ Fri Sep 16 00:11:28 2022 ] Eval epoch: 52
345
+ [ Fri Sep 16 00:13:16 2022 ] Mean test loss of 930 batches: 2.9092044830322266.
346
+ [ Fri Sep 16 00:13:16 2022 ] Top1: 45.58%
347
+ [ Fri Sep 16 00:13:17 2022 ] Top5: 74.35%
348
+ [ Fri Sep 16 00:13:17 2022 ] Training epoch: 53
349
+ [ Fri Sep 16 00:14:15 2022 ] Batch(75/162) done. Loss: 0.3810 lr:0.100000 network_time: 0.0316
350
+ [ Fri Sep 16 00:15:18 2022 ] Eval epoch: 53
351
+ [ Fri Sep 16 00:17:06 2022 ] Mean test loss of 930 batches: 3.3449065685272217.
352
+ [ Fri Sep 16 00:17:06 2022 ] Top1: 44.76%
353
+ [ Fri Sep 16 00:17:07 2022 ] Top5: 74.22%
354
+ [ Fri Sep 16 00:17:07 2022 ] Training epoch: 54
355
+ [ Fri Sep 16 00:17:20 2022 ] Batch(13/162) done. Loss: 0.0814 lr:0.100000 network_time: 0.0300
356
+ [ Fri Sep 16 00:18:33 2022 ] Batch(113/162) done. Loss: 0.2477 lr:0.100000 network_time: 0.0257
357
+ [ Fri Sep 16 00:19:08 2022 ] Eval epoch: 54
358
+ [ Fri Sep 16 00:20:56 2022 ] Mean test loss of 930 batches: 2.650818109512329.
359
+ [ Fri Sep 16 00:20:56 2022 ] Top1: 47.98%
360
+ [ Fri Sep 16 00:20:57 2022 ] Top5: 77.17%
361
+ [ Fri Sep 16 00:20:57 2022 ] Training epoch: 55
362
+ [ Fri Sep 16 00:21:38 2022 ] Batch(51/162) done. Loss: 0.1965 lr:0.100000 network_time: 0.0275
363
+ [ Fri Sep 16 00:22:51 2022 ] Batch(151/162) done. Loss: 0.2623 lr:0.100000 network_time: 0.0273
364
+ [ Fri Sep 16 00:22:58 2022 ] Eval epoch: 55
365
+ [ Fri Sep 16 00:24:46 2022 ] Mean test loss of 930 batches: 2.9719905853271484.
366
+ [ Fri Sep 16 00:24:47 2022 ] Top1: 47.19%
367
+ [ Fri Sep 16 00:24:47 2022 ] Top5: 75.58%
368
+ [ Fri Sep 16 00:24:47 2022 ] Training epoch: 56
369
+ [ Fri Sep 16 00:25:56 2022 ] Batch(89/162) done. Loss: 0.1470 lr:0.100000 network_time: 0.0261
370
+ [ Fri Sep 16 00:26:48 2022 ] Eval epoch: 56
371
+ [ Fri Sep 16 00:28:37 2022 ] Mean test loss of 930 batches: 2.853518009185791.
372
+ [ Fri Sep 16 00:28:37 2022 ] Top1: 46.27%
373
+ [ Fri Sep 16 00:28:38 2022 ] Top5: 76.19%
374
+ [ Fri Sep 16 00:28:38 2022 ] Training epoch: 57
375
+ [ Fri Sep 16 00:29:01 2022 ] Batch(27/162) done. Loss: 0.1605 lr:0.100000 network_time: 0.0288
376
+ [ Fri Sep 16 00:30:14 2022 ] Batch(127/162) done. Loss: 0.2337 lr:0.100000 network_time: 0.0265
377
+ [ Fri Sep 16 00:30:39 2022 ] Eval epoch: 57
378
+ [ Fri Sep 16 00:32:27 2022 ] Mean test loss of 930 batches: 2.9228341579437256.
379
+ [ Fri Sep 16 00:32:27 2022 ] Top1: 45.10%
380
+ [ Fri Sep 16 00:32:28 2022 ] Top5: 75.00%
381
+ [ Fri Sep 16 00:32:28 2022 ] Training epoch: 58
382
+ [ Fri Sep 16 00:33:19 2022 ] Batch(65/162) done. Loss: 0.1659 lr:0.100000 network_time: 0.0264
383
+ [ Fri Sep 16 00:34:29 2022 ] Eval epoch: 58
384
+ [ Fri Sep 16 00:36:17 2022 ] Mean test loss of 930 batches: 3.072599172592163.
385
+ [ Fri Sep 16 00:36:17 2022 ] Top1: 46.37%
386
+ [ Fri Sep 16 00:36:18 2022 ] Top5: 74.37%
387
+ [ Fri Sep 16 00:36:18 2022 ] Training epoch: 59
388
+ [ Fri Sep 16 00:36:23 2022 ] Batch(3/162) done. Loss: 0.1019 lr:0.100000 network_time: 0.0279
389
+ [ Fri Sep 16 00:37:36 2022 ] Batch(103/162) done. Loss: 0.2214 lr:0.100000 network_time: 0.0281
390
+ [ Fri Sep 16 00:38:19 2022 ] Eval epoch: 59
391
+ [ Fri Sep 16 00:40:07 2022 ] Mean test loss of 930 batches: 2.779177188873291.
392
+ [ Fri Sep 16 00:40:07 2022 ] Top1: 46.86%
393
+ [ Fri Sep 16 00:40:08 2022 ] Top5: 75.80%
394
+ [ Fri Sep 16 00:40:08 2022 ] Training epoch: 60
395
+ [ Fri Sep 16 00:40:41 2022 ] Batch(41/162) done. Loss: 0.2597 lr:0.100000 network_time: 0.0280
396
+ [ Fri Sep 16 00:41:54 2022 ] Batch(141/162) done. Loss: 0.1924 lr:0.100000 network_time: 0.0312
397
+ [ Fri Sep 16 00:42:09 2022 ] Eval epoch: 60
398
+ [ Fri Sep 16 00:43:57 2022 ] Mean test loss of 930 batches: 2.9837682247161865.
399
+ [ Fri Sep 16 00:43:58 2022 ] Top1: 47.59%
400
+ [ Fri Sep 16 00:43:58 2022 ] Top5: 76.13%
401
+ [ Fri Sep 16 00:43:58 2022 ] Training epoch: 61
402
+ [ Fri Sep 16 00:44:59 2022 ] Batch(79/162) done. Loss: 0.1124 lr:0.010000 network_time: 0.0279
403
+ [ Fri Sep 16 00:45:59 2022 ] Eval epoch: 61
404
+ [ Fri Sep 16 00:47:48 2022 ] Mean test loss of 930 batches: 2.455381155014038.
405
+ [ Fri Sep 16 00:47:48 2022 ] Top1: 53.95%
406
+ [ Fri Sep 16 00:47:49 2022 ] Top5: 80.31%
407
+ [ Fri Sep 16 00:47:49 2022 ] Training epoch: 62
408
+ [ Fri Sep 16 00:48:05 2022 ] Batch(17/162) done. Loss: 0.0396 lr:0.010000 network_time: 0.0237
409
+ [ Fri Sep 16 00:49:18 2022 ] Batch(117/162) done. Loss: 0.0511 lr:0.010000 network_time: 0.0274
410
+ [ Fri Sep 16 00:49:50 2022 ] Eval epoch: 62
411
+ [ Fri Sep 16 00:51:39 2022 ] Mean test loss of 930 batches: 2.414081335067749.
412
+ [ Fri Sep 16 00:51:40 2022 ] Top1: 54.59%
413
+ [ Fri Sep 16 00:51:40 2022 ] Top5: 80.92%
414
+ [ Fri Sep 16 00:51:40 2022 ] Training epoch: 63
415
+ [ Fri Sep 16 00:52:24 2022 ] Batch(55/162) done. Loss: 0.0494 lr:0.010000 network_time: 0.0276
416
+ [ Fri Sep 16 00:53:37 2022 ] Batch(155/162) done. Loss: 0.0421 lr:0.010000 network_time: 0.0271
417
+ [ Fri Sep 16 00:53:41 2022 ] Eval epoch: 63
418
+ [ Fri Sep 16 00:55:30 2022 ] Mean test loss of 930 batches: 2.435600996017456.
419
+ [ Fri Sep 16 00:55:30 2022 ] Top1: 54.98%
420
+ [ Fri Sep 16 00:55:31 2022 ] Top5: 81.13%
421
+ [ Fri Sep 16 00:55:31 2022 ] Training epoch: 64
422
+ [ Fri Sep 16 00:56:42 2022 ] Batch(93/162) done. Loss: 0.0951 lr:0.010000 network_time: 0.0278
423
+ [ Fri Sep 16 00:57:32 2022 ] Eval epoch: 64
424
+ [ Fri Sep 16 00:59:20 2022 ] Mean test loss of 930 batches: 2.467794418334961.
425
+ [ Fri Sep 16 00:59:21 2022 ] Top1: 54.78%
426
+ [ Fri Sep 16 00:59:21 2022 ] Top5: 81.15%
427
+ [ Fri Sep 16 00:59:21 2022 ] Training epoch: 65
428
+ [ Fri Sep 16 00:59:47 2022 ] Batch(31/162) done. Loss: 0.0189 lr:0.010000 network_time: 0.0267
429
+ [ Fri Sep 16 01:01:00 2022 ] Batch(131/162) done. Loss: 0.0475 lr:0.010000 network_time: 0.0311
430
+ [ Fri Sep 16 01:01:22 2022 ] Eval epoch: 65
431
+ [ Fri Sep 16 01:03:11 2022 ] Mean test loss of 930 batches: 2.487011194229126.
432
+ [ Fri Sep 16 01:03:11 2022 ] Top1: 54.64%
433
+ [ Fri Sep 16 01:03:12 2022 ] Top5: 80.91%
434
+ [ Fri Sep 16 01:03:12 2022 ] Training epoch: 66
435
+ [ Fri Sep 16 01:04:06 2022 ] Batch(69/162) done. Loss: 0.0478 lr:0.010000 network_time: 0.0262
436
+ [ Fri Sep 16 01:05:13 2022 ] Eval epoch: 66
437
+ [ Fri Sep 16 01:07:00 2022 ] Mean test loss of 930 batches: 2.487764596939087.
438
+ [ Fri Sep 16 01:07:01 2022 ] Top1: 54.90%
439
+ [ Fri Sep 16 01:07:01 2022 ] Top5: 81.02%
440
+ [ Fri Sep 16 01:07:02 2022 ] Training epoch: 67
441
+ [ Fri Sep 16 01:07:10 2022 ] Batch(7/162) done. Loss: 0.0119 lr:0.010000 network_time: 0.0318
442
+ [ Fri Sep 16 01:08:23 2022 ] Batch(107/162) done. Loss: 0.0186 lr:0.010000 network_time: 0.0270
443
+ [ Fri Sep 16 01:09:03 2022 ] Eval epoch: 67
444
+ [ Fri Sep 16 01:10:51 2022 ] Mean test loss of 930 batches: 2.4871976375579834.
445
+ [ Fri Sep 16 01:10:51 2022 ] Top1: 55.07%
446
+ [ Fri Sep 16 01:10:52 2022 ] Top5: 81.23%
447
+ [ Fri Sep 16 01:10:52 2022 ] Training epoch: 68
448
+ [ Fri Sep 16 01:11:29 2022 ] Batch(45/162) done. Loss: 0.0457 lr:0.010000 network_time: 0.0288
449
+ [ Fri Sep 16 01:12:41 2022 ] Batch(145/162) done. Loss: 0.0222 lr:0.010000 network_time: 0.0276
450
+ [ Fri Sep 16 01:12:53 2022 ] Eval epoch: 68
451
+ [ Fri Sep 16 01:14:41 2022 ] Mean test loss of 930 batches: 2.5081300735473633.
452
+ [ Fri Sep 16 01:14:41 2022 ] Top1: 54.78%
453
+ [ Fri Sep 16 01:14:42 2022 ] Top5: 80.99%
454
+ [ Fri Sep 16 01:14:42 2022 ] Training epoch: 69
455
+ [ Fri Sep 16 01:15:46 2022 ] Batch(83/162) done. Loss: 0.0238 lr:0.010000 network_time: 0.0341
456
+ [ Fri Sep 16 01:16:43 2022 ] Eval epoch: 69
457
+ [ Fri Sep 16 01:18:31 2022 ] Mean test loss of 930 batches: 2.4945333003997803.
458
+ [ Fri Sep 16 01:18:32 2022 ] Top1: 55.12%
459
+ [ Fri Sep 16 01:18:32 2022 ] Top5: 81.14%
460
+ [ Fri Sep 16 01:18:32 2022 ] Training epoch: 70
461
+ [ Fri Sep 16 01:18:52 2022 ] Batch(21/162) done. Loss: 0.0233 lr:0.010000 network_time: 0.0275
462
+ [ Fri Sep 16 01:20:05 2022 ] Batch(121/162) done. Loss: 0.0358 lr:0.010000 network_time: 0.0266
463
+ [ Fri Sep 16 01:20:34 2022 ] Eval epoch: 70
464
+ [ Fri Sep 16 01:22:22 2022 ] Mean test loss of 930 batches: 2.4906225204467773.
465
+ [ Fri Sep 16 01:22:23 2022 ] Top1: 55.35%
466
+ [ Fri Sep 16 01:22:23 2022 ] Top5: 81.29%
467
+ [ Fri Sep 16 01:22:23 2022 ] Training epoch: 71
468
+ [ Fri Sep 16 01:23:10 2022 ] Batch(59/162) done. Loss: 0.0117 lr:0.010000 network_time: 0.0309
469
+ [ Fri Sep 16 01:24:23 2022 ] Batch(159/162) done. Loss: 0.0134 lr:0.010000 network_time: 0.0263
470
+ [ Fri Sep 16 01:24:24 2022 ] Eval epoch: 71
471
+ [ Fri Sep 16 01:26:13 2022 ] Mean test loss of 930 batches: 2.5574259757995605.
472
+ [ Fri Sep 16 01:26:13 2022 ] Top1: 54.91%
473
+ [ Fri Sep 16 01:26:14 2022 ] Top5: 81.06%
474
+ [ Fri Sep 16 01:26:14 2022 ] Training epoch: 72
475
+ [ Fri Sep 16 01:27:28 2022 ] Batch(97/162) done. Loss: 0.0187 lr:0.010000 network_time: 0.0268
476
+ [ Fri Sep 16 01:28:15 2022 ] Eval epoch: 72
477
+ [ Fri Sep 16 01:30:03 2022 ] Mean test loss of 930 batches: 2.5424442291259766.
478
+ [ Fri Sep 16 01:30:03 2022 ] Top1: 55.18%
479
+ [ Fri Sep 16 01:30:04 2022 ] Top5: 81.15%
480
+ [ Fri Sep 16 01:30:04 2022 ] Training epoch: 73
481
+ [ Fri Sep 16 01:30:33 2022 ] Batch(35/162) done. Loss: 0.0086 lr:0.010000 network_time: 0.0283
482
+ [ Fri Sep 16 01:31:46 2022 ] Batch(135/162) done. Loss: 0.0168 lr:0.010000 network_time: 0.0320
483
+ [ Fri Sep 16 01:32:05 2022 ] Eval epoch: 73
484
+ [ Fri Sep 16 01:33:53 2022 ] Mean test loss of 930 batches: 2.5362462997436523.
485
+ [ Fri Sep 16 01:33:53 2022 ] Top1: 55.36%
486
+ [ Fri Sep 16 01:33:54 2022 ] Top5: 81.27%
487
+ [ Fri Sep 16 01:33:54 2022 ] Training epoch: 74
488
+ [ Fri Sep 16 01:34:51 2022 ] Batch(73/162) done. Loss: 0.0156 lr:0.010000 network_time: 0.0273
489
+ [ Fri Sep 16 01:35:55 2022 ] Eval epoch: 74
490
+ [ Fri Sep 16 01:37:43 2022 ] Mean test loss of 930 batches: 2.521191358566284.
491
+ [ Fri Sep 16 01:37:43 2022 ] Top1: 55.26%
492
+ [ Fri Sep 16 01:37:44 2022 ] Top5: 81.21%
493
+ [ Fri Sep 16 01:37:44 2022 ] Training epoch: 75
494
+ [ Fri Sep 16 01:37:56 2022 ] Batch(11/162) done. Loss: 0.0207 lr:0.010000 network_time: 0.0285
495
+ [ Fri Sep 16 01:39:08 2022 ] Batch(111/162) done. Loss: 0.0131 lr:0.010000 network_time: 0.0283
496
+ [ Fri Sep 16 01:39:45 2022 ] Eval epoch: 75
497
+ [ Fri Sep 16 01:41:33 2022 ] Mean test loss of 930 batches: 2.5355825424194336.
498
+ [ Fri Sep 16 01:41:33 2022 ] Top1: 55.33%
499
+ [ Fri Sep 16 01:41:34 2022 ] Top5: 81.14%
500
+ [ Fri Sep 16 01:41:34 2022 ] Training epoch: 76
501
+ [ Fri Sep 16 01:42:13 2022 ] Batch(49/162) done. Loss: 0.0105 lr:0.010000 network_time: 0.0347
502
+ [ Fri Sep 16 01:43:26 2022 ] Batch(149/162) done. Loss: 0.0263 lr:0.010000 network_time: 0.0265
503
+ [ Fri Sep 16 01:43:35 2022 ] Eval epoch: 76
504
+ [ Fri Sep 16 01:45:23 2022 ] Mean test loss of 930 batches: 2.538811683654785.
505
+ [ Fri Sep 16 01:45:24 2022 ] Top1: 55.36%
506
+ [ Fri Sep 16 01:45:24 2022 ] Top5: 81.18%
507
+ [ Fri Sep 16 01:45:24 2022 ] Training epoch: 77
508
+ [ Fri Sep 16 01:46:32 2022 ] Batch(87/162) done. Loss: 0.0133 lr:0.010000 network_time: 0.0324
509
+ [ Fri Sep 16 01:47:26 2022 ] Eval epoch: 77
510
+ [ Fri Sep 16 01:49:14 2022 ] Mean test loss of 930 batches: 2.534885883331299.
511
+ [ Fri Sep 16 01:49:15 2022 ] Top1: 55.49%
512
+ [ Fri Sep 16 01:49:15 2022 ] Top5: 81.30%
513
+ [ Fri Sep 16 01:49:15 2022 ] Training epoch: 78
514
+ [ Fri Sep 16 01:49:37 2022 ] Batch(25/162) done. Loss: 0.0092 lr:0.010000 network_time: 0.0270
515
+ [ Fri Sep 16 01:50:50 2022 ] Batch(125/162) done. Loss: 0.0150 lr:0.010000 network_time: 0.0286
516
+ [ Fri Sep 16 01:51:17 2022 ] Eval epoch: 78
517
+ [ Fri Sep 16 01:53:07 2022 ] Mean test loss of 930 batches: 2.521869421005249.
518
+ [ Fri Sep 16 01:53:07 2022 ] Top1: 55.60%
519
+ [ Fri Sep 16 01:53:08 2022 ] Top5: 81.24%
520
+ [ Fri Sep 16 01:53:08 2022 ] Training epoch: 79
521
+ [ Fri Sep 16 01:53:57 2022 ] Batch(63/162) done. Loss: 0.0122 lr:0.010000 network_time: 0.0437
522
+ [ Fri Sep 16 01:55:08 2022 ] Eval epoch: 79
523
+ [ Fri Sep 16 01:56:56 2022 ] Mean test loss of 930 batches: 2.5722897052764893.
524
+ [ Fri Sep 16 01:56:56 2022 ] Top1: 55.16%
525
+ [ Fri Sep 16 01:56:57 2022 ] Top5: 81.03%
526
+ [ Fri Sep 16 01:56:57 2022 ] Training epoch: 80
527
+ [ Fri Sep 16 01:57:02 2022 ] Batch(1/162) done. Loss: 0.0119 lr:0.010000 network_time: 0.0294
528
+ [ Fri Sep 16 01:58:14 2022 ] Batch(101/162) done. Loss: 0.0125 lr:0.010000 network_time: 0.0268
529
+ [ Fri Sep 16 01:58:58 2022 ] Eval epoch: 80
530
+ [ Fri Sep 16 02:00:46 2022 ] Mean test loss of 930 batches: 2.557574987411499.
531
+ [ Fri Sep 16 02:00:47 2022 ] Top1: 55.35%
532
+ [ Fri Sep 16 02:00:47 2022 ] Top5: 81.25%
533
+ [ Fri Sep 16 02:00:47 2022 ] Training epoch: 81
534
+ [ Fri Sep 16 02:01:19 2022 ] Batch(39/162) done. Loss: 0.0079 lr:0.001000 network_time: 0.0346
535
+ [ Fri Sep 16 02:02:32 2022 ] Batch(139/162) done. Loss: 0.0043 lr:0.001000 network_time: 0.0307
536
+ [ Fri Sep 16 02:02:49 2022 ] Eval epoch: 81
537
+ [ Fri Sep 16 02:04:36 2022 ] Mean test loss of 930 batches: 2.572951078414917.
538
+ [ Fri Sep 16 02:04:37 2022 ] Top1: 55.27%
539
+ [ Fri Sep 16 02:04:37 2022 ] Top5: 81.03%
540
+ [ Fri Sep 16 02:04:37 2022 ] Training epoch: 82
541
+ [ Fri Sep 16 02:05:37 2022 ] Batch(77/162) done. Loss: 0.0055 lr:0.001000 network_time: 0.0275
542
+ [ Fri Sep 16 02:06:38 2022 ] Eval epoch: 82
543
+ [ Fri Sep 16 02:08:26 2022 ] Mean test loss of 930 batches: 2.530529022216797.
544
+ [ Fri Sep 16 02:08:27 2022 ] Top1: 55.61%
545
+ [ Fri Sep 16 02:08:27 2022 ] Top5: 81.47%
546
+ [ Fri Sep 16 02:08:27 2022 ] Training epoch: 83
547
+ [ Fri Sep 16 02:08:42 2022 ] Batch(15/162) done. Loss: 0.0110 lr:0.001000 network_time: 0.0298
548
+ [ Fri Sep 16 02:09:54 2022 ] Batch(115/162) done. Loss: 0.0387 lr:0.001000 network_time: 0.0316
549
+ [ Fri Sep 16 02:10:28 2022 ] Eval epoch: 83
550
+ [ Fri Sep 16 02:12:17 2022 ] Mean test loss of 930 batches: 2.545494794845581.
551
+ [ Fri Sep 16 02:12:17 2022 ] Top1: 55.61%
552
+ [ Fri Sep 16 02:12:17 2022 ] Top5: 81.21%
553
+ [ Fri Sep 16 02:12:18 2022 ] Training epoch: 84
554
+ [ Fri Sep 16 02:13:00 2022 ] Batch(53/162) done. Loss: 0.0153 lr:0.001000 network_time: 0.0308
555
+ [ Fri Sep 16 02:14:13 2022 ] Batch(153/162) done. Loss: 0.0306 lr:0.001000 network_time: 0.0297
556
+ [ Fri Sep 16 02:14:19 2022 ] Eval epoch: 84
557
+ [ Fri Sep 16 02:16:07 2022 ] Mean test loss of 930 batches: 2.524808883666992.
558
+ [ Fri Sep 16 02:16:08 2022 ] Top1: 55.55%
559
+ [ Fri Sep 16 02:16:08 2022 ] Top5: 81.22%
560
+ [ Fri Sep 16 02:16:08 2022 ] Training epoch: 85
561
+ [ Fri Sep 16 02:17:18 2022 ] Batch(91/162) done. Loss: 0.0149 lr:0.001000 network_time: 0.0278
562
+ [ Fri Sep 16 02:18:10 2022 ] Eval epoch: 85
563
+ [ Fri Sep 16 02:19:58 2022 ] Mean test loss of 930 batches: 2.586280345916748.
564
+ [ Fri Sep 16 02:19:58 2022 ] Top1: 55.22%
565
+ [ Fri Sep 16 02:19:59 2022 ] Top5: 81.15%
566
+ [ Fri Sep 16 02:19:59 2022 ] Training epoch: 86
567
+ [ Fri Sep 16 02:20:24 2022 ] Batch(29/162) done. Loss: 0.0106 lr:0.001000 network_time: 0.0550
568
+ [ Fri Sep 16 02:21:37 2022 ] Batch(129/162) done. Loss: 0.0091 lr:0.001000 network_time: 0.0295
569
+ [ Fri Sep 16 02:22:00 2022 ] Eval epoch: 86
570
+ [ Fri Sep 16 02:23:48 2022 ] Mean test loss of 930 batches: 2.5408122539520264.
571
+ [ Fri Sep 16 02:23:49 2022 ] Top1: 55.49%
572
+ [ Fri Sep 16 02:23:49 2022 ] Top5: 81.15%
573
+ [ Fri Sep 16 02:23:49 2022 ] Training epoch: 87
574
+ [ Fri Sep 16 02:24:41 2022 ] Batch(67/162) done. Loss: 0.0092 lr:0.001000 network_time: 0.0257
575
+ [ Fri Sep 16 02:25:50 2022 ] Eval epoch: 87
576
+ [ Fri Sep 16 02:27:38 2022 ] Mean test loss of 930 batches: 2.536407232284546.
577
+ [ Fri Sep 16 02:27:38 2022 ] Top1: 55.42%
578
+ [ Fri Sep 16 02:27:39 2022 ] Top5: 81.31%
579
+ [ Fri Sep 16 02:27:39 2022 ] Training epoch: 88
580
+ [ Fri Sep 16 02:27:46 2022 ] Batch(5/162) done. Loss: 0.0140 lr:0.001000 network_time: 0.0274
581
+ [ Fri Sep 16 02:28:59 2022 ] Batch(105/162) done. Loss: 0.0098 lr:0.001000 network_time: 0.0277
582
+ [ Fri Sep 16 02:29:40 2022 ] Eval epoch: 88
583
+ [ Fri Sep 16 02:31:28 2022 ] Mean test loss of 930 batches: 2.528388023376465.
584
+ [ Fri Sep 16 02:31:29 2022 ] Top1: 55.43%
585
+ [ Fri Sep 16 02:31:29 2022 ] Top5: 81.16%
586
+ [ Fri Sep 16 02:31:29 2022 ] Training epoch: 89
587
+ [ Fri Sep 16 02:32:05 2022 ] Batch(43/162) done. Loss: 0.0132 lr:0.001000 network_time: 0.0303
588
+ [ Fri Sep 16 02:33:17 2022 ] Batch(143/162) done. Loss: 0.0126 lr:0.001000 network_time: 0.0281
589
+ [ Fri Sep 16 02:33:31 2022 ] Eval epoch: 89
590
+ [ Fri Sep 16 02:35:18 2022 ] Mean test loss of 930 batches: 2.5503320693969727.
591
+ [ Fri Sep 16 02:35:19 2022 ] Top1: 55.38%
592
+ [ Fri Sep 16 02:35:19 2022 ] Top5: 81.19%
593
+ [ Fri Sep 16 02:35:20 2022 ] Training epoch: 90
594
+ [ Fri Sep 16 02:36:22 2022 ] Batch(81/162) done. Loss: 0.0094 lr:0.001000 network_time: 0.0324
595
+ [ Fri Sep 16 02:37:21 2022 ] Eval epoch: 90
596
+ [ Fri Sep 16 02:39:09 2022 ] Mean test loss of 930 batches: 2.556290864944458.
597
+ [ Fri Sep 16 02:39:09 2022 ] Top1: 55.48%
598
+ [ Fri Sep 16 02:39:10 2022 ] Top5: 81.22%
599
+ [ Fri Sep 16 02:39:10 2022 ] Training epoch: 91
600
+ [ Fri Sep 16 02:39:28 2022 ] Batch(19/162) done. Loss: 0.0091 lr:0.001000 network_time: 0.0280
601
+ [ Fri Sep 16 02:40:40 2022 ] Batch(119/162) done. Loss: 0.0052 lr:0.001000 network_time: 0.0256
602
+ [ Fri Sep 16 02:41:11 2022 ] Eval epoch: 91
603
+ [ Fri Sep 16 02:42:59 2022 ] Mean test loss of 930 batches: 2.58315372467041.
604
+ [ Fri Sep 16 02:43:00 2022 ] Top1: 55.23%
605
+ [ Fri Sep 16 02:43:00 2022 ] Top5: 81.20%
606
+ [ Fri Sep 16 02:43:00 2022 ] Training epoch: 92
607
+ [ Fri Sep 16 02:43:46 2022 ] Batch(57/162) done. Loss: 0.0191 lr:0.001000 network_time: 0.0328
608
+ [ Fri Sep 16 02:44:58 2022 ] Batch(157/162) done. Loss: 0.0156 lr:0.001000 network_time: 0.0282
609
+ [ Fri Sep 16 02:45:02 2022 ] Eval epoch: 92
610
+ [ Fri Sep 16 02:46:50 2022 ] Mean test loss of 930 batches: 2.5625314712524414.
611
+ [ Fri Sep 16 02:46:50 2022 ] Top1: 55.23%
612
+ [ Fri Sep 16 02:46:50 2022 ] Top5: 81.03%
613
+ [ Fri Sep 16 02:46:51 2022 ] Training epoch: 93
614
+ [ Fri Sep 16 02:48:04 2022 ] Batch(95/162) done. Loss: 0.0076 lr:0.001000 network_time: 0.0268
615
+ [ Fri Sep 16 02:48:52 2022 ] Eval epoch: 93
616
+ [ Fri Sep 16 02:50:40 2022 ] Mean test loss of 930 batches: 2.5470073223114014.
617
+ [ Fri Sep 16 02:50:40 2022 ] Top1: 55.65%
618
+ [ Fri Sep 16 02:50:41 2022 ] Top5: 81.37%
619
+ [ Fri Sep 16 02:50:41 2022 ] Training epoch: 94
620
+ [ Fri Sep 16 02:51:09 2022 ] Batch(33/162) done. Loss: 0.0263 lr:0.001000 network_time: 0.0274
621
+ [ Fri Sep 16 02:52:22 2022 ] Batch(133/162) done. Loss: 0.0480 lr:0.001000 network_time: 0.0274
622
+ [ Fri Sep 16 02:52:42 2022 ] Eval epoch: 94
623
+ [ Fri Sep 16 02:54:30 2022 ] Mean test loss of 930 batches: 2.5392682552337646.
624
+ [ Fri Sep 16 02:54:30 2022 ] Top1: 55.68%
625
+ [ Fri Sep 16 02:54:31 2022 ] Top5: 81.33%
626
+ [ Fri Sep 16 02:54:31 2022 ] Training epoch: 95
627
+ [ Fri Sep 16 02:55:26 2022 ] Batch(71/162) done. Loss: 0.0408 lr:0.001000 network_time: 0.0315
628
+ [ Fri Sep 16 02:56:32 2022 ] Eval epoch: 95
629
+ [ Fri Sep 16 02:58:20 2022 ] Mean test loss of 930 batches: 2.55815052986145.
630
+ [ Fri Sep 16 02:58:21 2022 ] Top1: 55.28%
631
+ [ Fri Sep 16 02:58:21 2022 ] Top5: 81.19%
632
+ [ Fri Sep 16 02:58:21 2022 ] Training epoch: 96
633
+ [ Fri Sep 16 02:58:32 2022 ] Batch(9/162) done. Loss: 0.0116 lr:0.001000 network_time: 0.0290
634
+ [ Fri Sep 16 02:59:44 2022 ] Batch(109/162) done. Loss: 0.0113 lr:0.001000 network_time: 0.0284
635
+ [ Fri Sep 16 03:00:23 2022 ] Eval epoch: 96
636
+ [ Fri Sep 16 03:02:10 2022 ] Mean test loss of 930 batches: 2.5667190551757812.
637
+ [ Fri Sep 16 03:02:11 2022 ] Top1: 55.56%
638
+ [ Fri Sep 16 03:02:11 2022 ] Top5: 81.15%
639
+ [ Fri Sep 16 03:02:11 2022 ] Training epoch: 97
640
+ [ Fri Sep 16 03:02:49 2022 ] Batch(47/162) done. Loss: 0.0686 lr:0.001000 network_time: 0.0280
641
+ [ Fri Sep 16 03:04:02 2022 ] Batch(147/162) done. Loss: 0.0074 lr:0.001000 network_time: 0.0278
642
+ [ Fri Sep 16 03:04:12 2022 ] Eval epoch: 97
643
+ [ Fri Sep 16 03:06:00 2022 ] Mean test loss of 930 batches: 2.544494867324829.
644
+ [ Fri Sep 16 03:06:00 2022 ] Top1: 55.48%
645
+ [ Fri Sep 16 03:06:01 2022 ] Top5: 81.22%
646
+ [ Fri Sep 16 03:06:01 2022 ] Training epoch: 98
647
+ [ Fri Sep 16 03:07:07 2022 ] Batch(85/162) done. Loss: 0.0252 lr:0.001000 network_time: 0.0278
648
+ [ Fri Sep 16 03:08:02 2022 ] Eval epoch: 98
649
+ [ Fri Sep 16 03:09:50 2022 ] Mean test loss of 930 batches: 2.5435867309570312.
650
+ [ Fri Sep 16 03:09:50 2022 ] Top1: 55.66%
651
+ [ Fri Sep 16 03:09:51 2022 ] Top5: 81.36%
652
+ [ Fri Sep 16 03:09:51 2022 ] Training epoch: 99
653
+ [ Fri Sep 16 03:10:12 2022 ] Batch(23/162) done. Loss: 0.0143 lr:0.001000 network_time: 0.0323
654
+ [ Fri Sep 16 03:11:24 2022 ] Batch(123/162) done. Loss: 0.0108 lr:0.001000 network_time: 0.0284
655
+ [ Fri Sep 16 03:11:52 2022 ] Eval epoch: 99
656
+ [ Fri Sep 16 03:13:40 2022 ] Mean test loss of 930 batches: 2.562251567840576.
657
+ [ Fri Sep 16 03:13:40 2022 ] Top1: 55.43%
658
+ [ Fri Sep 16 03:13:41 2022 ] Top5: 81.23%
659
+ [ Fri Sep 16 03:13:41 2022 ] Training epoch: 100
660
+ [ Fri Sep 16 03:14:29 2022 ] Batch(61/162) done. Loss: 0.0118 lr:0.001000 network_time: 0.0284
661
+ [ Fri Sep 16 03:15:42 2022 ] Batch(161/162) done. Loss: 0.0102 lr:0.001000 network_time: 0.0315
662
+ [ Fri Sep 16 03:15:42 2022 ] Eval epoch: 100
663
+ [ Fri Sep 16 03:17:30 2022 ] Mean test loss of 930 batches: 2.5743367671966553.
664
+ [ Fri Sep 16 03:17:31 2022 ] Top1: 55.23%
665
+ [ Fri Sep 16 03:17:31 2022 ] Top5: 80.93%
666
+ [ Fri Sep 16 03:17:31 2022 ] Training epoch: 101
667
+ [ Fri Sep 16 03:18:47 2022 ] Batch(99/162) done. Loss: 0.0172 lr:0.000100 network_time: 0.0292
668
+ [ Fri Sep 16 03:19:32 2022 ] Eval epoch: 101
669
+ [ Fri Sep 16 03:21:21 2022 ] Mean test loss of 930 batches: 2.547807455062866.
670
+ [ Fri Sep 16 03:21:21 2022 ] Top1: 55.69%
671
+ [ Fri Sep 16 03:21:22 2022 ] Top5: 81.43%
672
+ [ Fri Sep 16 03:21:22 2022 ] Training epoch: 102
673
+ [ Fri Sep 16 03:21:52 2022 ] Batch(37/162) done. Loss: 0.0108 lr:0.000100 network_time: 0.0309
674
+ [ Fri Sep 16 03:23:05 2022 ] Batch(137/162) done. Loss: 0.0219 lr:0.000100 network_time: 0.0266
675
+ [ Fri Sep 16 03:23:23 2022 ] Eval epoch: 102
676
+ [ Fri Sep 16 03:25:10 2022 ] Mean test loss of 930 batches: 2.5248377323150635.
677
+ [ Fri Sep 16 03:25:11 2022 ] Top1: 55.67%
678
+ [ Fri Sep 16 03:25:11 2022 ] Top5: 81.42%
679
+ [ Fri Sep 16 03:25:12 2022 ] Training epoch: 103
680
+ [ Fri Sep 16 03:26:10 2022 ] Batch(75/162) done. Loss: 0.0303 lr:0.000100 network_time: 0.0273
681
+ [ Fri Sep 16 03:27:13 2022 ] Eval epoch: 103
682
+ [ Fri Sep 16 03:29:00 2022 ] Mean test loss of 930 batches: 2.5692896842956543.
683
+ [ Fri Sep 16 03:29:01 2022 ] Top1: 55.27%
684
+ [ Fri Sep 16 03:29:01 2022 ] Top5: 81.23%
685
+ [ Fri Sep 16 03:29:01 2022 ] Training epoch: 104
686
+ [ Fri Sep 16 03:29:14 2022 ] Batch(13/162) done. Loss: 0.0176 lr:0.000100 network_time: 0.0322
687
+ [ Fri Sep 16 03:30:27 2022 ] Batch(113/162) done. Loss: 0.0143 lr:0.000100 network_time: 0.0262
688
+ [ Fri Sep 16 03:31:02 2022 ] Eval epoch: 104
689
+ [ Fri Sep 16 03:32:51 2022 ] Mean test loss of 930 batches: 2.5471296310424805.
690
+ [ Fri Sep 16 03:32:51 2022 ] Top1: 55.70%
691
+ [ Fri Sep 16 03:32:51 2022 ] Top5: 81.42%
692
+ [ Fri Sep 16 03:32:52 2022 ] Training epoch: 105
693
+ [ Fri Sep 16 03:33:33 2022 ] Batch(51/162) done. Loss: 0.0230 lr:0.000100 network_time: 0.0288
694
+ [ Fri Sep 16 03:34:45 2022 ] Batch(151/162) done. Loss: 0.0163 lr:0.000100 network_time: 0.0269
695
+ [ Fri Sep 16 03:34:53 2022 ] Eval epoch: 105
696
+ [ Fri Sep 16 03:36:41 2022 ] Mean test loss of 930 batches: 2.578871488571167.
697
+ [ Fri Sep 16 03:36:41 2022 ] Top1: 55.30%
698
+ [ Fri Sep 16 03:36:42 2022 ] Top5: 81.05%
699
+ [ Fri Sep 16 03:36:42 2022 ] Training epoch: 106
700
+ [ Fri Sep 16 03:37:50 2022 ] Batch(89/162) done. Loss: 0.0128 lr:0.000100 network_time: 0.0276
701
+ [ Fri Sep 16 03:38:43 2022 ] Eval epoch: 106
702
+ [ Fri Sep 16 03:40:30 2022 ] Mean test loss of 930 batches: 2.5462167263031006.
703
+ [ Fri Sep 16 03:40:31 2022 ] Top1: 55.46%
704
+ [ Fri Sep 16 03:40:31 2022 ] Top5: 81.28%
705
+ [ Fri Sep 16 03:40:32 2022 ] Training epoch: 107
706
+ [ Fri Sep 16 03:40:55 2022 ] Batch(27/162) done. Loss: 0.0202 lr:0.000100 network_time: 0.0264
707
+ [ Fri Sep 16 03:42:08 2022 ] Batch(127/162) done. Loss: 0.0215 lr:0.000100 network_time: 0.0324
708
+ [ Fri Sep 16 03:42:33 2022 ] Eval epoch: 107
709
+ [ Fri Sep 16 03:44:21 2022 ] Mean test loss of 930 batches: 2.5570015907287598.
710
+ [ Fri Sep 16 03:44:21 2022 ] Top1: 55.48%
711
+ [ Fri Sep 16 03:44:21 2022 ] Top5: 81.23%
712
+ [ Fri Sep 16 03:44:22 2022 ] Training epoch: 108
713
+ [ Fri Sep 16 03:45:13 2022 ] Batch(65/162) done. Loss: 0.0059 lr:0.000100 network_time: 0.0286
714
+ [ Fri Sep 16 03:46:23 2022 ] Eval epoch: 108
715
+ [ Fri Sep 16 03:48:10 2022 ] Mean test loss of 930 batches: 2.585301637649536.
716
+ [ Fri Sep 16 03:48:11 2022 ] Top1: 55.24%
717
+ [ Fri Sep 16 03:48:11 2022 ] Top5: 81.05%
718
+ [ Fri Sep 16 03:48:12 2022 ] Training epoch: 109
719
+ [ Fri Sep 16 03:48:17 2022 ] Batch(3/162) done. Loss: 0.0050 lr:0.000100 network_time: 0.0293
720
+ [ Fri Sep 16 03:49:30 2022 ] Batch(103/162) done. Loss: 0.0114 lr:0.000100 network_time: 0.0265
721
+ [ Fri Sep 16 03:50:13 2022 ] Eval epoch: 109
722
+ [ Fri Sep 16 03:52:00 2022 ] Mean test loss of 930 batches: 2.557034492492676.
723
+ [ Fri Sep 16 03:52:01 2022 ] Top1: 55.44%
724
+ [ Fri Sep 16 03:52:01 2022 ] Top5: 81.18%
725
+ [ Fri Sep 16 03:52:02 2022 ] Training epoch: 110
726
+ [ Fri Sep 16 03:52:35 2022 ] Batch(41/162) done. Loss: 0.0297 lr:0.000100 network_time: 0.0346
727
+ [ Fri Sep 16 03:53:48 2022 ] Batch(141/162) done. Loss: 0.0063 lr:0.000100 network_time: 0.0528
728
+ [ Fri Sep 16 03:54:02 2022 ] Eval epoch: 110
729
+ [ Fri Sep 16 03:55:51 2022 ] Mean test loss of 930 batches: 2.576395034790039.
730
+ [ Fri Sep 16 03:55:51 2022 ] Top1: 55.13%
731
+ [ Fri Sep 16 03:55:52 2022 ] Top5: 81.20%
732
+ [ Fri Sep 16 03:55:52 2022 ] Training epoch: 111
733
+ [ Fri Sep 16 03:56:53 2022 ] Batch(79/162) done. Loss: 0.0034 lr:0.000100 network_time: 0.0263
734
+ [ Fri Sep 16 03:57:53 2022 ] Eval epoch: 111
735
+ [ Fri Sep 16 03:59:41 2022 ] Mean test loss of 930 batches: 2.577402114868164.
736
+ [ Fri Sep 16 03:59:41 2022 ] Top1: 55.06%
737
+ [ Fri Sep 16 03:59:42 2022 ] Top5: 80.97%
738
+ [ Fri Sep 16 03:59:42 2022 ] Training epoch: 112
739
+ [ Fri Sep 16 03:59:58 2022 ] Batch(17/162) done. Loss: 0.0095 lr:0.000100 network_time: 0.0267
740
+ [ Fri Sep 16 04:01:11 2022 ] Batch(117/162) done. Loss: 0.0455 lr:0.000100 network_time: 0.0268
741
+ [ Fri Sep 16 04:01:43 2022 ] Eval epoch: 112
742
+ [ Fri Sep 16 04:03:30 2022 ] Mean test loss of 930 batches: 2.5660643577575684.
743
+ [ Fri Sep 16 04:03:31 2022 ] Top1: 55.42%
744
+ [ Fri Sep 16 04:03:31 2022 ] Top5: 81.13%
745
+ [ Fri Sep 16 04:03:32 2022 ] Training epoch: 113
746
+ [ Fri Sep 16 04:04:15 2022 ] Batch(55/162) done. Loss: 0.0135 lr:0.000100 network_time: 0.0284
747
+ [ Fri Sep 16 04:05:28 2022 ] Batch(155/162) done. Loss: 0.0128 lr:0.000100 network_time: 0.0269
748
+ [ Fri Sep 16 04:05:33 2022 ] Eval epoch: 113
749
+ [ Fri Sep 16 04:07:20 2022 ] Mean test loss of 930 batches: 2.570066213607788.
750
+ [ Fri Sep 16 04:07:21 2022 ] Top1: 54.85%
751
+ [ Fri Sep 16 04:07:21 2022 ] Top5: 81.01%
752
+ [ Fri Sep 16 04:07:21 2022 ] Training epoch: 114
753
+ [ Fri Sep 16 04:08:33 2022 ] Batch(93/162) done. Loss: 0.0188 lr:0.000100 network_time: 0.0278
754
+ [ Fri Sep 16 04:09:23 2022 ] Eval epoch: 114
755
+ [ Fri Sep 16 04:11:11 2022 ] Mean test loss of 930 batches: 2.5548155307769775.
756
+ [ Fri Sep 16 04:11:11 2022 ] Top1: 55.34%
757
+ [ Fri Sep 16 04:11:12 2022 ] Top5: 81.18%
758
+ [ Fri Sep 16 04:11:12 2022 ] Training epoch: 115
759
+ [ Fri Sep 16 04:11:38 2022 ] Batch(31/162) done. Loss: 0.0057 lr:0.000100 network_time: 0.0267
760
+ [ Fri Sep 16 04:12:51 2022 ] Batch(131/162) done. Loss: 0.0136 lr:0.000100 network_time: 0.0275
761
+ [ Fri Sep 16 04:13:13 2022 ] Eval epoch: 115
762
+ [ Fri Sep 16 04:15:00 2022 ] Mean test loss of 930 batches: 2.551961898803711.
763
+ [ Fri Sep 16 04:15:01 2022 ] Top1: 55.49%
764
+ [ Fri Sep 16 04:15:01 2022 ] Top5: 81.27%
765
+ [ Fri Sep 16 04:15:02 2022 ] Training epoch: 116
766
+ [ Fri Sep 16 04:15:55 2022 ] Batch(69/162) done. Loss: 0.0155 lr:0.000100 network_time: 0.0265
767
+ [ Fri Sep 16 04:17:02 2022 ] Eval epoch: 116
768
+ [ Fri Sep 16 04:18:50 2022 ] Mean test loss of 930 batches: 2.55898118019104.
769
+ [ Fri Sep 16 04:18:51 2022 ] Top1: 55.49%
770
+ [ Fri Sep 16 04:18:51 2022 ] Top5: 81.27%
771
+ [ Fri Sep 16 04:18:52 2022 ] Training epoch: 117
772
+ [ Fri Sep 16 04:19:00 2022 ] Batch(7/162) done. Loss: 0.0189 lr:0.000100 network_time: 0.0276
773
+ [ Fri Sep 16 04:20:13 2022 ] Batch(107/162) done. Loss: 0.0054 lr:0.000100 network_time: 0.0276
774
+ [ Fri Sep 16 04:20:53 2022 ] Eval epoch: 117
775
+ [ Fri Sep 16 04:22:40 2022 ] Mean test loss of 930 batches: 2.54909348487854.
776
+ [ Fri Sep 16 04:22:40 2022 ] Top1: 55.64%
777
+ [ Fri Sep 16 04:22:41 2022 ] Top5: 81.25%
778
+ [ Fri Sep 16 04:22:41 2022 ] Training epoch: 118
779
+ [ Fri Sep 16 04:23:18 2022 ] Batch(45/162) done. Loss: 0.0172 lr:0.000100 network_time: 0.0312
780
+ [ Fri Sep 16 04:24:31 2022 ] Batch(145/162) done. Loss: 0.0081 lr:0.000100 network_time: 0.0258
781
+ [ Fri Sep 16 04:24:43 2022 ] Eval epoch: 118
782
+ [ Fri Sep 16 04:26:30 2022 ] Mean test loss of 930 batches: 2.568805456161499.
783
+ [ Fri Sep 16 04:26:31 2022 ] Top1: 55.38%
784
+ [ Fri Sep 16 04:26:31 2022 ] Top5: 81.23%
785
+ [ Fri Sep 16 04:26:32 2022 ] Training epoch: 119
786
+ [ Fri Sep 16 04:27:36 2022 ] Batch(83/162) done. Loss: 0.0195 lr:0.000100 network_time: 0.0252
787
+ [ Fri Sep 16 04:28:33 2022 ] Eval epoch: 119
788
+ [ Fri Sep 16 04:30:21 2022 ] Mean test loss of 930 batches: 2.5613887310028076.
789
+ [ Fri Sep 16 04:30:22 2022 ] Top1: 55.40%
790
+ [ Fri Sep 16 04:30:22 2022 ] Top5: 81.13%
791
+ [ Fri Sep 16 04:30:22 2022 ] Training epoch: 120
792
+ [ Fri Sep 16 04:30:42 2022 ] Batch(21/162) done. Loss: 0.0305 lr:0.000100 network_time: 0.0549
793
+ [ Fri Sep 16 04:31:54 2022 ] Batch(121/162) done. Loss: 0.0053 lr:0.000100 network_time: 0.0306
794
+ [ Fri Sep 16 04:32:24 2022 ] Eval epoch: 120
795
+ [ Fri Sep 16 04:34:11 2022 ] Mean test loss of 930 batches: 2.565460443496704.
796
+ [ Fri Sep 16 04:34:12 2022 ] Top1: 55.60%
797
+ [ Fri Sep 16 04:34:12 2022 ] Top5: 81.27%
798
+ [ Fri Sep 16 04:34:13 2022 ] Training epoch: 121
799
+ [ Fri Sep 16 04:34:59 2022 ] Batch(59/162) done. Loss: 0.0073 lr:0.000100 network_time: 0.0274
800
+ [ Fri Sep 16 04:36:12 2022 ] Batch(159/162) done. Loss: 0.0292 lr:0.000100 network_time: 0.0318
801
+ [ Fri Sep 16 04:36:14 2022 ] Eval epoch: 121
802
+ [ Fri Sep 16 04:38:02 2022 ] Mean test loss of 930 batches: 2.552762985229492.
803
+ [ Fri Sep 16 04:38:02 2022 ] Top1: 55.36%
804
+ [ Fri Sep 16 04:38:03 2022 ] Top5: 81.15%
805
+ [ Fri Sep 16 04:38:03 2022 ] Training epoch: 122
806
+ [ Fri Sep 16 04:39:17 2022 ] Batch(97/162) done. Loss: 0.0103 lr:0.000100 network_time: 0.0281
807
+ [ Fri Sep 16 04:40:04 2022 ] Eval epoch: 122
808
+ [ Fri Sep 16 04:41:52 2022 ] Mean test loss of 930 batches: 2.5489044189453125.
809
+ [ Fri Sep 16 04:41:53 2022 ] Top1: 55.62%
810
+ [ Fri Sep 16 04:41:53 2022 ] Top5: 81.31%
811
+ [ Fri Sep 16 04:41:53 2022 ] Training epoch: 123
812
+ [ Fri Sep 16 04:42:23 2022 ] Batch(35/162) done. Loss: 0.0388 lr:0.000100 network_time: 0.0267
813
+ [ Fri Sep 16 04:43:35 2022 ] Batch(135/162) done. Loss: 0.0089 lr:0.000100 network_time: 0.0272
814
+ [ Fri Sep 16 04:43:55 2022 ] Eval epoch: 123
815
+ [ Fri Sep 16 04:45:42 2022 ] Mean test loss of 930 batches: 2.5398478507995605.
816
+ [ Fri Sep 16 04:45:43 2022 ] Top1: 55.33%
817
+ [ Fri Sep 16 04:45:43 2022 ] Top5: 81.14%
818
+ [ Fri Sep 16 04:45:43 2022 ] Training epoch: 124
819
+ [ Fri Sep 16 04:46:40 2022 ] Batch(73/162) done. Loss: 0.0072 lr:0.000100 network_time: 0.0264
820
+ [ Fri Sep 16 04:47:44 2022 ] Eval epoch: 124
821
+ [ Fri Sep 16 04:49:33 2022 ] Mean test loss of 930 batches: 2.5149850845336914.
822
+ [ Fri Sep 16 04:49:33 2022 ] Top1: 55.92%
823
+ [ Fri Sep 16 04:49:34 2022 ] Top5: 81.55%
824
+ [ Fri Sep 16 04:49:34 2022 ] Training epoch: 125
825
+ [ Fri Sep 16 04:49:46 2022 ] Batch(11/162) done. Loss: 0.0073 lr:0.000100 network_time: 0.0301
826
+ [ Fri Sep 16 04:50:59 2022 ] Batch(111/162) done. Loss: 0.0125 lr:0.000100 network_time: 0.0331
827
+ [ Fri Sep 16 04:51:35 2022 ] Eval epoch: 125
828
+ [ Fri Sep 16 04:53:23 2022 ] Mean test loss of 930 batches: 2.5590920448303223.
829
+ [ Fri Sep 16 04:53:23 2022 ] Top1: 55.52%
830
+ [ Fri Sep 16 04:53:24 2022 ] Top5: 81.25%
831
+ [ Fri Sep 16 04:53:24 2022 ] Training epoch: 126
832
+ [ Fri Sep 16 04:54:03 2022 ] Batch(49/162) done. Loss: 0.0049 lr:0.000100 network_time: 0.0409
833
+ [ Fri Sep 16 04:55:16 2022 ] Batch(149/162) done. Loss: 0.0071 lr:0.000100 network_time: 0.0270
834
+ [ Fri Sep 16 04:55:25 2022 ] Eval epoch: 126
835
+ [ Fri Sep 16 04:57:13 2022 ] Mean test loss of 930 batches: 2.547355890274048.
836
+ [ Fri Sep 16 04:57:14 2022 ] Top1: 55.43%
837
+ [ Fri Sep 16 04:57:14 2022 ] Top5: 81.17%
838
+ [ Fri Sep 16 04:57:14 2022 ] Training epoch: 127
839
+ [ Fri Sep 16 04:58:21 2022 ] Batch(87/162) done. Loss: 0.0054 lr:0.000100 network_time: 0.0279
840
+ [ Fri Sep 16 04:59:15 2022 ] Eval epoch: 127
841
+ [ Fri Sep 16 05:01:03 2022 ] Mean test loss of 930 batches: 2.557677745819092.
842
+ [ Fri Sep 16 05:01:04 2022 ] Top1: 55.55%
843
+ [ Fri Sep 16 05:01:04 2022 ] Top5: 81.28%
844
+ [ Fri Sep 16 05:01:04 2022 ] Training epoch: 128
845
+ [ Fri Sep 16 05:01:26 2022 ] Batch(25/162) done. Loss: 0.0053 lr:0.000100 network_time: 0.0262
846
+ [ Fri Sep 16 05:02:39 2022 ] Batch(125/162) done. Loss: 0.0104 lr:0.000100 network_time: 0.0308
847
+ [ Fri Sep 16 05:03:06 2022 ] Eval epoch: 128
848
+ [ Fri Sep 16 05:04:53 2022 ] Mean test loss of 930 batches: 2.564342498779297.
849
+ [ Fri Sep 16 05:04:54 2022 ] Top1: 55.33%
850
+ [ Fri Sep 16 05:04:54 2022 ] Top5: 81.05%
851
+ [ Fri Sep 16 05:04:54 2022 ] Training epoch: 129
852
+ [ Fri Sep 16 05:05:44 2022 ] Batch(63/162) done. Loss: 0.0051 lr:0.000100 network_time: 0.0265
853
+ [ Fri Sep 16 05:06:56 2022 ] Eval epoch: 129
854
+ [ Fri Sep 16 05:08:43 2022 ] Mean test loss of 930 batches: 2.5628936290740967.
855
+ [ Fri Sep 16 05:08:43 2022 ] Top1: 55.28%
856
+ [ Fri Sep 16 05:08:44 2022 ] Top5: 81.18%
857
+ [ Fri Sep 16 05:08:44 2022 ] Training epoch: 130
858
+ [ Fri Sep 16 05:08:48 2022 ] Batch(1/162) done. Loss: 0.0198 lr:0.000100 network_time: 0.0259
859
+ [ Fri Sep 16 05:10:01 2022 ] Batch(101/162) done. Loss: 0.0120 lr:0.000100 network_time: 0.0274
860
+ [ Fri Sep 16 05:10:45 2022 ] Eval epoch: 130
861
+ [ Fri Sep 16 05:12:33 2022 ] Mean test loss of 930 batches: 2.5282130241394043.
862
+ [ Fri Sep 16 05:12:33 2022 ] Top1: 55.60%
863
+ [ Fri Sep 16 05:12:34 2022 ] Top5: 81.39%
864
+ [ Fri Sep 16 05:12:34 2022 ] Training epoch: 131
865
+ [ Fri Sep 16 05:13:06 2022 ] Batch(39/162) done. Loss: 0.0072 lr:0.000100 network_time: 0.0219
866
+ [ Fri Sep 16 05:14:19 2022 ] Batch(139/162) done. Loss: 0.0100 lr:0.000100 network_time: 0.0271
867
+ [ Fri Sep 16 05:14:35 2022 ] Eval epoch: 131
868
+ [ Fri Sep 16 05:16:23 2022 ] Mean test loss of 930 batches: 2.547071933746338.
869
+ [ Fri Sep 16 05:16:23 2022 ] Top1: 55.65%
870
+ [ Fri Sep 16 05:16:23 2022 ] Top5: 81.40%
871
+ [ Fri Sep 16 05:16:24 2022 ] Training epoch: 132
872
+ [ Fri Sep 16 05:17:23 2022 ] Batch(77/162) done. Loss: 0.0125 lr:0.000100 network_time: 0.0322
873
+ [ Fri Sep 16 05:18:25 2022 ] Eval epoch: 132
874
+ [ Fri Sep 16 05:20:12 2022 ] Mean test loss of 930 batches: 2.5421905517578125.
875
+ [ Fri Sep 16 05:20:13 2022 ] Top1: 55.51%
876
+ [ Fri Sep 16 05:20:13 2022 ] Top5: 81.40%
877
+ [ Fri Sep 16 05:20:13 2022 ] Training epoch: 133
878
+ [ Fri Sep 16 05:20:28 2022 ] Batch(15/162) done. Loss: 0.0080 lr:0.000100 network_time: 0.0302
879
+ [ Fri Sep 16 05:21:41 2022 ] Batch(115/162) done. Loss: 0.0089 lr:0.000100 network_time: 0.0285
880
+ [ Fri Sep 16 05:22:15 2022 ] Eval epoch: 133
881
+ [ Fri Sep 16 05:24:04 2022 ] Mean test loss of 930 batches: 2.5551788806915283.
882
+ [ Fri Sep 16 05:24:04 2022 ] Top1: 55.47%
883
+ [ Fri Sep 16 05:24:05 2022 ] Top5: 81.22%
884
+ [ Fri Sep 16 05:24:05 2022 ] Training epoch: 134
885
+ [ Fri Sep 16 05:24:47 2022 ] Batch(53/162) done. Loss: 0.0142 lr:0.000100 network_time: 0.0318
886
+ [ Fri Sep 16 05:26:00 2022 ] Batch(153/162) done. Loss: 0.0062 lr:0.000100 network_time: 0.0561
887
+ [ Fri Sep 16 05:26:06 2022 ] Eval epoch: 134
888
+ [ Fri Sep 16 05:27:53 2022 ] Mean test loss of 930 batches: 2.6278786659240723.
889
+ [ Fri Sep 16 05:27:54 2022 ] Top1: 54.84%
890
+ [ Fri Sep 16 05:27:54 2022 ] Top5: 80.96%
891
+ [ Fri Sep 16 05:27:55 2022 ] Training epoch: 135
892
+ [ Fri Sep 16 05:29:05 2022 ] Batch(91/162) done. Loss: 0.0093 lr:0.000100 network_time: 0.0310
893
+ [ Fri Sep 16 05:29:56 2022 ] Eval epoch: 135
894
+ [ Fri Sep 16 05:31:44 2022 ] Mean test loss of 930 batches: 2.53376841545105.
895
+ [ Fri Sep 16 05:31:45 2022 ] Top1: 55.78%
896
+ [ Fri Sep 16 05:31:45 2022 ] Top5: 81.33%
897
+ [ Fri Sep 16 05:31:46 2022 ] Training epoch: 136
898
+ [ Fri Sep 16 05:32:11 2022 ] Batch(29/162) done. Loss: 0.0107 lr:0.000100 network_time: 0.0274
899
+ [ Fri Sep 16 05:33:23 2022 ] Batch(129/162) done. Loss: 0.0118 lr:0.000100 network_time: 0.0265
900
+ [ Fri Sep 16 05:33:47 2022 ] Eval epoch: 136
901
+ [ Fri Sep 16 05:35:34 2022 ] Mean test loss of 930 batches: 2.548306703567505.
902
+ [ Fri Sep 16 05:35:35 2022 ] Top1: 55.70%
903
+ [ Fri Sep 16 05:35:35 2022 ] Top5: 81.47%
904
+ [ Fri Sep 16 05:35:35 2022 ] Training epoch: 137
905
+ [ Fri Sep 16 05:36:28 2022 ] Batch(67/162) done. Loss: 0.0078 lr:0.000100 network_time: 0.0264
906
+ [ Fri Sep 16 05:37:36 2022 ] Eval epoch: 137
907
+ [ Fri Sep 16 05:39:24 2022 ] Mean test loss of 930 batches: 2.568533182144165.
908
+ [ Fri Sep 16 05:39:24 2022 ] Top1: 55.41%
909
+ [ Fri Sep 16 05:39:25 2022 ] Top5: 81.25%
910
+ [ Fri Sep 16 05:39:25 2022 ] Training epoch: 138
911
+ [ Fri Sep 16 05:39:33 2022 ] Batch(5/162) done. Loss: 0.0063 lr:0.000100 network_time: 0.0273
912
+ [ Fri Sep 16 05:40:45 2022 ] Batch(105/162) done. Loss: 0.0060 lr:0.000100 network_time: 0.0467
913
+ [ Fri Sep 16 05:41:26 2022 ] Eval epoch: 138
914
+ [ Fri Sep 16 05:43:14 2022 ] Mean test loss of 930 batches: 2.5520284175872803.
915
+ [ Fri Sep 16 05:43:15 2022 ] Top1: 55.65%
916
+ [ Fri Sep 16 05:43:15 2022 ] Top5: 81.48%
917
+ [ Fri Sep 16 05:43:15 2022 ] Training epoch: 139
918
+ [ Fri Sep 16 05:43:50 2022 ] Batch(43/162) done. Loss: 0.0217 lr:0.000100 network_time: 0.0277
919
+ [ Fri Sep 16 05:45:03 2022 ] Batch(143/162) done. Loss: 0.0124 lr:0.000100 network_time: 0.0297
920
+ [ Fri Sep 16 05:45:16 2022 ] Eval epoch: 139
921
+ [ Fri Sep 16 05:47:05 2022 ] Mean test loss of 930 batches: 2.5655524730682373.
922
+ [ Fri Sep 16 05:47:05 2022 ] Top1: 55.30%
923
+ [ Fri Sep 16 05:47:06 2022 ] Top5: 81.07%
924
+ [ Fri Sep 16 05:47:06 2022 ] Training epoch: 140
925
+ [ Fri Sep 16 05:48:08 2022 ] Batch(81/162) done. Loss: 0.0033 lr:0.000100 network_time: 0.0285
926
+ [ Fri Sep 16 05:49:07 2022 ] Eval epoch: 140
927
+ [ Fri Sep 16 05:50:55 2022 ] Mean test loss of 930 batches: 2.5775716304779053.
928
+ [ Fri Sep 16 05:50:55 2022 ] Top1: 55.40%
929
+ [ Fri Sep 16 05:50:56 2022 ] Top5: 81.10%
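The evaluation lines in these logs follow a fixed pattern ("Mean test loss ...", "Top1: xx.xx%", "Top5: xx.xx%"), so the best accuracy reached by a run can be recovered by scanning its log.txt. A minimal parsing sketch in Python; the helper name and the example path are illustrative and are not shipped with this upload:

import re

TOP1_RE = re.compile(r"Top1:\s*([\d.]+)%")
TOP5_RE = re.compile(r"Top5:\s*([\d.]+)%")

def best_accuracy(log_path):
    """Return the best (Top1, Top5) percentages found anywhere in a training log."""
    top1, top5 = [], []
    with open(log_path) as f:
        for line in f:
            m1, m5 = TOP1_RE.search(line), TOP5_RE.search(line)
            if m1:
                top1.append(float(m1.group(1)))
            if m5:
                top5.append(float(m5.group(1)))
    return max(top1, default=None), max(top5, default=None)

# e.g. best_accuracy("ckpt/Others/Shift-GCN/ntu120_xset/ntu120_joint_xset/log.txt")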
ckpt/Others/Shift-GCN/ntu120_xset/ntu120_joint_xset/shift_gcn.py ADDED
@@ -0,0 +1,216 @@
+ import torch
+ import torch.nn as nn
+ import torch.nn.functional as F
+ from torch.autograd import Variable
+ import numpy as np
+ import math
+
+ import sys
+ sys.path.append("./model/Temporal_shift/")
+
+ from cuda.shift import Shift
+
+
+ def import_class(name):
+     components = name.split('.')
+     mod = __import__(components[0])
+     for comp in components[1:]:
+         mod = getattr(mod, comp)
+     return mod
+
+ def conv_init(conv):
+     nn.init.kaiming_normal(conv.weight, mode='fan_out')
+     nn.init.constant(conv.bias, 0)
+
+
+ def bn_init(bn, scale):
+     nn.init.constant(bn.weight, scale)
+     nn.init.constant(bn.bias, 0)
+
+
+ class tcn(nn.Module):
+     def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
+         super(tcn, self).__init__()
+         pad = int((kernel_size - 1) / 2)
+         self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
+                               stride=(stride, 1))
+
+         self.bn = nn.BatchNorm2d(out_channels)
+         self.relu = nn.ReLU()
+         conv_init(self.conv)
+         bn_init(self.bn, 1)
+
+     def forward(self, x):
+         x = self.bn(self.conv(x))
+         return x
+
+
+ class Shift_tcn(nn.Module):
+     def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
+         super(Shift_tcn, self).__init__()
+
+         self.in_channels = in_channels
+         self.out_channels = out_channels
+
+         self.bn = nn.BatchNorm2d(in_channels)
+         self.bn2 = nn.BatchNorm2d(in_channels)
+         bn_init(self.bn2, 1)
+         self.relu = nn.ReLU(inplace=True)
+         self.shift_in = Shift(channel=in_channels, stride=1, init_scale=1)
+         self.shift_out = Shift(channel=out_channels, stride=stride, init_scale=1)
+
+         self.temporal_linear = nn.Conv2d(in_channels, out_channels, 1)
+         nn.init.kaiming_normal(self.temporal_linear.weight, mode='fan_out')
+
+     def forward(self, x):
+         x = self.bn(x)
+         # shift1
+         x = self.shift_in(x)
+         x = self.temporal_linear(x)
+         x = self.relu(x)
+         # shift2
+         x = self.shift_out(x)
+         x = self.bn2(x)
+         return x
+
+
+ class Shift_gcn(nn.Module):
+     def __init__(self, in_channels, out_channels, A, coff_embedding=4, num_subset=3):
+         super(Shift_gcn, self).__init__()
+         self.in_channels = in_channels
+         self.out_channels = out_channels
+         if in_channels != out_channels:
+             self.down = nn.Sequential(
+                 nn.Conv2d(in_channels, out_channels, 1),
+                 nn.BatchNorm2d(out_channels)
+             )
+         else:
+             self.down = lambda x: x
+
+         self.Linear_weight = nn.Parameter(torch.zeros(in_channels, out_channels, requires_grad=True, device='cuda'), requires_grad=True)
+         nn.init.normal_(self.Linear_weight, 0,math.sqrt(1.0/out_channels))
+
+         self.Linear_bias = nn.Parameter(torch.zeros(1,1,out_channels,requires_grad=True,device='cuda'),requires_grad=True)
+         nn.init.constant(self.Linear_bias, 0)
+
+         self.Feature_Mask = nn.Parameter(torch.ones(1,25,in_channels, requires_grad=True,device='cuda'),requires_grad=True)
+         nn.init.constant(self.Feature_Mask, 0)
+
+         self.bn = nn.BatchNorm1d(25*out_channels)
+         self.relu = nn.ReLU()
+
+         for m in self.modules():
+             if isinstance(m, nn.Conv2d):
+                 conv_init(m)
+             elif isinstance(m, nn.BatchNorm2d):
+                 bn_init(m, 1)
+
+         index_array = np.empty(25*in_channels).astype(np.int)
+         for i in range(25):
+             for j in range(in_channels):
+                 index_array[i*in_channels + j] = (i*in_channels + j + j*in_channels)%(in_channels*25)
+         self.shift_in = nn.Parameter(torch.from_numpy(index_array),requires_grad=False)
+
+         index_array = np.empty(25*out_channels).astype(np.int)
+         for i in range(25):
+             for j in range(out_channels):
+                 index_array[i*out_channels + j] = (i*out_channels + j - j*out_channels)%(out_channels*25)
+         self.shift_out = nn.Parameter(torch.from_numpy(index_array),requires_grad=False)
+
+
+     def forward(self, x0):
+         n, c, t, v = x0.size()
+         x = x0.permute(0,2,3,1).contiguous()
+
+         # shift1
+         x = x.view(n*t,v*c)
+         x = torch.index_select(x, 1, self.shift_in)
+         x = x.view(n*t,v,c)
+         x = x * (torch.tanh(self.Feature_Mask)+1)
+
+         x = torch.einsum('nwc,cd->nwd', (x, self.Linear_weight)).contiguous() # nt,v,c
+         x = x + self.Linear_bias
+
+         # shift2
+         x = x.view(n*t,-1)
+         x = torch.index_select(x, 1, self.shift_out)
+         x = self.bn(x)
+         x = x.view(n,t,v,self.out_channels).permute(0,3,1,2) # n,c,t,v
+
+         x = x + self.down(x0)
+         x = self.relu(x)
+         return x
+
+
+ class TCN_GCN_unit(nn.Module):
+     def __init__(self, in_channels, out_channels, A, stride=1, residual=True):
+         super(TCN_GCN_unit, self).__init__()
+         self.gcn1 = Shift_gcn(in_channels, out_channels, A)
+         self.tcn1 = Shift_tcn(out_channels, out_channels, stride=stride)
+         self.relu = nn.ReLU()
+
+         if not residual:
+             self.residual = lambda x: 0
+
+         elif (in_channels == out_channels) and (stride == 1):
+             self.residual = lambda x: x
+         else:
+             self.residual = tcn(in_channels, out_channels, kernel_size=1, stride=stride)
+
+     def forward(self, x):
+         x = self.tcn1(self.gcn1(x)) + self.residual(x)
+         return self.relu(x)
+
+
+ class Model(nn.Module):
+     def __init__(self, num_class=60, num_point=25, num_person=2, graph=None, graph_args=dict(), in_channels=3):
+         super(Model, self).__init__()
+
+         if graph is None:
+             raise ValueError()
+         else:
+             Graph = import_class(graph)
+             self.graph = Graph(**graph_args)
+
+         A = self.graph.A
+         self.data_bn = nn.BatchNorm1d(num_person * in_channels * num_point)
+
+         self.l1 = TCN_GCN_unit(3, 64, A, residual=False)
+         self.l2 = TCN_GCN_unit(64, 64, A)
+         self.l3 = TCN_GCN_unit(64, 64, A)
+         self.l4 = TCN_GCN_unit(64, 64, A)
+         self.l5 = TCN_GCN_unit(64, 128, A, stride=2)
+         self.l6 = TCN_GCN_unit(128, 128, A)
+         self.l7 = TCN_GCN_unit(128, 128, A)
+         self.l8 = TCN_GCN_unit(128, 256, A, stride=2)
+         self.l9 = TCN_GCN_unit(256, 256, A)
+         self.l10 = TCN_GCN_unit(256, 256, A)
+
+         self.fc = nn.Linear(256, num_class)
+         nn.init.normal(self.fc.weight, 0, math.sqrt(2. / num_class))
+         bn_init(self.data_bn, 1)
+
+     def forward(self, x):
+         N, C, T, V, M = x.size()
+
+         x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
+         x = self.data_bn(x)
+         x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
+
+         x = self.l1(x)
+         x = self.l2(x)
+         x = self.l3(x)
+         x = self.l4(x)
+         x = self.l5(x)
+         x = self.l6(x)
+         x = self.l7(x)
+         x = self.l8(x)
+         x = self.l9(x)
+         x = self.l10(x)
+
+         # N*M,C,T,V
+         c_new = x.size(1)
+         x = x.view(N, M, c_new, -1)
+         x = x.mean(3).mean(1)
+
+         return self.fc(x)
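For reference, a minimal usage sketch of the Model defined above. It assumes the original Shift-GCN repository layout (so that model.shift_gcn, graph.ntu_rgb_d and the compiled cuda.shift extension are importable) and an available CUDA device, since several parameters are created with device='cuda'; none of that is supplied by the checkpoint files themselves.

import torch
from model.shift_gcn import Model  # assumes the Shift-GCN repo is on the import path

net = Model(num_class=120, num_point=25, num_person=2,
            graph='graph.ntu_rgb_d.Graph',
            graph_args={'labeling_mode': 'spatial'}).cuda()

# Input layout is (N, C, T, V, M): batch, channel, frame, joint, person.
dummy = torch.randn(2, 3, 64, 25, 2).cuda()
logits = net(dummy)
print(logits.shape)  # torch.Size([2, 120])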
ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_bone_motion_xsub/config.yaml ADDED
@@ -0,0 +1,56 @@
+ Experiment_name: ntu120_bone_motion_xsub
+ base_lr: 0.1
+ batch_size: 64
+ config: ./config/ntu120_xsub/train_bone_motion.yaml
+ device:
+ - 2
+ - 3
+ eval_interval: 5
+ feeder: feeders.feeder.Feeder
+ ignore_weights: []
+ log_interval: 100
+ model: model.shift_gcn.Model
+ model_args:
+   graph: graph.ntu_rgb_d.Graph
+   graph_args:
+     labeling_mode: spatial
+   num_class: 120
+   num_person: 2
+   num_point: 25
+ model_saved_name: ./save_models/ntu120_bone_motion_xsub
+ nesterov: true
+ num_epoch: 140
+ num_worker: 32
+ only_train_epoch: 1
+ only_train_part: true
+ optimizer: SGD
+ phase: train
+ print_log: true
+ save_interval: 2
+ save_score: false
+ seed: 1
+ show_topk:
+ - 1
+ - 5
+ start_epoch: 0
+ step:
+ - 60
+ - 80
+ - 100
+ test_batch_size: 64
+ test_feeder_args:
+   data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_bone_motion.npy
+   label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl
+ train_feeder_args:
+   data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_bone_motion.npy
+   debug: false
+   label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl
+   normalization: false
+   random_choose: false
+   random_move: false
+   random_shift: false
+   window_size: -1
+ warm_up_epoch: 0
+ weight_decay: 0.0001
+ weights: null
+ work_dir: ./work_dir/ntu120_bone_motion_xsub
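The config above mirrors the argparse namespace of the training script, dumped as YAML. A short sketch of reading it back in Python (assumes PyYAML is installed; the path is this file's location in the upload):

import yaml

cfg_path = 'ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_bone_motion_xsub/config.yaml'
with open(cfg_path) as f:
    cfg = yaml.safe_load(f)

print(cfg['model'])       # model.shift_gcn.Model
print(cfg['model_args'])  # keyword arguments for Model(**model_args)
print(cfg['step'])        # learning-rate decay epochs: [60, 80, 100]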
ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_bone_motion_xsub/eval_results/best_acc.pkl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:74a9778b7fd6291e4b9dc1beaa1efba338b1ce78ee62400e6010224782ba6c2f
+ size 29946137
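These three lines are a Git LFS pointer, not the pickle itself; the roughly 30 MB object has to be fetched first (for example with git lfs pull) before it can be opened. A sketch of inspecting it; the assumption that it holds per-sample evaluation scores follows the convention of this codebase family and is not guaranteed by the pointer alone:

import pickle

pkl_path = 'ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_bone_motion_xsub/eval_results/best_acc.pkl'
with open(pkl_path, 'rb') as f:
    scores = pickle.load(f)  # expected (not guaranteed): {sample_name: per-class score array}

print(type(scores), len(scores))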
ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_bone_motion_xsub/log.txt ADDED
@@ -0,0 +1,1043 @@
1
+ [ Wed Sep 14 18:31:36 2022 ] Parameters:
2
+ {'work_dir': './work_dir/ntu120_bone_motion_xsub', 'model_saved_name': './save_models/ntu120_bone_motion_xsub', 'Experiment_name': 'ntu120_bone_motion_xsub', 'config': './config/ntu120_xsub/train_bone_motion.yaml', 'phase': 'train', 'save_score': False, 'seed': 1, 'log_interval': 100, 'save_interval': 2, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_bone_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_bone_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl'}, 'model': 'model.shift_gcn.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.1, 'step': [60, 80, 100], 'device': [2, 3], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 64, 'test_batch_size': 64, 'start_epoch': 0, 'num_epoch': 140, 'weight_decay': 0.0001, 'only_train_part': True, 'only_train_epoch': 1, 'warm_up_epoch': 0}
3
+
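The schedule in these parameters (base_lr 0.1, step [60, 80, 100]) explains the lr values printed in this log: 0.100000 through epoch 60, 0.010000 from epoch 61, and so on. A stand-alone sketch of that decay rule, written against the 1-based epoch numbers as printed in the log; the function name is illustrative:

def lr_at_epoch(epoch, base_lr=0.1, step=(60, 80, 100)):
    # One 10x decay for every milestone the printed (1-based) epoch has passed.
    return base_lr * (0.1 ** sum(epoch > s for s in step))

for e in (1, 60, 61, 99, 101):
    print(e, round(lr_at_epoch(e), 6))  # 0.1, 0.1, 0.01, 0.001, 0.0001, matching the logged lr values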
4
+ [ Wed Sep 14 18:31:36 2022 ] Training epoch: 1
5
+ [ Wed Sep 14 18:32:54 2022 ] Batch(99/243) done. Loss: 3.7551 lr:0.100000 network_time: 0.0252
6
+ [ Wed Sep 14 18:34:07 2022 ] Batch(199/243) done. Loss: 3.0830 lr:0.100000 network_time: 0.0270
7
+ [ Wed Sep 14 18:34:38 2022 ] Eval epoch: 1
8
+ [ Wed Sep 14 18:36:12 2022 ] Mean test loss of 796 batches: 5.711165904998779.
9
+ [ Wed Sep 14 18:36:12 2022 ] Top1: 8.25%
10
+ [ Wed Sep 14 18:36:12 2022 ] Top5: 22.59%
11
+ [ Wed Sep 14 18:36:13 2022 ] Training epoch: 2
12
+ [ Wed Sep 14 18:36:58 2022 ] Batch(56/243) done. Loss: 2.6547 lr:0.100000 network_time: 0.0297
13
+ [ Wed Sep 14 18:38:10 2022 ] Batch(156/243) done. Loss: 2.3958 lr:0.100000 network_time: 0.0310
14
+ [ Wed Sep 14 18:39:13 2022 ] Eval epoch: 2
15
+ [ Wed Sep 14 18:40:47 2022 ] Mean test loss of 796 batches: 4.712782859802246.
16
+ [ Wed Sep 14 18:40:47 2022 ] Top1: 14.20%
17
+ [ Wed Sep 14 18:40:48 2022 ] Top5: 36.21%
18
+ [ Wed Sep 14 18:40:48 2022 ] Training epoch: 3
19
+ [ Wed Sep 14 18:41:01 2022 ] Batch(13/243) done. Loss: 1.9252 lr:0.100000 network_time: 0.0304
20
+ [ Wed Sep 14 18:42:14 2022 ] Batch(113/243) done. Loss: 1.6989 lr:0.100000 network_time: 0.0279
21
+ [ Wed Sep 14 18:43:27 2022 ] Batch(213/243) done. Loss: 1.8036 lr:0.100000 network_time: 0.0274
22
+ [ Wed Sep 14 18:43:49 2022 ] Eval epoch: 3
23
+ [ Wed Sep 14 18:45:23 2022 ] Mean test loss of 796 batches: 4.215649127960205.
24
+ [ Wed Sep 14 18:45:23 2022 ] Top1: 20.46%
25
+ [ Wed Sep 14 18:45:24 2022 ] Top5: 45.79%
26
+ [ Wed Sep 14 18:45:24 2022 ] Training epoch: 4
27
+ [ Wed Sep 14 18:46:19 2022 ] Batch(70/243) done. Loss: 1.2138 lr:0.100000 network_time: 0.0311
28
+ [ Wed Sep 14 18:47:32 2022 ] Batch(170/243) done. Loss: 1.4167 lr:0.100000 network_time: 0.0277
29
+ [ Wed Sep 14 18:48:24 2022 ] Eval epoch: 4
30
+ [ Wed Sep 14 18:49:58 2022 ] Mean test loss of 796 batches: 3.361840009689331.
31
+ [ Wed Sep 14 18:49:58 2022 ] Top1: 22.90%
32
+ [ Wed Sep 14 18:49:59 2022 ] Top5: 50.33%
33
+ [ Wed Sep 14 18:49:59 2022 ] Training epoch: 5
34
+ [ Wed Sep 14 18:50:23 2022 ] Batch(27/243) done. Loss: 1.5494 lr:0.100000 network_time: 0.0280
35
+ [ Wed Sep 14 18:51:35 2022 ] Batch(127/243) done. Loss: 1.2381 lr:0.100000 network_time: 0.0260
36
+ [ Wed Sep 14 18:52:48 2022 ] Batch(227/243) done. Loss: 1.5641 lr:0.100000 network_time: 0.0264
37
+ [ Wed Sep 14 18:52:59 2022 ] Eval epoch: 5
38
+ [ Wed Sep 14 18:54:33 2022 ] Mean test loss of 796 batches: 3.790829658508301.
39
+ [ Wed Sep 14 18:54:34 2022 ] Top1: 20.54%
40
+ [ Wed Sep 14 18:54:34 2022 ] Top5: 45.11%
41
+ [ Wed Sep 14 18:54:34 2022 ] Training epoch: 6
42
+ [ Wed Sep 14 18:55:39 2022 ] Batch(84/243) done. Loss: 1.1953 lr:0.100000 network_time: 0.0270
43
+ [ Wed Sep 14 18:56:52 2022 ] Batch(184/243) done. Loss: 1.1292 lr:0.100000 network_time: 0.0270
44
+ [ Wed Sep 14 18:57:34 2022 ] Eval epoch: 6
45
+ [ Wed Sep 14 18:59:08 2022 ] Mean test loss of 796 batches: 3.924736499786377.
46
+ [ Wed Sep 14 18:59:09 2022 ] Top1: 24.96%
47
+ [ Wed Sep 14 18:59:09 2022 ] Top5: 56.70%
48
+ [ Wed Sep 14 18:59:09 2022 ] Training epoch: 7
49
+ [ Wed Sep 14 18:59:43 2022 ] Batch(41/243) done. Loss: 1.1832 lr:0.100000 network_time: 0.0265
50
+ [ Wed Sep 14 19:00:55 2022 ] Batch(141/243) done. Loss: 1.0630 lr:0.100000 network_time: 0.0269
51
+ [ Wed Sep 14 19:02:08 2022 ] Batch(241/243) done. Loss: 0.9897 lr:0.100000 network_time: 0.0301
52
+ [ Wed Sep 14 19:02:09 2022 ] Eval epoch: 7
53
+ [ Wed Sep 14 19:03:43 2022 ] Mean test loss of 796 batches: 2.814431667327881.
54
+ [ Wed Sep 14 19:03:44 2022 ] Top1: 30.49%
55
+ [ Wed Sep 14 19:03:44 2022 ] Top5: 64.70%
56
+ [ Wed Sep 14 19:03:44 2022 ] Training epoch: 8
57
+ [ Wed Sep 14 19:04:59 2022 ] Batch(98/243) done. Loss: 0.7183 lr:0.100000 network_time: 0.0310
58
+ [ Wed Sep 14 19:06:12 2022 ] Batch(198/243) done. Loss: 0.9279 lr:0.100000 network_time: 0.0288
59
+ [ Wed Sep 14 19:06:44 2022 ] Eval epoch: 8
60
+ [ Wed Sep 14 19:08:17 2022 ] Mean test loss of 796 batches: 3.179093599319458.
61
+ [ Wed Sep 14 19:08:18 2022 ] Top1: 27.80%
62
+ [ Wed Sep 14 19:08:18 2022 ] Top5: 60.67%
63
+ [ Wed Sep 14 19:08:18 2022 ] Training epoch: 9
64
+ [ Wed Sep 14 19:09:02 2022 ] Batch(55/243) done. Loss: 0.9159 lr:0.100000 network_time: 0.0266
65
+ [ Wed Sep 14 19:10:15 2022 ] Batch(155/243) done. Loss: 0.8653 lr:0.100000 network_time: 0.0302
66
+ [ Wed Sep 14 19:11:18 2022 ] Eval epoch: 9
67
+ [ Wed Sep 14 19:12:53 2022 ] Mean test loss of 796 batches: 3.112769603729248.
68
+ [ Wed Sep 14 19:12:53 2022 ] Top1: 29.67%
69
+ [ Wed Sep 14 19:12:53 2022 ] Top5: 64.43%
70
+ [ Wed Sep 14 19:12:54 2022 ] Training epoch: 10
71
+ [ Wed Sep 14 19:13:06 2022 ] Batch(12/243) done. Loss: 1.1210 lr:0.100000 network_time: 0.0301
72
+ [ Wed Sep 14 19:14:19 2022 ] Batch(112/243) done. Loss: 1.0336 lr:0.100000 network_time: 0.0268
73
+ [ Wed Sep 14 19:15:32 2022 ] Batch(212/243) done. Loss: 1.4129 lr:0.100000 network_time: 0.0265
74
+ [ Wed Sep 14 19:15:54 2022 ] Eval epoch: 10
75
+ [ Wed Sep 14 19:17:28 2022 ] Mean test loss of 796 batches: 2.717728614807129.
76
+ [ Wed Sep 14 19:17:28 2022 ] Top1: 36.83%
77
+ [ Wed Sep 14 19:17:28 2022 ] Top5: 71.44%
78
+ [ Wed Sep 14 19:17:29 2022 ] Training epoch: 11
79
+ [ Wed Sep 14 19:18:23 2022 ] Batch(69/243) done. Loss: 0.8001 lr:0.100000 network_time: 0.0280
80
+ [ Wed Sep 14 19:19:36 2022 ] Batch(169/243) done. Loss: 0.6964 lr:0.100000 network_time: 0.0265
81
+ [ Wed Sep 14 19:20:29 2022 ] Eval epoch: 11
82
+ [ Wed Sep 14 19:22:02 2022 ] Mean test loss of 796 batches: 2.9447152614593506.
83
+ [ Wed Sep 14 19:22:03 2022 ] Top1: 35.68%
84
+ [ Wed Sep 14 19:22:03 2022 ] Top5: 69.27%
85
+ [ Wed Sep 14 19:22:03 2022 ] Training epoch: 12
86
+ [ Wed Sep 14 19:22:26 2022 ] Batch(26/243) done. Loss: 0.4444 lr:0.100000 network_time: 0.0287
87
+ [ Wed Sep 14 19:23:39 2022 ] Batch(126/243) done. Loss: 0.7001 lr:0.100000 network_time: 0.0276
88
+ [ Wed Sep 14 19:24:52 2022 ] Batch(226/243) done. Loss: 1.1727 lr:0.100000 network_time: 0.0411
89
+ [ Wed Sep 14 19:25:03 2022 ] Eval epoch: 12
90
+ [ Wed Sep 14 19:26:37 2022 ] Mean test loss of 796 batches: 2.774251699447632.
91
+ [ Wed Sep 14 19:26:37 2022 ] Top1: 36.09%
92
+ [ Wed Sep 14 19:26:38 2022 ] Top5: 70.66%
93
+ [ Wed Sep 14 19:26:38 2022 ] Training epoch: 13
94
+ [ Wed Sep 14 19:27:42 2022 ] Batch(83/243) done. Loss: 0.6696 lr:0.100000 network_time: 0.0266
95
+ [ Wed Sep 14 19:28:55 2022 ] Batch(183/243) done. Loss: 0.6523 lr:0.100000 network_time: 0.0307
96
+ [ Wed Sep 14 19:29:38 2022 ] Eval epoch: 13
97
+ [ Wed Sep 14 19:31:12 2022 ] Mean test loss of 796 batches: 3.165262460708618.
98
+ [ Wed Sep 14 19:31:12 2022 ] Top1: 31.68%
99
+ [ Wed Sep 14 19:31:13 2022 ] Top5: 66.37%
100
+ [ Wed Sep 14 19:31:13 2022 ] Training epoch: 14
101
+ [ Wed Sep 14 19:31:45 2022 ] Batch(40/243) done. Loss: 0.4915 lr:0.100000 network_time: 0.0273
102
+ [ Wed Sep 14 19:32:58 2022 ] Batch(140/243) done. Loss: 0.9805 lr:0.100000 network_time: 0.0271
103
+ [ Wed Sep 14 19:34:11 2022 ] Batch(240/243) done. Loss: 0.8167 lr:0.100000 network_time: 0.0306
104
+ [ Wed Sep 14 19:34:13 2022 ] Eval epoch: 14
105
+ [ Wed Sep 14 19:35:47 2022 ] Mean test loss of 796 batches: 2.915220022201538.
106
+ [ Wed Sep 14 19:35:47 2022 ] Top1: 37.93%
107
+ [ Wed Sep 14 19:35:47 2022 ] Top5: 70.42%
108
+ [ Wed Sep 14 19:35:48 2022 ] Training epoch: 15
109
+ [ Wed Sep 14 19:37:02 2022 ] Batch(97/243) done. Loss: 0.7217 lr:0.100000 network_time: 0.0317
110
+ [ Wed Sep 14 19:38:15 2022 ] Batch(197/243) done. Loss: 0.4206 lr:0.100000 network_time: 0.0292
111
+ [ Wed Sep 14 19:38:48 2022 ] Eval epoch: 15
112
+ [ Wed Sep 14 19:40:22 2022 ] Mean test loss of 796 batches: 3.5348544120788574.
113
+ [ Wed Sep 14 19:40:22 2022 ] Top1: 31.55%
114
+ [ Wed Sep 14 19:40:23 2022 ] Top5: 65.92%
115
+ [ Wed Sep 14 19:40:23 2022 ] Training epoch: 16
116
+ [ Wed Sep 14 19:41:06 2022 ] Batch(54/243) done. Loss: 0.5286 lr:0.100000 network_time: 0.0268
117
+ [ Wed Sep 14 19:42:19 2022 ] Batch(154/243) done. Loss: 0.6408 lr:0.100000 network_time: 0.0269
118
+ [ Wed Sep 14 19:43:23 2022 ] Eval epoch: 16
119
+ [ Wed Sep 14 19:44:57 2022 ] Mean test loss of 796 batches: 3.115510940551758.
120
+ [ Wed Sep 14 19:44:57 2022 ] Top1: 35.87%
121
+ [ Wed Sep 14 19:44:58 2022 ] Top5: 69.59%
122
+ [ Wed Sep 14 19:44:58 2022 ] Training epoch: 17
123
+ [ Wed Sep 14 19:45:10 2022 ] Batch(11/243) done. Loss: 0.7054 lr:0.100000 network_time: 0.0268
124
+ [ Wed Sep 14 19:46:22 2022 ] Batch(111/243) done. Loss: 0.6154 lr:0.100000 network_time: 0.0281
125
+ [ Wed Sep 14 19:47:35 2022 ] Batch(211/243) done. Loss: 0.9023 lr:0.100000 network_time: 0.0271
126
+ [ Wed Sep 14 19:47:58 2022 ] Eval epoch: 17
127
+ [ Wed Sep 14 19:49:32 2022 ] Mean test loss of 796 batches: 3.3053030967712402.
128
+ [ Wed Sep 14 19:49:33 2022 ] Top1: 35.22%
129
+ [ Wed Sep 14 19:49:33 2022 ] Top5: 68.90%
130
+ [ Wed Sep 14 19:49:34 2022 ] Training epoch: 18
131
+ [ Wed Sep 14 19:50:27 2022 ] Batch(68/243) done. Loss: 0.7232 lr:0.100000 network_time: 0.0274
132
+ [ Wed Sep 14 19:51:40 2022 ] Batch(168/243) done. Loss: 0.4455 lr:0.100000 network_time: 0.0272
133
+ [ Wed Sep 14 19:52:34 2022 ] Eval epoch: 18
134
+ [ Wed Sep 14 19:54:07 2022 ] Mean test loss of 796 batches: 2.651683807373047.
135
+ [ Wed Sep 14 19:54:08 2022 ] Top1: 40.03%
136
+ [ Wed Sep 14 19:54:08 2022 ] Top5: 74.53%
137
+ [ Wed Sep 14 19:54:08 2022 ] Training epoch: 19
138
+ [ Wed Sep 14 19:54:30 2022 ] Batch(25/243) done. Loss: 0.5252 lr:0.100000 network_time: 0.0248
139
+ [ Wed Sep 14 19:55:43 2022 ] Batch(125/243) done. Loss: 0.5577 lr:0.100000 network_time: 0.0294
140
+ [ Wed Sep 14 19:56:56 2022 ] Batch(225/243) done. Loss: 0.8240 lr:0.100000 network_time: 0.0261
141
+ [ Wed Sep 14 19:57:08 2022 ] Eval epoch: 19
142
+ [ Wed Sep 14 19:58:42 2022 ] Mean test loss of 796 batches: 2.9685568809509277.
143
+ [ Wed Sep 14 19:58:42 2022 ] Top1: 39.34%
144
+ [ Wed Sep 14 19:58:43 2022 ] Top5: 74.07%
145
+ [ Wed Sep 14 19:58:43 2022 ] Training epoch: 20
146
+ [ Wed Sep 14 19:59:46 2022 ] Batch(82/243) done. Loss: 0.5369 lr:0.100000 network_time: 0.0301
147
+ [ Wed Sep 14 20:00:59 2022 ] Batch(182/243) done. Loss: 0.5425 lr:0.100000 network_time: 0.0310
148
+ [ Wed Sep 14 20:01:43 2022 ] Eval epoch: 20
149
+ [ Wed Sep 14 20:03:17 2022 ] Mean test loss of 796 batches: 2.9139246940612793.
150
+ [ Wed Sep 14 20:03:18 2022 ] Top1: 38.98%
151
+ [ Wed Sep 14 20:03:18 2022 ] Top5: 73.25%
152
+ [ Wed Sep 14 20:03:19 2022 ] Training epoch: 21
153
+ [ Wed Sep 14 20:03:51 2022 ] Batch(39/243) done. Loss: 0.3098 lr:0.100000 network_time: 0.0345
154
+ [ Wed Sep 14 20:05:04 2022 ] Batch(139/243) done. Loss: 0.5166 lr:0.100000 network_time: 0.0275
155
+ [ Wed Sep 14 20:06:16 2022 ] Batch(239/243) done. Loss: 0.6568 lr:0.100000 network_time: 0.0268
156
+ [ Wed Sep 14 20:06:19 2022 ] Eval epoch: 21
157
+ [ Wed Sep 14 20:07:52 2022 ] Mean test loss of 796 batches: 3.3424649238586426.
158
+ [ Wed Sep 14 20:07:53 2022 ] Top1: 36.41%
159
+ [ Wed Sep 14 20:07:54 2022 ] Top5: 70.73%
160
+ [ Wed Sep 14 20:07:54 2022 ] Training epoch: 22
161
+ [ Wed Sep 14 20:09:08 2022 ] Batch(96/243) done. Loss: 0.4496 lr:0.100000 network_time: 0.0277
162
+ [ Wed Sep 14 20:10:21 2022 ] Batch(196/243) done. Loss: 0.4193 lr:0.100000 network_time: 0.0317
163
+ [ Wed Sep 14 20:10:54 2022 ] Eval epoch: 22
164
+ [ Wed Sep 14 20:12:28 2022 ] Mean test loss of 796 batches: 2.9043772220611572.
165
+ [ Wed Sep 14 20:12:28 2022 ] Top1: 40.20%
166
+ [ Wed Sep 14 20:12:29 2022 ] Top5: 73.53%
167
+ [ Wed Sep 14 20:12:29 2022 ] Training epoch: 23
168
+ [ Wed Sep 14 20:13:11 2022 ] Batch(53/243) done. Loss: 0.3166 lr:0.100000 network_time: 0.0272
169
+ [ Wed Sep 14 20:14:24 2022 ] Batch(153/243) done. Loss: 0.5057 lr:0.100000 network_time: 0.0278
170
+ [ Wed Sep 14 20:15:29 2022 ] Eval epoch: 23
171
+ [ Wed Sep 14 20:17:02 2022 ] Mean test loss of 796 batches: 2.937326669692993.
172
+ [ Wed Sep 14 20:17:03 2022 ] Top1: 39.94%
173
+ [ Wed Sep 14 20:17:04 2022 ] Top5: 73.01%
174
+ [ Wed Sep 14 20:17:04 2022 ] Training epoch: 24
175
+ [ Wed Sep 14 20:17:15 2022 ] Batch(10/243) done. Loss: 0.3329 lr:0.100000 network_time: 0.0259
176
+ [ Wed Sep 14 20:18:27 2022 ] Batch(110/243) done. Loss: 0.6170 lr:0.100000 network_time: 0.0266
177
+ [ Wed Sep 14 20:19:40 2022 ] Batch(210/243) done. Loss: 0.5816 lr:0.100000 network_time: 0.0272
178
+ [ Wed Sep 14 20:20:04 2022 ] Eval epoch: 24
179
+ [ Wed Sep 14 20:21:37 2022 ] Mean test loss of 796 batches: 3.271789312362671.
180
+ [ Wed Sep 14 20:21:37 2022 ] Top1: 39.86%
181
+ [ Wed Sep 14 20:21:38 2022 ] Top5: 73.14%
182
+ [ Wed Sep 14 20:21:38 2022 ] Training epoch: 25
183
+ [ Wed Sep 14 20:22:30 2022 ] Batch(67/243) done. Loss: 0.4282 lr:0.100000 network_time: 0.0282
184
+ [ Wed Sep 14 20:23:43 2022 ] Batch(167/243) done. Loss: 0.4217 lr:0.100000 network_time: 0.0306
185
+ [ Wed Sep 14 20:24:38 2022 ] Eval epoch: 25
186
+ [ Wed Sep 14 20:26:11 2022 ] Mean test loss of 796 batches: 3.2710368633270264.
187
+ [ Wed Sep 14 20:26:12 2022 ] Top1: 38.82%
188
+ [ Wed Sep 14 20:26:12 2022 ] Top5: 73.17%
189
+ [ Wed Sep 14 20:26:12 2022 ] Training epoch: 26
190
+ [ Wed Sep 14 20:26:33 2022 ] Batch(24/243) done. Loss: 0.4654 lr:0.100000 network_time: 0.0270
191
+ [ Wed Sep 14 20:27:46 2022 ] Batch(124/243) done. Loss: 0.5090 lr:0.100000 network_time: 0.0255
192
+ [ Wed Sep 14 20:28:59 2022 ] Batch(224/243) done. Loss: 0.3150 lr:0.100000 network_time: 0.0314
193
+ [ Wed Sep 14 20:29:12 2022 ] Eval epoch: 26
194
+ [ Wed Sep 14 20:30:46 2022 ] Mean test loss of 796 batches: 3.2111380100250244.
195
+ [ Wed Sep 14 20:30:47 2022 ] Top1: 36.32%
196
+ [ Wed Sep 14 20:30:47 2022 ] Top5: 71.37%
197
+ [ Wed Sep 14 20:30:47 2022 ] Training epoch: 27
198
+ [ Wed Sep 14 20:31:50 2022 ] Batch(81/243) done. Loss: 0.4068 lr:0.100000 network_time: 0.0271
199
+ [ Wed Sep 14 20:33:03 2022 ] Batch(181/243) done. Loss: 0.3918 lr:0.100000 network_time: 0.0271
200
+ [ Wed Sep 14 20:33:48 2022 ] Eval epoch: 27
201
+ [ Wed Sep 14 20:35:22 2022 ] Mean test loss of 796 batches: 2.908709764480591.
202
+ [ Wed Sep 14 20:35:22 2022 ] Top1: 39.33%
203
+ [ Wed Sep 14 20:35:23 2022 ] Top5: 74.25%
204
+ [ Wed Sep 14 20:35:23 2022 ] Training epoch: 28
205
+ [ Wed Sep 14 20:35:55 2022 ] Batch(38/243) done. Loss: 0.2538 lr:0.100000 network_time: 0.0294
206
+ [ Wed Sep 14 20:37:08 2022 ] Batch(138/243) done. Loss: 0.4274 lr:0.100000 network_time: 0.0264
207
+ [ Wed Sep 14 20:38:20 2022 ] Batch(238/243) done. Loss: 0.5532 lr:0.100000 network_time: 0.0310
208
+ [ Wed Sep 14 20:38:24 2022 ] Eval epoch: 28
209
+ [ Wed Sep 14 20:39:57 2022 ] Mean test loss of 796 batches: 3.379927396774292.
210
+ [ Wed Sep 14 20:39:58 2022 ] Top1: 38.54%
211
+ [ Wed Sep 14 20:39:58 2022 ] Top5: 72.27%
212
+ [ Wed Sep 14 20:39:59 2022 ] Training epoch: 29
213
+ [ Wed Sep 14 20:41:12 2022 ] Batch(95/243) done. Loss: 0.4147 lr:0.100000 network_time: 0.0274
214
+ [ Wed Sep 14 20:42:25 2022 ] Batch(195/243) done. Loss: 0.4782 lr:0.100000 network_time: 0.0301
215
+ [ Wed Sep 14 20:42:59 2022 ] Eval epoch: 29
216
+ [ Wed Sep 14 20:44:33 2022 ] Mean test loss of 796 batches: 2.9250919818878174.
217
+ [ Wed Sep 14 20:44:33 2022 ] Top1: 43.48%
218
+ [ Wed Sep 14 20:44:34 2022 ] Top5: 76.24%
219
+ [ Wed Sep 14 20:44:34 2022 ] Training epoch: 30
220
+ [ Wed Sep 14 20:45:16 2022 ] Batch(52/243) done. Loss: 0.1942 lr:0.100000 network_time: 0.0273
221
+ [ Wed Sep 14 20:46:29 2022 ] Batch(152/243) done. Loss: 0.2228 lr:0.100000 network_time: 0.0276
222
+ [ Wed Sep 14 20:47:35 2022 ] Eval epoch: 30
223
+ [ Wed Sep 14 20:49:09 2022 ] Mean test loss of 796 batches: 2.907973289489746.
224
+ [ Wed Sep 14 20:49:09 2022 ] Top1: 43.96%
225
+ [ Wed Sep 14 20:49:10 2022 ] Top5: 76.37%
226
+ [ Wed Sep 14 20:49:10 2022 ] Training epoch: 31
227
+ [ Wed Sep 14 20:49:20 2022 ] Batch(9/243) done. Loss: 0.3925 lr:0.100000 network_time: 0.0272
228
+ [ Wed Sep 14 20:50:33 2022 ] Batch(109/243) done. Loss: 0.3682 lr:0.100000 network_time: 0.0276
229
+ [ Wed Sep 14 20:51:46 2022 ] Batch(209/243) done. Loss: 0.2874 lr:0.100000 network_time: 0.0314
230
+ [ Wed Sep 14 20:52:10 2022 ] Eval epoch: 31
231
+ [ Wed Sep 14 20:53:44 2022 ] Mean test loss of 796 batches: 3.1756463050842285.
232
+ [ Wed Sep 14 20:53:44 2022 ] Top1: 40.28%
233
+ [ Wed Sep 14 20:53:44 2022 ] Top5: 73.68%
234
+ [ Wed Sep 14 20:53:45 2022 ] Training epoch: 32
235
+ [ Wed Sep 14 20:54:37 2022 ] Batch(66/243) done. Loss: 0.3473 lr:0.100000 network_time: 0.0262
236
+ [ Wed Sep 14 20:55:49 2022 ] Batch(166/243) done. Loss: 0.5482 lr:0.100000 network_time: 0.0281
237
+ [ Wed Sep 14 20:56:45 2022 ] Eval epoch: 32
238
+ [ Wed Sep 14 20:58:18 2022 ] Mean test loss of 796 batches: 2.8646881580352783.
239
+ [ Wed Sep 14 20:58:18 2022 ] Top1: 42.91%
240
+ [ Wed Sep 14 20:58:19 2022 ] Top5: 75.13%
241
+ [ Wed Sep 14 20:58:19 2022 ] Training epoch: 33
242
+ [ Wed Sep 14 20:58:39 2022 ] Batch(23/243) done. Loss: 0.2494 lr:0.100000 network_time: 0.0270
243
+ [ Wed Sep 14 20:59:52 2022 ] Batch(123/243) done. Loss: 0.6065 lr:0.100000 network_time: 0.0272
244
+ [ Wed Sep 14 21:01:05 2022 ] Batch(223/243) done. Loss: 0.6795 lr:0.100000 network_time: 0.0266
245
+ [ Wed Sep 14 21:01:19 2022 ] Eval epoch: 33
246
+ [ Wed Sep 14 21:02:53 2022 ] Mean test loss of 796 batches: 3.292654037475586.
247
+ [ Wed Sep 14 21:02:53 2022 ] Top1: 36.59%
248
+ [ Wed Sep 14 21:02:54 2022 ] Top5: 70.07%
249
+ [ Wed Sep 14 21:02:54 2022 ] Training epoch: 34
250
+ [ Wed Sep 14 21:03:56 2022 ] Batch(80/243) done. Loss: 0.3388 lr:0.100000 network_time: 0.0272
251
+ [ Wed Sep 14 21:05:09 2022 ] Batch(180/243) done. Loss: 0.3675 lr:0.100000 network_time: 0.0472
252
+ [ Wed Sep 14 21:05:54 2022 ] Eval epoch: 34
253
+ [ Wed Sep 14 21:07:27 2022 ] Mean test loss of 796 batches: 3.1438148021698.
254
+ [ Wed Sep 14 21:07:28 2022 ] Top1: 43.58%
255
+ [ Wed Sep 14 21:07:28 2022 ] Top5: 75.90%
256
+ [ Wed Sep 14 21:07:28 2022 ] Training epoch: 35
257
+ [ Wed Sep 14 21:07:59 2022 ] Batch(37/243) done. Loss: 0.2174 lr:0.100000 network_time: 0.0276
258
+ [ Wed Sep 14 21:09:12 2022 ] Batch(137/243) done. Loss: 0.2229 lr:0.100000 network_time: 0.0279
259
+ [ Wed Sep 14 21:10:25 2022 ] Batch(237/243) done. Loss: 0.4025 lr:0.100000 network_time: 0.0266
260
+ [ Wed Sep 14 21:10:29 2022 ] Eval epoch: 35
261
+ [ Wed Sep 14 21:12:02 2022 ] Mean test loss of 796 batches: 3.187361717224121.
262
+ [ Wed Sep 14 21:12:03 2022 ] Top1: 41.11%
263
+ [ Wed Sep 14 21:12:03 2022 ] Top5: 74.95%
264
+ [ Wed Sep 14 21:12:04 2022 ] Training epoch: 36
265
+ [ Wed Sep 14 21:13:16 2022 ] Batch(94/243) done. Loss: 0.3044 lr:0.100000 network_time: 0.0252
266
+ [ Wed Sep 14 21:14:28 2022 ] Batch(194/243) done. Loss: 0.1637 lr:0.100000 network_time: 0.0259
267
+ [ Wed Sep 14 21:15:04 2022 ] Eval epoch: 36
268
+ [ Wed Sep 14 21:16:37 2022 ] Mean test loss of 796 batches: 2.897453546524048.
269
+ [ Wed Sep 14 21:16:37 2022 ] Top1: 41.74%
270
+ [ Wed Sep 14 21:16:38 2022 ] Top5: 75.11%
271
+ [ Wed Sep 14 21:16:38 2022 ] Training epoch: 37
272
+ [ Wed Sep 14 21:17:19 2022 ] Batch(51/243) done. Loss: 0.2264 lr:0.100000 network_time: 0.0268
273
+ [ Wed Sep 14 21:18:31 2022 ] Batch(151/243) done. Loss: 0.2391 lr:0.100000 network_time: 0.0278
274
+ [ Wed Sep 14 21:19:38 2022 ] Eval epoch: 37
275
+ [ Wed Sep 14 21:21:11 2022 ] Mean test loss of 796 batches: 3.246755838394165.
276
+ [ Wed Sep 14 21:21:12 2022 ] Top1: 42.70%
277
+ [ Wed Sep 14 21:21:12 2022 ] Top5: 75.04%
278
+ [ Wed Sep 14 21:21:13 2022 ] Training epoch: 38
279
+ [ Wed Sep 14 21:21:22 2022 ] Batch(8/243) done. Loss: 0.2202 lr:0.100000 network_time: 0.0275
280
+ [ Wed Sep 14 21:22:35 2022 ] Batch(108/243) done. Loss: 0.1588 lr:0.100000 network_time: 0.0324
281
+ [ Wed Sep 14 21:23:48 2022 ] Batch(208/243) done. Loss: 0.3176 lr:0.100000 network_time: 0.0308
282
+ [ Wed Sep 14 21:24:13 2022 ] Eval epoch: 38
283
+ [ Wed Sep 14 21:25:47 2022 ] Mean test loss of 796 batches: 3.2101247310638428.
284
+ [ Wed Sep 14 21:25:47 2022 ] Top1: 41.23%
285
+ [ Wed Sep 14 21:25:48 2022 ] Top5: 73.09%
286
+ [ Wed Sep 14 21:25:48 2022 ] Training epoch: 39
287
+ [ Wed Sep 14 21:26:39 2022 ] Batch(65/243) done. Loss: 0.4692 lr:0.100000 network_time: 0.0313
288
+ [ Wed Sep 14 21:27:52 2022 ] Batch(165/243) done. Loss: 0.2537 lr:0.100000 network_time: 0.0323
289
+ [ Wed Sep 14 21:28:48 2022 ] Eval epoch: 39
290
+ [ Wed Sep 14 21:30:21 2022 ] Mean test loss of 796 batches: 3.237701177597046.
291
+ [ Wed Sep 14 21:30:21 2022 ] Top1: 41.29%
292
+ [ Wed Sep 14 21:30:22 2022 ] Top5: 75.22%
293
+ [ Wed Sep 14 21:30:22 2022 ] Training epoch: 40
294
+ [ Wed Sep 14 21:30:42 2022 ] Batch(22/243) done. Loss: 0.2544 lr:0.100000 network_time: 0.0315
295
+ [ Wed Sep 14 21:31:55 2022 ] Batch(122/243) done. Loss: 0.4558 lr:0.100000 network_time: 0.0282
296
+ [ Wed Sep 14 21:33:08 2022 ] Batch(222/243) done. Loss: 0.2960 lr:0.100000 network_time: 0.0316
297
+ [ Wed Sep 14 21:33:23 2022 ] Eval epoch: 40
298
+ [ Wed Sep 14 21:34:56 2022 ] Mean test loss of 796 batches: 3.0716347694396973.
299
+ [ Wed Sep 14 21:34:56 2022 ] Top1: 43.59%
300
+ [ Wed Sep 14 21:34:57 2022 ] Top5: 77.24%
301
+ [ Wed Sep 14 21:34:57 2022 ] Training epoch: 41
302
+ [ Wed Sep 14 21:35:58 2022 ] Batch(79/243) done. Loss: 0.1472 lr:0.100000 network_time: 0.0280
303
+ [ Wed Sep 14 21:37:11 2022 ] Batch(179/243) done. Loss: 0.2046 lr:0.100000 network_time: 0.0315
304
+ [ Wed Sep 14 21:37:57 2022 ] Eval epoch: 41
305
+ [ Wed Sep 14 21:39:31 2022 ] Mean test loss of 796 batches: 3.687352180480957.
306
+ [ Wed Sep 14 21:39:31 2022 ] Top1: 35.92%
307
+ [ Wed Sep 14 21:39:31 2022 ] Top5: 70.46%
308
+ [ Wed Sep 14 21:39:32 2022 ] Training epoch: 42
309
+ [ Wed Sep 14 21:40:02 2022 ] Batch(36/243) done. Loss: 0.2597 lr:0.100000 network_time: 0.0262
310
+ [ Wed Sep 14 21:41:14 2022 ] Batch(136/243) done. Loss: 0.2971 lr:0.100000 network_time: 0.0266
311
+ [ Wed Sep 14 21:42:27 2022 ] Batch(236/243) done. Loss: 0.2494 lr:0.100000 network_time: 0.0274
312
+ [ Wed Sep 14 21:42:32 2022 ] Eval epoch: 42
313
+ [ Wed Sep 14 21:44:06 2022 ] Mean test loss of 796 batches: 2.7111852169036865.
314
+ [ Wed Sep 14 21:44:06 2022 ] Top1: 45.89%
315
+ [ Wed Sep 14 21:44:06 2022 ] Top5: 78.77%
316
+ [ Wed Sep 14 21:44:07 2022 ] Training epoch: 43
317
+ [ Wed Sep 14 21:45:18 2022 ] Batch(93/243) done. Loss: 0.1261 lr:0.100000 network_time: 0.0279
318
+ [ Wed Sep 14 21:46:31 2022 ] Batch(193/243) done. Loss: 0.2820 lr:0.100000 network_time: 0.0275
319
+ [ Wed Sep 14 21:47:07 2022 ] Eval epoch: 43
320
+ [ Wed Sep 14 21:48:40 2022 ] Mean test loss of 796 batches: 3.5541832447052.
321
+ [ Wed Sep 14 21:48:41 2022 ] Top1: 38.99%
322
+ [ Wed Sep 14 21:48:41 2022 ] Top5: 71.08%
323
+ [ Wed Sep 14 21:48:41 2022 ] Training epoch: 44
324
+ [ Wed Sep 14 21:49:21 2022 ] Batch(50/243) done. Loss: 0.1867 lr:0.100000 network_time: 0.0317
325
+ [ Wed Sep 14 21:50:34 2022 ] Batch(150/243) done. Loss: 0.1899 lr:0.100000 network_time: 0.0264
326
+ [ Wed Sep 14 21:51:41 2022 ] Eval epoch: 44
327
+ [ Wed Sep 14 21:53:15 2022 ] Mean test loss of 796 batches: 3.259566307067871.
328
+ [ Wed Sep 14 21:53:15 2022 ] Top1: 39.92%
329
+ [ Wed Sep 14 21:53:16 2022 ] Top5: 72.24%
330
+ [ Wed Sep 14 21:53:16 2022 ] Training epoch: 45
331
+ [ Wed Sep 14 21:53:25 2022 ] Batch(7/243) done. Loss: 0.1851 lr:0.100000 network_time: 0.0324
332
+ [ Wed Sep 14 21:54:37 2022 ] Batch(107/243) done. Loss: 0.1600 lr:0.100000 network_time: 0.0284
333
+ [ Wed Sep 14 21:55:50 2022 ] Batch(207/243) done. Loss: 0.1799 lr:0.100000 network_time: 0.0264
334
+ [ Wed Sep 14 21:56:16 2022 ] Eval epoch: 45
335
+ [ Wed Sep 14 21:57:49 2022 ] Mean test loss of 796 batches: 3.1690220832824707.
336
+ [ Wed Sep 14 21:57:50 2022 ] Top1: 42.68%
337
+ [ Wed Sep 14 21:57:50 2022 ] Top5: 75.85%
338
+ [ Wed Sep 14 21:57:50 2022 ] Training epoch: 46
339
+ [ Wed Sep 14 21:58:41 2022 ] Batch(64/243) done. Loss: 0.3256 lr:0.100000 network_time: 0.0275
340
+ [ Wed Sep 14 21:59:54 2022 ] Batch(164/243) done. Loss: 0.2898 lr:0.100000 network_time: 0.0268
341
+ [ Wed Sep 14 22:00:51 2022 ] Eval epoch: 46
342
+ [ Wed Sep 14 22:02:24 2022 ] Mean test loss of 796 batches: 3.144570827484131.
343
+ [ Wed Sep 14 22:02:24 2022 ] Top1: 43.82%
344
+ [ Wed Sep 14 22:02:25 2022 ] Top5: 75.41%
345
+ [ Wed Sep 14 22:02:25 2022 ] Training epoch: 47
346
+ [ Wed Sep 14 22:02:44 2022 ] Batch(21/243) done. Loss: 0.2113 lr:0.100000 network_time: 0.0269
347
+ [ Wed Sep 14 22:03:57 2022 ] Batch(121/243) done. Loss: 0.3029 lr:0.100000 network_time: 0.0264
348
+ [ Wed Sep 14 22:05:09 2022 ] Batch(221/243) done. Loss: 0.2827 lr:0.100000 network_time: 0.0263
349
+ [ Wed Sep 14 22:05:25 2022 ] Eval epoch: 47
350
+ [ Wed Sep 14 22:06:59 2022 ] Mean test loss of 796 batches: 3.208387613296509.
351
+ [ Wed Sep 14 22:06:59 2022 ] Top1: 43.43%
352
+ [ Wed Sep 14 22:06:59 2022 ] Top5: 75.61%
353
+ [ Wed Sep 14 22:07:00 2022 ] Training epoch: 48
354
+ [ Wed Sep 14 22:08:00 2022 ] Batch(78/243) done. Loss: 0.2341 lr:0.100000 network_time: 0.0262
355
+ [ Wed Sep 14 22:09:13 2022 ] Batch(178/243) done. Loss: 0.2231 lr:0.100000 network_time: 0.0272
356
+ [ Wed Sep 14 22:10:00 2022 ] Eval epoch: 48
357
+ [ Wed Sep 14 22:11:33 2022 ] Mean test loss of 796 batches: 3.4265151023864746.
358
+ [ Wed Sep 14 22:11:34 2022 ] Top1: 41.23%
359
+ [ Wed Sep 14 22:11:34 2022 ] Top5: 73.94%
360
+ [ Wed Sep 14 22:11:34 2022 ] Training epoch: 49
361
+ [ Wed Sep 14 22:12:03 2022 ] Batch(35/243) done. Loss: 0.3541 lr:0.100000 network_time: 0.0265
362
+ [ Wed Sep 14 22:13:16 2022 ] Batch(135/243) done. Loss: 0.2556 lr:0.100000 network_time: 0.0268
363
+ [ Wed Sep 14 22:14:29 2022 ] Batch(235/243) done. Loss: 0.1613 lr:0.100000 network_time: 0.0310
364
+ [ Wed Sep 14 22:14:34 2022 ] Eval epoch: 49
365
+ [ Wed Sep 14 22:16:08 2022 ] Mean test loss of 796 batches: 3.0899510383605957.
366
+ [ Wed Sep 14 22:16:08 2022 ] Top1: 42.97%
367
+ [ Wed Sep 14 22:16:09 2022 ] Top5: 76.07%
368
+ [ Wed Sep 14 22:16:09 2022 ] Training epoch: 50
369
+ [ Wed Sep 14 22:17:19 2022 ] Batch(92/243) done. Loss: 0.2175 lr:0.100000 network_time: 0.0271
370
+ [ Wed Sep 14 22:18:32 2022 ] Batch(192/243) done. Loss: 0.3184 lr:0.100000 network_time: 0.0277
371
+ [ Wed Sep 14 22:19:09 2022 ] Eval epoch: 50
372
+ [ Wed Sep 14 22:20:42 2022 ] Mean test loss of 796 batches: 3.1481566429138184.
373
+ [ Wed Sep 14 22:20:42 2022 ] Top1: 41.54%
374
+ [ Wed Sep 14 22:20:43 2022 ] Top5: 73.69%
375
+ [ Wed Sep 14 22:20:43 2022 ] Training epoch: 51
376
+ [ Wed Sep 14 22:21:23 2022 ] Batch(49/243) done. Loss: 0.2181 lr:0.100000 network_time: 0.0286
377
+ [ Wed Sep 14 22:22:36 2022 ] Batch(149/243) done. Loss: 0.2625 lr:0.100000 network_time: 0.0268
378
+ [ Wed Sep 14 22:23:44 2022 ] Eval epoch: 51
379
+ [ Wed Sep 14 22:25:17 2022 ] Mean test loss of 796 batches: 2.987004041671753.
380
+ [ Wed Sep 14 22:25:18 2022 ] Top1: 46.25%
381
+ [ Wed Sep 14 22:25:18 2022 ] Top5: 77.31%
382
+ [ Wed Sep 14 22:25:18 2022 ] Training epoch: 52
383
+ [ Wed Sep 14 22:25:26 2022 ] Batch(6/243) done. Loss: 0.1527 lr:0.100000 network_time: 0.0295
384
+ [ Wed Sep 14 22:26:39 2022 ] Batch(106/243) done. Loss: 0.1457 lr:0.100000 network_time: 0.0270
385
+ [ Wed Sep 14 22:27:52 2022 ] Batch(206/243) done. Loss: 0.2779 lr:0.100000 network_time: 0.0280
386
+ [ Wed Sep 14 22:28:18 2022 ] Eval epoch: 52
387
+ [ Wed Sep 14 22:29:51 2022 ] Mean test loss of 796 batches: 3.231384515762329.
388
+ [ Wed Sep 14 22:29:52 2022 ] Top1: 42.97%
389
+ [ Wed Sep 14 22:29:52 2022 ] Top5: 75.65%
390
+ [ Wed Sep 14 22:29:52 2022 ] Training epoch: 53
391
+ [ Wed Sep 14 22:30:42 2022 ] Batch(63/243) done. Loss: 0.2154 lr:0.100000 network_time: 0.0283
392
+ [ Wed Sep 14 22:31:55 2022 ] Batch(163/243) done. Loss: 0.3158 lr:0.100000 network_time: 0.0274
393
+ [ Wed Sep 14 22:32:53 2022 ] Eval epoch: 53
394
+ [ Wed Sep 14 22:34:26 2022 ] Mean test loss of 796 batches: 2.9344334602355957.
395
+ [ Wed Sep 14 22:34:27 2022 ] Top1: 45.77%
396
+ [ Wed Sep 14 22:34:27 2022 ] Top5: 78.17%
397
+ [ Wed Sep 14 22:34:27 2022 ] Training epoch: 54
398
+ [ Wed Sep 14 22:34:45 2022 ] Batch(20/243) done. Loss: 0.2143 lr:0.100000 network_time: 0.0281
399
+ [ Wed Sep 14 22:35:58 2022 ] Batch(120/243) done. Loss: 0.1561 lr:0.100000 network_time: 0.0276
400
+ [ Wed Sep 14 22:37:11 2022 ] Batch(220/243) done. Loss: 0.1489 lr:0.100000 network_time: 0.0270
401
+ [ Wed Sep 14 22:37:28 2022 ] Eval epoch: 54
402
+ [ Wed Sep 14 22:39:01 2022 ] Mean test loss of 796 batches: 3.3158111572265625.
403
+ [ Wed Sep 14 22:39:01 2022 ] Top1: 43.19%
404
+ [ Wed Sep 14 22:39:02 2022 ] Top5: 76.21%
405
+ [ Wed Sep 14 22:39:02 2022 ] Training epoch: 55
406
+ [ Wed Sep 14 22:40:02 2022 ] Batch(77/243) done. Loss: 0.2478 lr:0.100000 network_time: 0.0276
407
+ [ Wed Sep 14 22:41:15 2022 ] Batch(177/243) done. Loss: 0.3717 lr:0.100000 network_time: 0.0268
408
+ [ Wed Sep 14 22:42:02 2022 ] Eval epoch: 55
409
+ [ Wed Sep 14 22:43:36 2022 ] Mean test loss of 796 batches: 3.565183401107788.
410
+ [ Wed Sep 14 22:43:36 2022 ] Top1: 40.67%
411
+ [ Wed Sep 14 22:43:37 2022 ] Top5: 74.40%
412
+ [ Wed Sep 14 22:43:37 2022 ] Training epoch: 56
413
+ [ Wed Sep 14 22:44:05 2022 ] Batch(34/243) done. Loss: 0.1565 lr:0.100000 network_time: 0.0274
414
+ [ Wed Sep 14 22:45:18 2022 ] Batch(134/243) done. Loss: 0.3081 lr:0.100000 network_time: 0.0287
415
+ [ Wed Sep 14 22:46:31 2022 ] Batch(234/243) done. Loss: 0.1216 lr:0.100000 network_time: 0.0253
416
+ [ Wed Sep 14 22:46:37 2022 ] Eval epoch: 56
417
+ [ Wed Sep 14 22:48:10 2022 ] Mean test loss of 796 batches: 3.3822531700134277.
418
+ [ Wed Sep 14 22:48:11 2022 ] Top1: 42.46%
419
+ [ Wed Sep 14 22:48:11 2022 ] Top5: 75.58%
420
+ [ Wed Sep 14 22:48:11 2022 ] Training epoch: 57
421
+ [ Wed Sep 14 22:49:21 2022 ] Batch(91/243) done. Loss: 0.1433 lr:0.100000 network_time: 0.0309
422
+ [ Wed Sep 14 22:50:34 2022 ] Batch(191/243) done. Loss: 0.2230 lr:0.100000 network_time: 0.0272
423
+ [ Wed Sep 14 22:51:12 2022 ] Eval epoch: 57
424
+ [ Wed Sep 14 22:52:46 2022 ] Mean test loss of 796 batches: 3.307635545730591.
425
+ [ Wed Sep 14 22:52:47 2022 ] Top1: 41.26%
426
+ [ Wed Sep 14 22:52:47 2022 ] Top5: 73.64%
427
+ [ Wed Sep 14 22:52:48 2022 ] Training epoch: 58
428
+ [ Wed Sep 14 22:53:26 2022 ] Batch(48/243) done. Loss: 0.2950 lr:0.100000 network_time: 0.0260
429
+ [ Wed Sep 14 22:54:39 2022 ] Batch(148/243) done. Loss: 0.1734 lr:0.100000 network_time: 0.0269
430
+ [ Wed Sep 14 22:55:48 2022 ] Eval epoch: 58
431
+ [ Wed Sep 14 22:57:21 2022 ] Mean test loss of 796 batches: 3.419945240020752.
432
+ [ Wed Sep 14 22:57:22 2022 ] Top1: 42.66%
433
+ [ Wed Sep 14 22:57:22 2022 ] Top5: 74.95%
434
+ [ Wed Sep 14 22:57:22 2022 ] Training epoch: 59
435
+ [ Wed Sep 14 22:57:30 2022 ] Batch(5/243) done. Loss: 0.3432 lr:0.100000 network_time: 0.0278
436
+ [ Wed Sep 14 22:58:43 2022 ] Batch(105/243) done. Loss: 0.3520 lr:0.100000 network_time: 0.0338
437
+ [ Wed Sep 14 22:59:56 2022 ] Batch(205/243) done. Loss: 0.1924 lr:0.100000 network_time: 0.0313
438
+ [ Wed Sep 14 23:00:23 2022 ] Eval epoch: 59
439
+ [ Wed Sep 14 23:01:57 2022 ] Mean test loss of 796 batches: 2.8412914276123047.
440
+ [ Wed Sep 14 23:01:57 2022 ] Top1: 47.84%
441
+ [ Wed Sep 14 23:01:58 2022 ] Top5: 79.38%
442
+ [ Wed Sep 14 23:01:58 2022 ] Training epoch: 60
443
+ [ Wed Sep 14 23:02:47 2022 ] Batch(62/243) done. Loss: 0.1149 lr:0.100000 network_time: 0.0304
444
+ [ Wed Sep 14 23:04:00 2022 ] Batch(162/243) done. Loss: 0.2884 lr:0.100000 network_time: 0.0282
445
+ [ Wed Sep 14 23:04:58 2022 ] Eval epoch: 60
446
+ [ Wed Sep 14 23:06:32 2022 ] Mean test loss of 796 batches: 3.3663244247436523.
447
+ [ Wed Sep 14 23:06:33 2022 ] Top1: 42.97%
448
+ [ Wed Sep 14 23:06:33 2022 ] Top5: 75.24%
449
+ [ Wed Sep 14 23:06:33 2022 ] Training epoch: 61
450
+ [ Wed Sep 14 23:06:51 2022 ] Batch(19/243) done. Loss: 0.1210 lr:0.010000 network_time: 0.0301
451
+ [ Wed Sep 14 23:08:04 2022 ] Batch(119/243) done. Loss: 0.0949 lr:0.010000 network_time: 0.0270
452
+ [ Wed Sep 14 23:09:16 2022 ] Batch(219/243) done. Loss: 0.0451 lr:0.010000 network_time: 0.0265
453
+ [ Wed Sep 14 23:09:33 2022 ] Eval epoch: 61
454
+ [ Wed Sep 14 23:11:07 2022 ] Mean test loss of 796 batches: 2.6556432247161865.
455
+ [ Wed Sep 14 23:11:08 2022 ] Top1: 51.52%
456
+ [ Wed Sep 14 23:11:09 2022 ] Top5: 81.85%
457
+ [ Wed Sep 14 23:11:09 2022 ] Training epoch: 62
458
+ [ Wed Sep 14 23:12:08 2022 ] Batch(76/243) done. Loss: 0.0246 lr:0.010000 network_time: 0.0308
459
+ [ Wed Sep 14 23:13:21 2022 ] Batch(176/243) done. Loss: 0.0368 lr:0.010000 network_time: 0.0273
460
+ [ Wed Sep 14 23:14:09 2022 ] Eval epoch: 62
461
+ [ Wed Sep 14 23:15:43 2022 ] Mean test loss of 796 batches: 2.673084259033203.
462
+ [ Wed Sep 14 23:15:43 2022 ] Top1: 51.91%
463
+ [ Wed Sep 14 23:15:43 2022 ] Top5: 82.23%
464
+ [ Wed Sep 14 23:15:44 2022 ] Training epoch: 63
465
+ [ Wed Sep 14 23:16:12 2022 ] Batch(33/243) done. Loss: 0.0230 lr:0.010000 network_time: 0.0300
466
+ [ Wed Sep 14 23:17:24 2022 ] Batch(133/243) done. Loss: 0.0565 lr:0.010000 network_time: 0.0332
467
+ [ Wed Sep 14 23:18:37 2022 ] Batch(233/243) done. Loss: 0.0326 lr:0.010000 network_time: 0.0257
468
+ [ Wed Sep 14 23:18:44 2022 ] Eval epoch: 63
469
+ [ Wed Sep 14 23:20:17 2022 ] Mean test loss of 796 batches: 2.6055307388305664.
470
+ [ Wed Sep 14 23:20:18 2022 ] Top1: 52.54%
471
+ [ Wed Sep 14 23:20:19 2022 ] Top5: 82.33%
472
+ [ Wed Sep 14 23:20:19 2022 ] Training epoch: 64
473
+ [ Wed Sep 14 23:21:28 2022 ] Batch(90/243) done. Loss: 0.0097 lr:0.010000 network_time: 0.0307
474
+ [ Wed Sep 14 23:22:41 2022 ] Batch(190/243) done. Loss: 0.0286 lr:0.010000 network_time: 0.0270
475
+ [ Wed Sep 14 23:23:19 2022 ] Eval epoch: 64
476
+ [ Wed Sep 14 23:24:52 2022 ] Mean test loss of 796 batches: 2.6478676795959473.
477
+ [ Wed Sep 14 23:24:53 2022 ] Top1: 51.81%
478
+ [ Wed Sep 14 23:24:53 2022 ] Top5: 82.13%
479
+ [ Wed Sep 14 23:24:53 2022 ] Training epoch: 65
480
+ [ Wed Sep 14 23:25:31 2022 ] Batch(47/243) done. Loss: 0.0156 lr:0.010000 network_time: 0.0270
481
+ [ Wed Sep 14 23:26:44 2022 ] Batch(147/243) done. Loss: 0.0217 lr:0.010000 network_time: 0.0272
482
+ [ Wed Sep 14 23:27:54 2022 ] Eval epoch: 65
483
+ [ Wed Sep 14 23:29:27 2022 ] Mean test loss of 796 batches: 2.712385892868042.
484
+ [ Wed Sep 14 23:29:28 2022 ] Top1: 52.71%
485
+ [ Wed Sep 14 23:29:29 2022 ] Top5: 82.57%
486
+ [ Wed Sep 14 23:29:29 2022 ] Training epoch: 66
487
+ [ Wed Sep 14 23:29:35 2022 ] Batch(4/243) done. Loss: 0.0296 lr:0.010000 network_time: 0.0305
488
+ [ Wed Sep 14 23:30:48 2022 ] Batch(104/243) done. Loss: 0.0082 lr:0.010000 network_time: 0.0283
489
+ [ Wed Sep 14 23:32:01 2022 ] Batch(204/243) done. Loss: 0.0840 lr:0.010000 network_time: 0.0287
490
+ [ Wed Sep 14 23:32:29 2022 ] Eval epoch: 66
491
+ [ Wed Sep 14 23:34:03 2022 ] Mean test loss of 796 batches: 2.6864709854125977.
492
+ [ Wed Sep 14 23:34:03 2022 ] Top1: 52.50%
493
+ [ Wed Sep 14 23:34:03 2022 ] Top5: 82.39%
494
+ [ Wed Sep 14 23:34:04 2022 ] Training epoch: 67
495
+ [ Wed Sep 14 23:34:52 2022 ] Batch(61/243) done. Loss: 0.0235 lr:0.010000 network_time: 0.0258
496
+ [ Wed Sep 14 23:36:05 2022 ] Batch(161/243) done. Loss: 0.0380 lr:0.010000 network_time: 0.0283
497
+ [ Wed Sep 14 23:37:04 2022 ] Eval epoch: 67
498
+ [ Wed Sep 14 23:38:37 2022 ] Mean test loss of 796 batches: 2.689728260040283.
499
+ [ Wed Sep 14 23:38:37 2022 ] Top1: 52.35%
500
+ [ Wed Sep 14 23:38:38 2022 ] Top5: 82.37%
501
+ [ Wed Sep 14 23:38:38 2022 ] Training epoch: 68
502
+ [ Wed Sep 14 23:38:55 2022 ] Batch(18/243) done. Loss: 0.0303 lr:0.010000 network_time: 0.0293
503
+ [ Wed Sep 14 23:40:08 2022 ] Batch(118/243) done. Loss: 0.0173 lr:0.010000 network_time: 0.0286
504
+ [ Wed Sep 14 23:41:20 2022 ] Batch(218/243) done. Loss: 0.0388 lr:0.010000 network_time: 0.0266
505
+ [ Wed Sep 14 23:41:38 2022 ] Eval epoch: 68
506
+ [ Wed Sep 14 23:43:12 2022 ] Mean test loss of 796 batches: 2.7163398265838623.
507
+ [ Wed Sep 14 23:43:13 2022 ] Top1: 51.46%
508
+ [ Wed Sep 14 23:43:13 2022 ] Top5: 81.70%
509
+ [ Wed Sep 14 23:43:13 2022 ] Training epoch: 69
510
+ [ Wed Sep 14 23:44:12 2022 ] Batch(75/243) done. Loss: 0.0145 lr:0.010000 network_time: 0.0321
511
+ [ Wed Sep 14 23:45:24 2022 ] Batch(175/243) done. Loss: 0.0141 lr:0.010000 network_time: 0.0321
512
+ [ Wed Sep 14 23:46:14 2022 ] Eval epoch: 69
513
+ [ Wed Sep 14 23:47:47 2022 ] Mean test loss of 796 batches: 2.7593424320220947.
514
+ [ Wed Sep 14 23:47:47 2022 ] Top1: 51.67%
515
+ [ Wed Sep 14 23:47:48 2022 ] Top5: 81.67%
516
+ [ Wed Sep 14 23:47:48 2022 ] Training epoch: 70
517
+ [ Wed Sep 14 23:48:15 2022 ] Batch(32/243) done. Loss: 0.0063 lr:0.010000 network_time: 0.0278
518
+ [ Wed Sep 14 23:49:28 2022 ] Batch(132/243) done. Loss: 0.0079 lr:0.010000 network_time: 0.0273
519
+ [ Wed Sep 14 23:50:41 2022 ] Batch(232/243) done. Loss: 0.0117 lr:0.010000 network_time: 0.0312
520
+ [ Wed Sep 14 23:50:48 2022 ] Eval epoch: 70
521
+ [ Wed Sep 14 23:52:22 2022 ] Mean test loss of 796 batches: 2.6540184020996094.
522
+ [ Wed Sep 14 23:52:23 2022 ] Top1: 53.21%
523
+ [ Wed Sep 14 23:52:23 2022 ] Top5: 82.79%
524
+ [ Wed Sep 14 23:52:24 2022 ] Training epoch: 71
525
+ [ Wed Sep 14 23:53:32 2022 ] Batch(89/243) done. Loss: 0.0049 lr:0.010000 network_time: 0.0321
526
+ [ Wed Sep 14 23:54:45 2022 ] Batch(189/243) done. Loss: 0.0227 lr:0.010000 network_time: 0.0279
527
+ [ Wed Sep 14 23:55:24 2022 ] Eval epoch: 71
528
+ [ Wed Sep 14 23:56:57 2022 ] Mean test loss of 796 batches: 2.771545648574829.
529
+ [ Wed Sep 14 23:56:58 2022 ] Top1: 50.36%
530
+ [ Wed Sep 14 23:56:58 2022 ] Top5: 81.09%
531
+ [ Wed Sep 14 23:56:59 2022 ] Training epoch: 72
532
+ [ Wed Sep 14 23:57:36 2022 ] Batch(46/243) done. Loss: 0.0092 lr:0.010000 network_time: 0.0273
533
+ [ Wed Sep 14 23:58:49 2022 ] Batch(146/243) done. Loss: 0.0121 lr:0.010000 network_time: 0.0303
534
+ [ Wed Sep 14 23:59:59 2022 ] Eval epoch: 72
535
+ [ Thu Sep 15 00:01:33 2022 ] Mean test loss of 796 batches: 2.7130022048950195.
536
+ [ Thu Sep 15 00:01:33 2022 ] Top1: 52.53%
537
+ [ Thu Sep 15 00:01:34 2022 ] Top5: 82.31%
538
+ [ Thu Sep 15 00:01:34 2022 ] Training epoch: 73
539
+ [ Thu Sep 15 00:01:40 2022 ] Batch(3/243) done. Loss: 0.0085 lr:0.010000 network_time: 0.0277
540
+ [ Thu Sep 15 00:02:53 2022 ] Batch(103/243) done. Loss: 0.0057 lr:0.010000 network_time: 0.0288
541
+ [ Thu Sep 15 00:04:05 2022 ] Batch(203/243) done. Loss: 0.0071 lr:0.010000 network_time: 0.0289
542
+ [ Thu Sep 15 00:04:34 2022 ] Eval epoch: 73
543
+ [ Thu Sep 15 00:06:07 2022 ] Mean test loss of 796 batches: 2.7051308155059814.
544
+ [ Thu Sep 15 00:06:07 2022 ] Top1: 52.59%
545
+ [ Thu Sep 15 00:06:08 2022 ] Top5: 82.37%
546
+ [ Thu Sep 15 00:06:08 2022 ] Training epoch: 74
547
+ [ Thu Sep 15 00:06:55 2022 ] Batch(60/243) done. Loss: 0.0133 lr:0.010000 network_time: 0.0303
548
+ [ Thu Sep 15 00:08:08 2022 ] Batch(160/243) done. Loss: 0.0161 lr:0.010000 network_time: 0.0272
549
+ [ Thu Sep 15 00:09:08 2022 ] Eval epoch: 74
550
+ [ Thu Sep 15 00:10:42 2022 ] Mean test loss of 796 batches: 2.7454113960266113.
551
+ [ Thu Sep 15 00:10:43 2022 ] Top1: 53.42%
552
+ [ Thu Sep 15 00:10:43 2022 ] Top5: 82.85%
553
+ [ Thu Sep 15 00:10:43 2022 ] Training epoch: 75
554
+ [ Thu Sep 15 00:10:59 2022 ] Batch(17/243) done. Loss: 0.0106 lr:0.010000 network_time: 0.0302
555
+ [ Thu Sep 15 00:12:12 2022 ] Batch(117/243) done. Loss: 0.0101 lr:0.010000 network_time: 0.0315
556
+ [ Thu Sep 15 00:13:25 2022 ] Batch(217/243) done. Loss: 0.0074 lr:0.010000 network_time: 0.0309
557
+ [ Thu Sep 15 00:13:44 2022 ] Eval epoch: 75
558
+ [ Thu Sep 15 00:15:17 2022 ] Mean test loss of 796 batches: 2.6957931518554688.
559
+ [ Thu Sep 15 00:15:17 2022 ] Top1: 53.34%
560
+ [ Thu Sep 15 00:15:18 2022 ] Top5: 82.92%
561
+ [ Thu Sep 15 00:15:18 2022 ] Training epoch: 76
562
+ [ Thu Sep 15 00:16:15 2022 ] Batch(74/243) done. Loss: 0.0067 lr:0.010000 network_time: 0.0269
563
+ [ Thu Sep 15 00:17:28 2022 ] Batch(174/243) done. Loss: 0.0176 lr:0.010000 network_time: 0.0273
564
+ [ Thu Sep 15 00:18:18 2022 ] Eval epoch: 76
565
+ [ Thu Sep 15 00:19:51 2022 ] Mean test loss of 796 batches: 2.671660900115967.
566
+ [ Thu Sep 15 00:19:52 2022 ] Top1: 52.39%
567
+ [ Thu Sep 15 00:19:52 2022 ] Top5: 82.38%
568
+ [ Thu Sep 15 00:19:52 2022 ] Training epoch: 77
569
+ [ Thu Sep 15 00:20:18 2022 ] Batch(31/243) done. Loss: 0.0083 lr:0.010000 network_time: 0.0286
570
+ [ Thu Sep 15 00:21:31 2022 ] Batch(131/243) done. Loss: 0.0144 lr:0.010000 network_time: 0.0280
571
+ [ Thu Sep 15 00:22:44 2022 ] Batch(231/243) done. Loss: 0.0086 lr:0.010000 network_time: 0.0455
572
+ [ Thu Sep 15 00:22:53 2022 ] Eval epoch: 77
573
+ [ Thu Sep 15 00:24:25 2022 ] Mean test loss of 796 batches: 2.7283172607421875.
574
+ [ Thu Sep 15 00:24:26 2022 ] Top1: 52.29%
575
+ [ Thu Sep 15 00:24:26 2022 ] Top5: 82.03%
576
+ [ Thu Sep 15 00:24:26 2022 ] Training epoch: 78
577
+ [ Thu Sep 15 00:25:34 2022 ] Batch(88/243) done. Loss: 0.0064 lr:0.010000 network_time: 0.0271
578
+ [ Thu Sep 15 00:26:47 2022 ] Batch(188/243) done. Loss: 0.0194 lr:0.010000 network_time: 0.0329
579
+ [ Thu Sep 15 00:27:27 2022 ] Eval epoch: 78
580
+ [ Thu Sep 15 00:29:00 2022 ] Mean test loss of 796 batches: 2.826481580734253.
581
+ [ Thu Sep 15 00:29:00 2022 ] Top1: 51.79%
582
+ [ Thu Sep 15 00:29:00 2022 ] Top5: 81.89%
583
+ [ Thu Sep 15 00:29:01 2022 ] Training epoch: 79
584
+ [ Thu Sep 15 00:29:37 2022 ] Batch(45/243) done. Loss: 0.0074 lr:0.010000 network_time: 0.0269
585
+ [ Thu Sep 15 00:30:50 2022 ] Batch(145/243) done. Loss: 0.0053 lr:0.010000 network_time: 0.0278
586
+ [ Thu Sep 15 00:32:01 2022 ] Eval epoch: 79
587
+ [ Thu Sep 15 00:33:34 2022 ] Mean test loss of 796 batches: 2.7605230808258057.
588
+ [ Thu Sep 15 00:33:34 2022 ] Top1: 52.66%
589
+ [ Thu Sep 15 00:33:35 2022 ] Top5: 82.48%
590
+ [ Thu Sep 15 00:33:35 2022 ] Training epoch: 80
591
+ [ Thu Sep 15 00:33:40 2022 ] Batch(2/243) done. Loss: 0.0047 lr:0.010000 network_time: 0.0335
592
+ [ Thu Sep 15 00:34:53 2022 ] Batch(102/243) done. Loss: 0.0056 lr:0.010000 network_time: 0.0275
593
+ [ Thu Sep 15 00:36:06 2022 ] Batch(202/243) done. Loss: 0.0122 lr:0.010000 network_time: 0.0267
594
+ [ Thu Sep 15 00:36:35 2022 ] Eval epoch: 80
595
+ [ Thu Sep 15 00:38:08 2022 ] Mean test loss of 796 batches: 2.718104600906372.
596
+ [ Thu Sep 15 00:38:09 2022 ] Top1: 52.58%
597
+ [ Thu Sep 15 00:38:09 2022 ] Top5: 82.38%
598
+ [ Thu Sep 15 00:38:09 2022 ] Training epoch: 81
599
+ [ Thu Sep 15 00:38:56 2022 ] Batch(59/243) done. Loss: 0.0089 lr:0.001000 network_time: 0.0282
600
+ [ Thu Sep 15 00:40:09 2022 ] Batch(159/243) done. Loss: 0.0033 lr:0.001000 network_time: 0.0361
601
+ [ Thu Sep 15 00:41:09 2022 ] Eval epoch: 81
602
+ [ Thu Sep 15 00:42:43 2022 ] Mean test loss of 796 batches: 2.718365430831909.
603
+ [ Thu Sep 15 00:42:43 2022 ] Top1: 52.48%
604
+ [ Thu Sep 15 00:42:44 2022 ] Top5: 82.39%
605
+ [ Thu Sep 15 00:42:44 2022 ] Training epoch: 82
606
+ [ Thu Sep 15 00:42:59 2022 ] Batch(16/243) done. Loss: 0.0101 lr:0.001000 network_time: 0.0280
607
+ [ Thu Sep 15 00:44:12 2022 ] Batch(116/243) done. Loss: 0.0033 lr:0.001000 network_time: 0.0307
608
+ [ Thu Sep 15 00:45:25 2022 ] Batch(216/243) done. Loss: 0.0155 lr:0.001000 network_time: 0.0332
609
+ [ Thu Sep 15 00:45:44 2022 ] Eval epoch: 82
610
+ [ Thu Sep 15 00:47:17 2022 ] Mean test loss of 796 batches: 2.7449679374694824.
611
+ [ Thu Sep 15 00:47:17 2022 ] Top1: 52.72%
612
+ [ Thu Sep 15 00:47:17 2022 ] Top5: 82.64%
613
+ [ Thu Sep 15 00:47:18 2022 ] Training epoch: 83
614
+ [ Thu Sep 15 00:48:15 2022 ] Batch(73/243) done. Loss: 0.0083 lr:0.001000 network_time: 0.0306
615
+ [ Thu Sep 15 00:49:27 2022 ] Batch(173/243) done. Loss: 0.0050 lr:0.001000 network_time: 0.0270
616
+ [ Thu Sep 15 00:50:18 2022 ] Eval epoch: 83
617
+ [ Thu Sep 15 00:51:51 2022 ] Mean test loss of 796 batches: 2.822268486022949.
618
+ [ Thu Sep 15 00:51:51 2022 ] Top1: 52.45%
619
+ [ Thu Sep 15 00:51:52 2022 ] Top5: 82.06%
620
+ [ Thu Sep 15 00:51:52 2022 ] Training epoch: 84
621
+ [ Thu Sep 15 00:52:18 2022 ] Batch(30/243) done. Loss: 0.0095 lr:0.001000 network_time: 0.0279
622
+ [ Thu Sep 15 00:53:30 2022 ] Batch(130/243) done. Loss: 0.0173 lr:0.001000 network_time: 0.0269
623
+ [ Thu Sep 15 00:54:43 2022 ] Batch(230/243) done. Loss: 0.0036 lr:0.001000 network_time: 0.0300
624
+ [ Thu Sep 15 00:54:52 2022 ] Eval epoch: 84
625
+ [ Thu Sep 15 00:56:26 2022 ] Mean test loss of 796 batches: 2.6593453884124756.
626
+ [ Thu Sep 15 00:56:27 2022 ] Top1: 53.58%
627
+ [ Thu Sep 15 00:56:27 2022 ] Top5: 83.14%
628
+ [ Thu Sep 15 00:56:27 2022 ] Training epoch: 85
629
+ [ Thu Sep 15 00:57:34 2022 ] Batch(87/243) done. Loss: 0.0185 lr:0.001000 network_time: 0.0267
630
+ [ Thu Sep 15 00:58:47 2022 ] Batch(187/243) done. Loss: 0.0083 lr:0.001000 network_time: 0.0271
631
+ [ Thu Sep 15 00:59:28 2022 ] Eval epoch: 85
632
+ [ Thu Sep 15 01:01:01 2022 ] Mean test loss of 796 batches: 2.702404737472534.
633
+ [ Thu Sep 15 01:01:01 2022 ] Top1: 53.10%
634
+ [ Thu Sep 15 01:01:02 2022 ] Top5: 82.78%
635
+ [ Thu Sep 15 01:01:02 2022 ] Training epoch: 86
636
+ [ Thu Sep 15 01:01:38 2022 ] Batch(44/243) done. Loss: 0.0119 lr:0.001000 network_time: 0.0265
637
+ [ Thu Sep 15 01:02:50 2022 ] Batch(144/243) done. Loss: 0.0163 lr:0.001000 network_time: 0.0269
638
+ [ Thu Sep 15 01:04:02 2022 ] Eval epoch: 86
639
+ [ Thu Sep 15 01:05:35 2022 ] Mean test loss of 796 batches: 2.6936655044555664.
640
+ [ Thu Sep 15 01:05:36 2022 ] Top1: 52.86%
641
+ [ Thu Sep 15 01:05:36 2022 ] Top5: 82.57%
642
+ [ Thu Sep 15 01:05:36 2022 ] Training epoch: 87
643
+ [ Thu Sep 15 01:05:41 2022 ] Batch(1/243) done. Loss: 0.0072 lr:0.001000 network_time: 0.0245
644
+ [ Thu Sep 15 01:06:54 2022 ] Batch(101/243) done. Loss: 0.0078 lr:0.001000 network_time: 0.0275
645
+ [ Thu Sep 15 01:08:06 2022 ] Batch(201/243) done. Loss: 0.0066 lr:0.001000 network_time: 0.0276
646
+ [ Thu Sep 15 01:08:37 2022 ] Eval epoch: 87
647
+ [ Thu Sep 15 01:10:10 2022 ] Mean test loss of 796 batches: 2.7110776901245117.
648
+ [ Thu Sep 15 01:10:11 2022 ] Top1: 52.20%
649
+ [ Thu Sep 15 01:10:11 2022 ] Top5: 82.28%
650
+ [ Thu Sep 15 01:10:11 2022 ] Training epoch: 88
651
+ [ Thu Sep 15 01:10:57 2022 ] Batch(58/243) done. Loss: 0.0064 lr:0.001000 network_time: 0.0315
652
+ [ Thu Sep 15 01:12:10 2022 ] Batch(158/243) done. Loss: 0.0125 lr:0.001000 network_time: 0.0266
653
+ [ Thu Sep 15 01:13:11 2022 ] Eval epoch: 88
654
+ [ Thu Sep 15 01:14:44 2022 ] Mean test loss of 796 batches: 2.755002498626709.
655
+ [ Thu Sep 15 01:14:45 2022 ] Top1: 52.74%
656
+ [ Thu Sep 15 01:14:45 2022 ] Top5: 82.36%
657
+ [ Thu Sep 15 01:14:45 2022 ] Training epoch: 89
658
+ [ Thu Sep 15 01:15:00 2022 ] Batch(15/243) done. Loss: 0.0059 lr:0.001000 network_time: 0.0282
659
+ [ Thu Sep 15 01:16:13 2022 ] Batch(115/243) done. Loss: 0.0055 lr:0.001000 network_time: 0.0269
660
+ [ Thu Sep 15 01:17:26 2022 ] Batch(215/243) done. Loss: 0.0053 lr:0.001000 network_time: 0.0314
661
+ [ Thu Sep 15 01:17:46 2022 ] Eval epoch: 89
662
+ [ Thu Sep 15 01:19:19 2022 ] Mean test loss of 796 batches: 2.7093260288238525.
663
+ [ Thu Sep 15 01:19:19 2022 ] Top1: 52.87%
664
+ [ Thu Sep 15 01:19:20 2022 ] Top5: 82.58%
665
+ [ Thu Sep 15 01:19:20 2022 ] Training epoch: 90
666
+ [ Thu Sep 15 01:20:16 2022 ] Batch(72/243) done. Loss: 0.0056 lr:0.001000 network_time: 0.0516
667
+ [ Thu Sep 15 01:21:29 2022 ] Batch(172/243) done. Loss: 0.0102 lr:0.001000 network_time: 0.0275
668
+ [ Thu Sep 15 01:22:20 2022 ] Eval epoch: 90
669
+ [ Thu Sep 15 01:23:53 2022 ] Mean test loss of 796 batches: 2.71431303024292.
670
+ [ Thu Sep 15 01:23:54 2022 ] Top1: 53.00%
671
+ [ Thu Sep 15 01:23:54 2022 ] Top5: 82.56%
672
+ [ Thu Sep 15 01:23:54 2022 ] Training epoch: 91
673
+ [ Thu Sep 15 01:24:19 2022 ] Batch(29/243) done. Loss: 0.0107 lr:0.001000 network_time: 0.0299
674
+ [ Thu Sep 15 01:25:32 2022 ] Batch(129/243) done. Loss: 0.0092 lr:0.001000 network_time: 0.0271
675
+ [ Thu Sep 15 01:26:45 2022 ] Batch(229/243) done. Loss: 0.0047 lr:0.001000 network_time: 0.0313
676
+ [ Thu Sep 15 01:26:54 2022 ] Eval epoch: 91
677
+ [ Thu Sep 15 01:28:28 2022 ] Mean test loss of 796 batches: 2.732354164123535.
678
+ [ Thu Sep 15 01:28:28 2022 ] Top1: 52.52%
679
+ [ Thu Sep 15 01:28:29 2022 ] Top5: 82.25%
680
+ [ Thu Sep 15 01:28:29 2022 ] Training epoch: 92
681
+ [ Thu Sep 15 01:29:35 2022 ] Batch(86/243) done. Loss: 0.0046 lr:0.001000 network_time: 0.0280
682
+ [ Thu Sep 15 01:30:48 2022 ] Batch(186/243) done. Loss: 0.0053 lr:0.001000 network_time: 0.0270
683
+ [ Thu Sep 15 01:31:29 2022 ] Eval epoch: 92
684
+ [ Thu Sep 15 01:33:03 2022 ] Mean test loss of 796 batches: 2.710573196411133.
685
+ [ Thu Sep 15 01:33:03 2022 ] Top1: 53.56%
686
+ [ Thu Sep 15 01:33:04 2022 ] Top5: 83.14%
687
+ [ Thu Sep 15 01:33:04 2022 ] Training epoch: 93
688
+ [ Thu Sep 15 01:33:39 2022 ] Batch(43/243) done. Loss: 0.0047 lr:0.001000 network_time: 0.0268
689
+ [ Thu Sep 15 01:34:52 2022 ] Batch(143/243) done. Loss: 0.0108 lr:0.001000 network_time: 0.0303
690
+ [ Thu Sep 15 01:36:04 2022 ] Eval epoch: 93
691
+ [ Thu Sep 15 01:37:37 2022 ] Mean test loss of 796 batches: 2.7435684204101562.
692
+ [ Thu Sep 15 01:37:38 2022 ] Top1: 52.97%
693
+ [ Thu Sep 15 01:37:38 2022 ] Top5: 82.68%
694
+ [ Thu Sep 15 01:37:38 2022 ] Training epoch: 94
695
+ [ Thu Sep 15 01:37:42 2022 ] Batch(0/243) done. Loss: 0.0251 lr:0.001000 network_time: 0.0575
696
+ [ Thu Sep 15 01:38:55 2022 ] Batch(100/243) done. Loss: 0.0031 lr:0.001000 network_time: 0.0354
697
+ [ Thu Sep 15 01:40:08 2022 ] Batch(200/243) done. Loss: 0.0062 lr:0.001000 network_time: 0.0263
698
+ [ Thu Sep 15 01:40:39 2022 ] Eval epoch: 94
699
+ [ Thu Sep 15 01:42:12 2022 ] Mean test loss of 796 batches: 2.8339927196502686.
700
+ [ Thu Sep 15 01:42:13 2022 ] Top1: 49.99%
701
+ [ Thu Sep 15 01:42:13 2022 ] Top5: 80.98%
702
+ [ Thu Sep 15 01:42:13 2022 ] Training epoch: 95
703
+ [ Thu Sep 15 01:42:58 2022 ] Batch(57/243) done. Loss: 0.0037 lr:0.001000 network_time: 0.0306
704
+ [ Thu Sep 15 01:44:11 2022 ] Batch(157/243) done. Loss: 0.0046 lr:0.001000 network_time: 0.0276
705
+ [ Thu Sep 15 01:45:14 2022 ] Eval epoch: 95
706
+ [ Thu Sep 15 01:46:47 2022 ] Mean test loss of 796 batches: 2.682378053665161.
707
+ [ Thu Sep 15 01:46:47 2022 ] Top1: 52.81%
708
+ [ Thu Sep 15 01:46:48 2022 ] Top5: 82.45%
709
+ [ Thu Sep 15 01:46:48 2022 ] Training epoch: 96
710
+ [ Thu Sep 15 01:47:02 2022 ] Batch(14/243) done. Loss: 0.0071 lr:0.001000 network_time: 0.0259
711
+ [ Thu Sep 15 01:48:15 2022 ] Batch(114/243) done. Loss: 0.0159 lr:0.001000 network_time: 0.0266
712
+ [ Thu Sep 15 01:49:28 2022 ] Batch(214/243) done. Loss: 0.0060 lr:0.001000 network_time: 0.0269
713
+ [ Thu Sep 15 01:49:48 2022 ] Eval epoch: 96
714
+ [ Thu Sep 15 01:51:22 2022 ] Mean test loss of 796 batches: 2.7707955837249756.
715
+ [ Thu Sep 15 01:51:22 2022 ] Top1: 52.46%
716
+ [ Thu Sep 15 01:51:23 2022 ] Top5: 82.37%
717
+ [ Thu Sep 15 01:51:23 2022 ] Training epoch: 97
718
+ [ Thu Sep 15 01:52:18 2022 ] Batch(71/243) done. Loss: 0.0095 lr:0.001000 network_time: 0.0325
719
+ [ Thu Sep 15 01:53:31 2022 ] Batch(171/243) done. Loss: 0.0051 lr:0.001000 network_time: 0.0266
720
+ [ Thu Sep 15 01:54:23 2022 ] Eval epoch: 97
721
+ [ Thu Sep 15 01:55:56 2022 ] Mean test loss of 796 batches: 2.725510597229004.
722
+ [ Thu Sep 15 01:55:57 2022 ] Top1: 52.79%
723
+ [ Thu Sep 15 01:55:57 2022 ] Top5: 82.58%
724
+ [ Thu Sep 15 01:55:58 2022 ] Training epoch: 98
725
+ [ Thu Sep 15 01:56:22 2022 ] Batch(28/243) done. Loss: 0.0046 lr:0.001000 network_time: 0.0261
726
+ [ Thu Sep 15 01:57:34 2022 ] Batch(128/243) done. Loss: 0.0079 lr:0.001000 network_time: 0.0325
727
+ [ Thu Sep 15 01:58:47 2022 ] Batch(228/243) done. Loss: 0.0101 lr:0.001000 network_time: 0.0305
728
+ [ Thu Sep 15 01:58:58 2022 ] Eval epoch: 98
729
+ [ Thu Sep 15 02:00:31 2022 ] Mean test loss of 796 batches: 2.7130801677703857.
730
+ [ Thu Sep 15 02:00:32 2022 ] Top1: 53.37%
731
+ [ Thu Sep 15 02:00:32 2022 ] Top5: 82.82%
732
+ [ Thu Sep 15 02:00:32 2022 ] Training epoch: 99
733
+ [ Thu Sep 15 02:01:38 2022 ] Batch(85/243) done. Loss: 0.0145 lr:0.001000 network_time: 0.0272
734
+ [ Thu Sep 15 02:02:51 2022 ] Batch(185/243) done. Loss: 0.0849 lr:0.001000 network_time: 0.0275
735
+ [ Thu Sep 15 02:03:32 2022 ] Eval epoch: 99
736
+ [ Thu Sep 15 02:05:06 2022 ] Mean test loss of 796 batches: 2.702904224395752.
737
+ [ Thu Sep 15 02:05:06 2022 ] Top1: 53.17%
738
+ [ Thu Sep 15 02:05:07 2022 ] Top5: 82.73%
739
+ [ Thu Sep 15 02:05:07 2022 ] Training epoch: 100
740
+ [ Thu Sep 15 02:05:41 2022 ] Batch(42/243) done. Loss: 0.0102 lr:0.001000 network_time: 0.0273
741
+ [ Thu Sep 15 02:06:54 2022 ] Batch(142/243) done. Loss: 0.0022 lr:0.001000 network_time: 0.0318
742
+ [ Thu Sep 15 02:08:07 2022 ] Batch(242/243) done. Loss: 0.0068 lr:0.001000 network_time: 0.0308
743
+ [ Thu Sep 15 02:08:07 2022 ] Eval epoch: 100
744
+ [ Thu Sep 15 02:09:40 2022 ] Mean test loss of 796 batches: 2.74697208404541.
745
+ [ Thu Sep 15 02:09:41 2022 ] Top1: 53.47%
746
+ [ Thu Sep 15 02:09:42 2022 ] Top5: 82.99%
747
+ [ Thu Sep 15 02:09:42 2022 ] Training epoch: 101
748
+ [ Thu Sep 15 02:10:58 2022 ] Batch(99/243) done. Loss: 0.0039 lr:0.000100 network_time: 0.0306
749
+ [ Thu Sep 15 02:12:11 2022 ] Batch(199/243) done. Loss: 0.0048 lr:0.000100 network_time: 0.0266
750
+ [ Thu Sep 15 02:12:42 2022 ] Eval epoch: 101
751
+ [ Thu Sep 15 02:14:16 2022 ] Mean test loss of 796 batches: 2.763756275177002.
752
+ [ Thu Sep 15 02:14:16 2022 ] Top1: 53.25%
753
+ [ Thu Sep 15 02:14:17 2022 ] Top5: 82.73%
754
+ [ Thu Sep 15 02:14:17 2022 ] Training epoch: 102
755
+ [ Thu Sep 15 02:15:02 2022 ] Batch(56/243) done. Loss: 0.0045 lr:0.000100 network_time: 0.0257
756
+ [ Thu Sep 15 02:16:15 2022 ] Batch(156/243) done. Loss: 0.0088 lr:0.000100 network_time: 0.0262
757
+ [ Thu Sep 15 02:17:18 2022 ] Eval epoch: 102
758
+ [ Thu Sep 15 02:18:52 2022 ] Mean test loss of 796 batches: 2.7571358680725098.
759
+ [ Thu Sep 15 02:18:52 2022 ] Top1: 52.87%
760
+ [ Thu Sep 15 02:18:52 2022 ] Top5: 82.69%
761
+ [ Thu Sep 15 02:18:53 2022 ] Training epoch: 103
762
+ [ Thu Sep 15 02:19:06 2022 ] Batch(13/243) done. Loss: 0.0021 lr:0.000100 network_time: 0.0316
763
+ [ Thu Sep 15 02:20:19 2022 ] Batch(113/243) done. Loss: 0.0048 lr:0.000100 network_time: 0.0274
764
+ [ Thu Sep 15 02:21:31 2022 ] Batch(213/243) done. Loss: 0.0023 lr:0.000100 network_time: 0.0336
765
+ [ Thu Sep 15 02:21:53 2022 ] Eval epoch: 103
766
+ [ Thu Sep 15 02:23:26 2022 ] Mean test loss of 796 batches: 2.692415952682495.
767
+ [ Thu Sep 15 02:23:26 2022 ] Top1: 52.04%
768
+ [ Thu Sep 15 02:23:27 2022 ] Top5: 82.16%
769
+ [ Thu Sep 15 02:23:27 2022 ] Training epoch: 104
770
+ [ Thu Sep 15 02:24:21 2022 ] Batch(70/243) done. Loss: 0.0102 lr:0.000100 network_time: 0.0269
771
+ [ Thu Sep 15 02:25:34 2022 ] Batch(170/243) done. Loss: 0.0059 lr:0.000100 network_time: 0.0278
772
+ [ Thu Sep 15 02:26:27 2022 ] Eval epoch: 104
773
+ [ Thu Sep 15 02:28:00 2022 ] Mean test loss of 796 batches: 2.7434446811676025.
774
+ [ Thu Sep 15 02:28:00 2022 ] Top1: 53.44%
775
+ [ Thu Sep 15 02:28:01 2022 ] Top5: 82.78%
776
+ [ Thu Sep 15 02:28:01 2022 ] Training epoch: 105
777
+ [ Thu Sep 15 02:28:25 2022 ] Batch(27/243) done. Loss: 0.0211 lr:0.000100 network_time: 0.0272
778
+ [ Thu Sep 15 02:29:38 2022 ] Batch(127/243) done. Loss: 0.0127 lr:0.000100 network_time: 0.0269
779
+ [ Thu Sep 15 02:30:50 2022 ] Batch(227/243) done. Loss: 0.0139 lr:0.000100 network_time: 0.0337
780
+ [ Thu Sep 15 02:31:02 2022 ] Eval epoch: 105
781
+ [ Thu Sep 15 02:32:35 2022 ] Mean test loss of 796 batches: 2.795769691467285.
782
+ [ Thu Sep 15 02:32:35 2022 ] Top1: 52.07%
783
+ [ Thu Sep 15 02:32:36 2022 ] Top5: 82.01%
784
+ [ Thu Sep 15 02:32:36 2022 ] Training epoch: 106
785
+ [ Thu Sep 15 02:33:41 2022 ] Batch(84/243) done. Loss: 0.0073 lr:0.000100 network_time: 0.0274
786
+ [ Thu Sep 15 02:34:54 2022 ] Batch(184/243) done. Loss: 0.0051 lr:0.000100 network_time: 0.0267
787
+ [ Thu Sep 15 02:35:37 2022 ] Eval epoch: 106
788
+ [ Thu Sep 15 02:37:10 2022 ] Mean test loss of 796 batches: 2.712216377258301.
789
+ [ Thu Sep 15 02:37:11 2022 ] Top1: 53.32%
790
+ [ Thu Sep 15 02:37:11 2022 ] Top5: 82.78%
791
+ [ Thu Sep 15 02:37:11 2022 ] Training epoch: 107
792
+ [ Thu Sep 15 02:37:45 2022 ] Batch(41/243) done. Loss: 0.0143 lr:0.000100 network_time: 0.0266
793
+ [ Thu Sep 15 02:38:58 2022 ] Batch(141/243) done. Loss: 0.0063 lr:0.000100 network_time: 0.0290
794
+ [ Thu Sep 15 02:40:11 2022 ] Batch(241/243) done. Loss: 0.0055 lr:0.000100 network_time: 0.0302
795
+ [ Thu Sep 15 02:40:12 2022 ] Eval epoch: 107
796
+ [ Thu Sep 15 02:41:45 2022 ] Mean test loss of 796 batches: 2.7313079833984375.
797
+ [ Thu Sep 15 02:41:46 2022 ] Top1: 52.23%
798
+ [ Thu Sep 15 02:41:47 2022 ] Top5: 82.21%
799
+ [ Thu Sep 15 02:41:47 2022 ] Training epoch: 108
800
+ [ Thu Sep 15 02:43:02 2022 ] Batch(98/243) done. Loss: 0.0028 lr:0.000100 network_time: 0.0365
801
+ [ Thu Sep 15 02:44:15 2022 ] Batch(198/243) done. Loss: 0.0055 lr:0.000100 network_time: 0.0281
802
+ [ Thu Sep 15 02:44:48 2022 ] Eval epoch: 108
803
+ [ Thu Sep 15 02:46:21 2022 ] Mean test loss of 796 batches: 2.706906318664551.
804
+ [ Thu Sep 15 02:46:22 2022 ] Top1: 53.04%
805
+ [ Thu Sep 15 02:46:22 2022 ] Top5: 82.57%
806
+ [ Thu Sep 15 02:46:22 2022 ] Training epoch: 109
807
+ [ Thu Sep 15 02:47:06 2022 ] Batch(55/243) done. Loss: 0.0072 lr:0.000100 network_time: 0.0318
808
+ [ Thu Sep 15 02:48:18 2022 ] Batch(155/243) done. Loss: 0.0065 lr:0.000100 network_time: 0.0317
809
+ [ Thu Sep 15 02:49:22 2022 ] Eval epoch: 109
810
+ [ Thu Sep 15 02:50:56 2022 ] Mean test loss of 796 batches: 2.666076898574829.
811
+ [ Thu Sep 15 02:50:56 2022 ] Top1: 53.15%
812
+ [ Thu Sep 15 02:50:56 2022 ] Top5: 82.74%
813
+ [ Thu Sep 15 02:50:57 2022 ] Training epoch: 110
814
+ [ Thu Sep 15 02:51:09 2022 ] Batch(12/243) done. Loss: 0.0069 lr:0.000100 network_time: 0.0263
815
+ [ Thu Sep 15 02:52:22 2022 ] Batch(112/243) done. Loss: 0.0100 lr:0.000100 network_time: 0.0275
816
+ [ Thu Sep 15 02:53:35 2022 ] Batch(212/243) done. Loss: 0.0072 lr:0.000100 network_time: 0.0272
817
+ [ Thu Sep 15 02:53:57 2022 ] Eval epoch: 110
818
+ [ Thu Sep 15 02:55:30 2022 ] Mean test loss of 796 batches: 2.782313585281372.
819
+ [ Thu Sep 15 02:55:31 2022 ] Top1: 50.79%
820
+ [ Thu Sep 15 02:55:31 2022 ] Top5: 81.20%
821
+ [ Thu Sep 15 02:55:31 2022 ] Training epoch: 111
822
+ [ Thu Sep 15 02:56:25 2022 ] Batch(69/243) done. Loss: 0.0072 lr:0.000100 network_time: 0.0277
823
+ [ Thu Sep 15 02:57:38 2022 ] Batch(169/243) done. Loss: 0.0077 lr:0.000100 network_time: 0.0303
824
+ [ Thu Sep 15 02:58:31 2022 ] Eval epoch: 111
825
+ [ Thu Sep 15 03:00:05 2022 ] Mean test loss of 796 batches: 2.7520694732666016.
826
+ [ Thu Sep 15 03:00:06 2022 ] Top1: 52.88%
827
+ [ Thu Sep 15 03:00:06 2022 ] Top5: 82.42%
828
+ [ Thu Sep 15 03:00:06 2022 ] Training epoch: 112
829
+ [ Thu Sep 15 03:00:29 2022 ] Batch(26/243) done. Loss: 0.0079 lr:0.000100 network_time: 0.0264
830
+ [ Thu Sep 15 03:01:42 2022 ] Batch(126/243) done. Loss: 0.0070 lr:0.000100 network_time: 0.0292
831
+ [ Thu Sep 15 03:02:54 2022 ] Batch(226/243) done. Loss: 0.0054 lr:0.000100 network_time: 0.0280
832
+ [ Thu Sep 15 03:03:06 2022 ] Eval epoch: 112
833
+ [ Thu Sep 15 03:04:39 2022 ] Mean test loss of 796 batches: 2.70778226852417.
834
+ [ Thu Sep 15 03:04:40 2022 ] Top1: 52.64%
835
+ [ Thu Sep 15 03:04:40 2022 ] Top5: 82.31%
836
+ [ Thu Sep 15 03:04:40 2022 ] Training epoch: 113
837
+ [ Thu Sep 15 03:05:45 2022 ] Batch(83/243) done. Loss: 0.0080 lr:0.000100 network_time: 0.0270
838
+ [ Thu Sep 15 03:06:57 2022 ] Batch(183/243) done. Loss: 0.0024 lr:0.000100 network_time: 0.0257
839
+ [ Thu Sep 15 03:07:41 2022 ] Eval epoch: 113
840
+ [ Thu Sep 15 03:09:14 2022 ] Mean test loss of 796 batches: 2.731163740158081.
841
+ [ Thu Sep 15 03:09:15 2022 ] Top1: 52.66%
842
+ [ Thu Sep 15 03:09:15 2022 ] Top5: 82.24%
843
+ [ Thu Sep 15 03:09:16 2022 ] Training epoch: 114
844
+ [ Thu Sep 15 03:09:48 2022 ] Batch(40/243) done. Loss: 0.0099 lr:0.000100 network_time: 0.0255
845
+ [ Thu Sep 15 03:11:01 2022 ] Batch(140/243) done. Loss: 0.0066 lr:0.000100 network_time: 0.0272
846
+ [ Thu Sep 15 03:12:14 2022 ] Batch(240/243) done. Loss: 0.0070 lr:0.000100 network_time: 0.0321
847
+ [ Thu Sep 15 03:12:16 2022 ] Eval epoch: 114
848
+ [ Thu Sep 15 03:13:49 2022 ] Mean test loss of 796 batches: 2.6991231441497803.
849
+ [ Thu Sep 15 03:13:49 2022 ] Top1: 52.93%
850
+ [ Thu Sep 15 03:13:50 2022 ] Top5: 82.75%
851
+ [ Thu Sep 15 03:13:50 2022 ] Training epoch: 115
852
+ [ Thu Sep 15 03:15:04 2022 ] Batch(97/243) done. Loss: 0.0059 lr:0.000100 network_time: 0.0321
853
+ [ Thu Sep 15 03:16:17 2022 ] Batch(197/243) done. Loss: 0.0041 lr:0.000100 network_time: 0.0268
854
+ [ Thu Sep 15 03:16:50 2022 ] Eval epoch: 115
855
+ [ Thu Sep 15 03:18:23 2022 ] Mean test loss of 796 batches: 2.6701998710632324.
856
+ [ Thu Sep 15 03:18:24 2022 ] Top1: 53.25%
857
+ [ Thu Sep 15 03:18:24 2022 ] Top5: 82.79%
858
+ [ Thu Sep 15 03:18:24 2022 ] Training epoch: 116
859
+ [ Thu Sep 15 03:19:07 2022 ] Batch(54/243) done. Loss: 0.0048 lr:0.000100 network_time: 0.0278
860
+ [ Thu Sep 15 03:20:20 2022 ] Batch(154/243) done. Loss: 0.0053 lr:0.000100 network_time: 0.0278
861
+ [ Thu Sep 15 03:21:24 2022 ] Eval epoch: 116
862
+ [ Thu Sep 15 03:22:57 2022 ] Mean test loss of 796 batches: 2.6070761680603027.
863
+ [ Thu Sep 15 03:22:58 2022 ] Top1: 53.54%
864
+ [ Thu Sep 15 03:22:58 2022 ] Top5: 83.07%
865
+ [ Thu Sep 15 03:22:58 2022 ] Training epoch: 117
866
+ [ Thu Sep 15 03:23:10 2022 ] Batch(11/243) done. Loss: 0.0049 lr:0.000100 network_time: 0.0337
867
+ [ Thu Sep 15 03:24:23 2022 ] Batch(111/243) done. Loss: 0.0064 lr:0.000100 network_time: 0.0279
868
+ [ Thu Sep 15 03:25:36 2022 ] Batch(211/243) done. Loss: 0.0091 lr:0.000100 network_time: 0.0264
869
+ [ Thu Sep 15 03:25:59 2022 ] Eval epoch: 117
870
+ [ Thu Sep 15 03:27:32 2022 ] Mean test loss of 796 batches: 2.7557594776153564.
871
+ [ Thu Sep 15 03:27:33 2022 ] Top1: 53.14%
872
+ [ Thu Sep 15 03:27:34 2022 ] Top5: 82.57%
873
+ [ Thu Sep 15 03:27:34 2022 ] Training epoch: 118
874
+ [ Thu Sep 15 03:28:27 2022 ] Batch(68/243) done. Loss: 0.0038 lr:0.000100 network_time: 0.0273
875
+ [ Thu Sep 15 03:29:40 2022 ] Batch(168/243) done. Loss: 0.0060 lr:0.000100 network_time: 0.0276
876
+ [ Thu Sep 15 03:30:34 2022 ] Eval epoch: 118
877
+ [ Thu Sep 15 03:32:08 2022 ] Mean test loss of 796 batches: 2.7875142097473145.
878
+ [ Thu Sep 15 03:32:08 2022 ] Top1: 51.65%
879
+ [ Thu Sep 15 03:32:09 2022 ] Top5: 81.81%
880
+ [ Thu Sep 15 03:32:09 2022 ] Training epoch: 119
881
+ [ Thu Sep 15 03:32:31 2022 ] Batch(25/243) done. Loss: 0.0088 lr:0.000100 network_time: 0.0341
882
+ [ Thu Sep 15 03:33:44 2022 ] Batch(125/243) done. Loss: 0.0112 lr:0.000100 network_time: 0.0273
883
+ [ Thu Sep 15 03:34:57 2022 ] Batch(225/243) done. Loss: 0.0063 lr:0.000100 network_time: 0.0305
884
+ [ Thu Sep 15 03:35:09 2022 ] Eval epoch: 119
885
+ [ Thu Sep 15 03:36:42 2022 ] Mean test loss of 796 batches: 2.72087025642395.
886
+ [ Thu Sep 15 03:36:43 2022 ] Top1: 52.42%
887
+ [ Thu Sep 15 03:36:43 2022 ] Top5: 82.27%
888
+ [ Thu Sep 15 03:36:44 2022 ] Training epoch: 120
889
+ [ Thu Sep 15 03:37:47 2022 ] Batch(82/243) done. Loss: 0.0040 lr:0.000100 network_time: 0.0303
890
+ [ Thu Sep 15 03:39:00 2022 ] Batch(182/243) done. Loss: 0.0095 lr:0.000100 network_time: 0.0283
891
+ [ Thu Sep 15 03:39:44 2022 ] Eval epoch: 120
892
+ [ Thu Sep 15 03:41:17 2022 ] Mean test loss of 796 batches: 2.7935328483581543.
893
+ [ Thu Sep 15 03:41:17 2022 ] Top1: 53.19%
894
+ [ Thu Sep 15 03:41:17 2022 ] Top5: 82.61%
895
+ [ Thu Sep 15 03:41:18 2022 ] Training epoch: 121
896
+ [ Thu Sep 15 03:41:50 2022 ] Batch(39/243) done. Loss: 0.0101 lr:0.000100 network_time: 0.0292
897
+ [ Thu Sep 15 03:43:03 2022 ] Batch(139/243) done. Loss: 0.0053 lr:0.000100 network_time: 0.0309
898
+ [ Thu Sep 15 03:44:16 2022 ] Batch(239/243) done. Loss: 0.0054 lr:0.000100 network_time: 0.0265
899
+ [ Thu Sep 15 03:44:18 2022 ] Eval epoch: 121
900
+ [ Thu Sep 15 03:45:51 2022 ] Mean test loss of 796 batches: 2.624255657196045.
901
+ [ Thu Sep 15 03:45:52 2022 ] Top1: 53.57%
902
+ [ Thu Sep 15 03:45:52 2022 ] Top5: 83.05%
903
+ [ Thu Sep 15 03:45:53 2022 ] Training epoch: 122
904
+ [ Thu Sep 15 03:47:06 2022 ] Batch(96/243) done. Loss: 0.0058 lr:0.000100 network_time: 0.0307
905
+ [ Thu Sep 15 03:48:19 2022 ] Batch(196/243) done. Loss: 0.0084 lr:0.000100 network_time: 0.0303
906
+ [ Thu Sep 15 03:48:53 2022 ] Eval epoch: 122
907
+ [ Thu Sep 15 03:50:26 2022 ] Mean test loss of 796 batches: 2.694089889526367.
908
+ [ Thu Sep 15 03:50:26 2022 ] Top1: 52.66%
909
+ [ Thu Sep 15 03:50:27 2022 ] Top5: 82.58%
910
+ [ Thu Sep 15 03:50:27 2022 ] Training epoch: 123
911
+ [ Thu Sep 15 03:51:09 2022 ] Batch(53/243) done. Loss: 0.0045 lr:0.000100 network_time: 0.0282
912
+ [ Thu Sep 15 03:52:22 2022 ] Batch(153/243) done. Loss: 0.0060 lr:0.000100 network_time: 0.0311
913
+ [ Thu Sep 15 03:53:27 2022 ] Eval epoch: 123
914
+ [ Thu Sep 15 03:55:01 2022 ] Mean test loss of 796 batches: 2.6755058765411377.
915
+ [ Thu Sep 15 03:55:01 2022 ] Top1: 52.82%
916
+ [ Thu Sep 15 03:55:01 2022 ] Top5: 82.64%
917
+ [ Thu Sep 15 03:55:02 2022 ] Training epoch: 124
918
+ [ Thu Sep 15 03:55:13 2022 ] Batch(10/243) done. Loss: 0.0052 lr:0.000100 network_time: 0.0279
919
+ [ Thu Sep 15 03:56:26 2022 ] Batch(110/243) done. Loss: 0.0048 lr:0.000100 network_time: 0.0298
920
+ [ Thu Sep 15 03:57:39 2022 ] Batch(210/243) done. Loss: 0.0032 lr:0.000100 network_time: 0.0269
921
+ [ Thu Sep 15 03:58:02 2022 ] Eval epoch: 124
922
+ [ Thu Sep 15 03:59:35 2022 ] Mean test loss of 796 batches: 2.736020803451538.
923
+ [ Thu Sep 15 03:59:36 2022 ] Top1: 53.41%
924
+ [ Thu Sep 15 03:59:36 2022 ] Top5: 82.84%
925
+ [ Thu Sep 15 03:59:36 2022 ] Training epoch: 125
926
+ [ Thu Sep 15 04:00:29 2022 ] Batch(67/243) done. Loss: 0.0056 lr:0.000100 network_time: 0.0287
927
+ [ Thu Sep 15 04:01:42 2022 ] Batch(167/243) done. Loss: 0.0105 lr:0.000100 network_time: 0.0271
928
+ [ Thu Sep 15 04:02:37 2022 ] Eval epoch: 125
929
+ [ Thu Sep 15 04:04:10 2022 ] Mean test loss of 796 batches: 2.765360116958618.
930
+ [ Thu Sep 15 04:04:10 2022 ] Top1: 51.01%
931
+ [ Thu Sep 15 04:04:10 2022 ] Top5: 81.69%
932
+ [ Thu Sep 15 04:04:11 2022 ] Training epoch: 126
933
+ [ Thu Sep 15 04:04:32 2022 ] Batch(24/243) done. Loss: 0.0049 lr:0.000100 network_time: 0.0307
934
+ [ Thu Sep 15 04:05:45 2022 ] Batch(124/243) done. Loss: 0.0100 lr:0.000100 network_time: 0.0299
935
+ [ Thu Sep 15 04:06:58 2022 ] Batch(224/243) done. Loss: 0.0066 lr:0.000100 network_time: 0.0310
936
+ [ Thu Sep 15 04:07:11 2022 ] Eval epoch: 126
937
+ [ Thu Sep 15 04:08:44 2022 ] Mean test loss of 796 batches: 2.742004156112671.
938
+ [ Thu Sep 15 04:08:44 2022 ] Top1: 53.03%
939
+ [ Thu Sep 15 04:08:45 2022 ] Top5: 82.69%
940
+ [ Thu Sep 15 04:08:45 2022 ] Training epoch: 127
941
+ [ Thu Sep 15 04:09:48 2022 ] Batch(81/243) done. Loss: 0.0123 lr:0.000100 network_time: 0.0330
942
+ [ Thu Sep 15 04:11:01 2022 ] Batch(181/243) done. Loss: 0.0086 lr:0.000100 network_time: 0.0273
943
+ [ Thu Sep 15 04:11:45 2022 ] Eval epoch: 127
944
+ [ Thu Sep 15 04:13:19 2022 ] Mean test loss of 796 batches: 2.7168538570404053.
945
+ [ Thu Sep 15 04:13:19 2022 ] Top1: 52.94%
946
+ [ Thu Sep 15 04:13:19 2022 ] Top5: 82.64%
947
+ [ Thu Sep 15 04:13:20 2022 ] Training epoch: 128
948
+ [ Thu Sep 15 04:13:51 2022 ] Batch(38/243) done. Loss: 0.0030 lr:0.000100 network_time: 0.0319
949
+ [ Thu Sep 15 04:15:04 2022 ] Batch(138/243) done. Loss: 0.0057 lr:0.000100 network_time: 0.0274
950
+ [ Thu Sep 15 04:16:17 2022 ] Batch(238/243) done. Loss: 0.0104 lr:0.000100 network_time: 0.0268
951
+ [ Thu Sep 15 04:16:20 2022 ] Eval epoch: 128
952
+ [ Thu Sep 15 04:17:53 2022 ] Mean test loss of 796 batches: 2.6489810943603516.
953
+ [ Thu Sep 15 04:17:53 2022 ] Top1: 53.19%
954
+ [ Thu Sep 15 04:17:54 2022 ] Top5: 82.92%
955
+ [ Thu Sep 15 04:17:54 2022 ] Training epoch: 129
956
+ [ Thu Sep 15 04:19:07 2022 ] Batch(95/243) done. Loss: 0.0094 lr:0.000100 network_time: 0.0305
957
+ [ Thu Sep 15 04:20:19 2022 ] Batch(195/243) done. Loss: 0.0079 lr:0.000100 network_time: 0.0319
958
+ [ Thu Sep 15 04:20:54 2022 ] Eval epoch: 129
959
+ [ Thu Sep 15 04:22:28 2022 ] Mean test loss of 796 batches: 2.7015435695648193.
960
+ [ Thu Sep 15 04:22:28 2022 ] Top1: 51.93%
961
+ [ Thu Sep 15 04:22:29 2022 ] Top5: 82.12%
962
+ [ Thu Sep 15 04:22:29 2022 ] Training epoch: 130
963
+ [ Thu Sep 15 04:23:11 2022 ] Batch(52/243) done. Loss: 0.0055 lr:0.000100 network_time: 0.0307
964
+ [ Thu Sep 15 04:24:23 2022 ] Batch(152/243) done. Loss: 0.0051 lr:0.000100 network_time: 0.0332
965
+ [ Thu Sep 15 04:25:29 2022 ] Eval epoch: 130
966
+ [ Thu Sep 15 04:27:03 2022 ] Mean test loss of 796 batches: 2.6610214710235596.
967
+ [ Thu Sep 15 04:27:04 2022 ] Top1: 53.26%
968
+ [ Thu Sep 15 04:27:04 2022 ] Top5: 82.82%
969
+ [ Thu Sep 15 04:27:04 2022 ] Training epoch: 131
970
+ [ Thu Sep 15 04:27:15 2022 ] Batch(9/243) done. Loss: 0.0044 lr:0.000100 network_time: 0.0285
971
+ [ Thu Sep 15 04:28:28 2022 ] Batch(109/243) done. Loss: 0.0049 lr:0.000100 network_time: 0.0311
972
+ [ Thu Sep 15 04:29:41 2022 ] Batch(209/243) done. Loss: 0.0111 lr:0.000100 network_time: 0.0398
973
+ [ Thu Sep 15 04:30:05 2022 ] Eval epoch: 131
974
+ [ Thu Sep 15 04:31:39 2022 ] Mean test loss of 796 batches: 2.6898159980773926.
975
+ [ Thu Sep 15 04:31:39 2022 ] Top1: 53.02%
976
+ [ Thu Sep 15 04:31:40 2022 ] Top5: 82.60%
977
+ [ Thu Sep 15 04:31:40 2022 ] Training epoch: 132
978
+ [ Thu Sep 15 04:32:32 2022 ] Batch(66/243) done. Loss: 0.0019 lr:0.000100 network_time: 0.0278
979
+ [ Thu Sep 15 04:33:45 2022 ] Batch(166/243) done. Loss: 0.0094 lr:0.000100 network_time: 0.0226
980
+ [ Thu Sep 15 04:34:40 2022 ] Eval epoch: 132
981
+ [ Thu Sep 15 04:36:14 2022 ] Mean test loss of 796 batches: 2.6874382495880127.
982
+ [ Thu Sep 15 04:36:14 2022 ] Top1: 52.61%
983
+ [ Thu Sep 15 04:36:15 2022 ] Top5: 82.28%
984
+ [ Thu Sep 15 04:36:15 2022 ] Training epoch: 133
985
+ [ Thu Sep 15 04:36:35 2022 ] Batch(23/243) done. Loss: 0.0040 lr:0.000100 network_time: 0.0257
986
+ [ Thu Sep 15 04:37:48 2022 ] Batch(123/243) done. Loss: 0.0057 lr:0.000100 network_time: 0.0328
987
+ [ Thu Sep 15 04:39:01 2022 ] Batch(223/243) done. Loss: 0.0105 lr:0.000100 network_time: 0.0271
988
+ [ Thu Sep 15 04:39:15 2022 ] Eval epoch: 133
989
+ [ Thu Sep 15 04:40:48 2022 ] Mean test loss of 796 batches: 2.885190725326538.
990
+ [ Thu Sep 15 04:40:48 2022 ] Top1: 49.37%
991
+ [ Thu Sep 15 04:40:49 2022 ] Top5: 80.21%
992
+ [ Thu Sep 15 04:40:49 2022 ] Training epoch: 134
993
+ [ Thu Sep 15 04:41:51 2022 ] Batch(80/243) done. Loss: 0.0058 lr:0.000100 network_time: 0.0280
994
+ [ Thu Sep 15 04:43:04 2022 ] Batch(180/243) done. Loss: 0.0040 lr:0.000100 network_time: 0.0283
995
+ [ Thu Sep 15 04:43:49 2022 ] Eval epoch: 134
996
+ [ Thu Sep 15 04:45:22 2022 ] Mean test loss of 796 batches: 2.6978790760040283.
997
+ [ Thu Sep 15 04:45:23 2022 ] Top1: 52.91%
998
+ [ Thu Sep 15 04:45:23 2022 ] Top5: 82.54%
999
+ [ Thu Sep 15 04:45:23 2022 ] Training epoch: 135
1000
+ [ Thu Sep 15 04:45:54 2022 ] Batch(37/243) done. Loss: 0.0038 lr:0.000100 network_time: 0.0266
1001
+ [ Thu Sep 15 04:47:07 2022 ] Batch(137/243) done. Loss: 0.0059 lr:0.000100 network_time: 0.0311
1002
+ [ Thu Sep 15 04:48:20 2022 ] Batch(237/243) done. Loss: 0.0091 lr:0.000100 network_time: 0.0278
1003
+ [ Thu Sep 15 04:48:24 2022 ] Eval epoch: 135
1004
+ [ Thu Sep 15 04:49:58 2022 ] Mean test loss of 796 batches: 2.644139289855957.
1005
+ [ Thu Sep 15 04:49:58 2022 ] Top1: 53.61%
1006
+ [ Thu Sep 15 04:49:58 2022 ] Top5: 83.10%
1007
+ [ Thu Sep 15 04:49:58 2022 ] Training epoch: 136
1008
+ [ Thu Sep 15 04:51:11 2022 ] Batch(94/243) done. Loss: 0.0077 lr:0.000100 network_time: 0.0305
1009
+ [ Thu Sep 15 04:52:24 2022 ] Batch(194/243) done. Loss: 0.0081 lr:0.000100 network_time: 0.0272
1010
+ [ Thu Sep 15 04:52:59 2022 ] Eval epoch: 136
1011
+ [ Thu Sep 15 04:54:32 2022 ] Mean test loss of 796 batches: 2.7351062297821045.
1012
+ [ Thu Sep 15 04:54:33 2022 ] Top1: 53.33%
1013
+ [ Thu Sep 15 04:54:34 2022 ] Top5: 82.60%
1014
+ [ Thu Sep 15 04:54:34 2022 ] Training epoch: 137
1015
+ [ Thu Sep 15 04:55:15 2022 ] Batch(51/243) done. Loss: 0.0118 lr:0.000100 network_time: 0.0272
1016
+ [ Thu Sep 15 04:56:27 2022 ] Batch(151/243) done. Loss: 0.0092 lr:0.000100 network_time: 0.0269
1017
+ [ Thu Sep 15 04:57:34 2022 ] Eval epoch: 137
1018
+ [ Thu Sep 15 04:59:07 2022 ] Mean test loss of 796 batches: 2.6785876750946045.
1019
+ [ Thu Sep 15 04:59:07 2022 ] Top1: 53.81%
1020
+ [ Thu Sep 15 04:59:08 2022 ] Top5: 83.08%
1021
+ [ Thu Sep 15 04:59:08 2022 ] Training epoch: 138
1022
+ [ Thu Sep 15 04:59:18 2022 ] Batch(8/243) done. Loss: 0.0041 lr:0.000100 network_time: 0.0278
1023
+ [ Thu Sep 15 05:00:30 2022 ] Batch(108/243) done. Loss: 0.0059 lr:0.000100 network_time: 0.0266
1024
+ [ Thu Sep 15 05:01:43 2022 ] Batch(208/243) done. Loss: 0.0105 lr:0.000100 network_time: 0.0305
1025
+ [ Thu Sep 15 05:02:08 2022 ] Eval epoch: 138
1026
+ [ Thu Sep 15 05:03:42 2022 ] Mean test loss of 796 batches: 2.7690837383270264.
1027
+ [ Thu Sep 15 05:03:42 2022 ] Top1: 52.37%
1028
+ [ Thu Sep 15 05:03:43 2022 ] Top5: 82.15%
1029
+ [ Thu Sep 15 05:03:43 2022 ] Training epoch: 139
1030
+ [ Thu Sep 15 05:04:34 2022 ] Batch(65/243) done. Loss: 0.0055 lr:0.000100 network_time: 0.0451
1031
+ [ Thu Sep 15 05:05:47 2022 ] Batch(165/243) done. Loss: 0.0025 lr:0.000100 network_time: 0.0278
1032
+ [ Thu Sep 15 05:06:43 2022 ] Eval epoch: 139
1033
+ [ Thu Sep 15 05:08:16 2022 ] Mean test loss of 796 batches: 2.723361015319824.
1034
+ [ Thu Sep 15 05:08:16 2022 ] Top1: 52.81%
1035
+ [ Thu Sep 15 05:08:17 2022 ] Top5: 82.44%
1036
+ [ Thu Sep 15 05:08:17 2022 ] Training epoch: 140
1037
+ [ Thu Sep 15 05:08:36 2022 ] Batch(22/243) done. Loss: 0.0100 lr:0.000100 network_time: 0.0315
1038
+ [ Thu Sep 15 05:09:49 2022 ] Batch(122/243) done. Loss: 0.0170 lr:0.000100 network_time: 0.0297
1039
+ [ Thu Sep 15 05:11:02 2022 ] Batch(222/243) done. Loss: 0.0133 lr:0.000100 network_time: 0.0276
1040
+ [ Thu Sep 15 05:11:17 2022 ] Eval epoch: 140
1041
+ [ Thu Sep 15 05:12:49 2022 ] Mean test loss of 796 batches: 2.719053030014038.
1042
+ [ Thu Sep 15 05:12:50 2022 ] Top1: 53.15%
1043
+ [ Thu Sep 15 05:12:50 2022 ] Top5: 82.74%
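The Top1/Top5 figures printed after each "Eval epoch" in the log above are top-k accuracies over the 796 test batches, and the lr column drops from 0.1 to 0.01, 0.001 and 0.0001 at epochs 61, 81 and 101, consistent with the step: [60, 80, 100] setting in the accompanying config files. A minimal sketch of how such top-k numbers can be computed from a score matrix (illustrative names, not code from this repository):

    import numpy as np

    def topk_accuracy(scores, labels, k):
        # scores: (num_samples, num_classes) class scores; labels: (num_samples,) int array
        topk = np.argsort(scores, axis=1)[:, -k:]                 # indices of the k highest-scoring classes
        hits = [labels[i] in topk[i] for i in range(len(labels))]
        return 100.0 * float(np.mean(hits))

    # e.g. top1 = topk_accuracy(scores, labels, 1); top5 = topk_accuracy(scores, labels, 5)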
ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_bone_motion_xsub/shift_gcn.py ADDED
@@ -0,0 +1,216 @@
+ import torch
+ import torch.nn as nn
+ import torch.nn.functional as F
+ from torch.autograd import Variable
+ import numpy as np
+ import math
+
+ import sys
+ sys.path.append("./model/Temporal_shift/")
+
+ from cuda.shift import Shift
+
+
+ def import_class(name):
+     components = name.split('.')
+     mod = __import__(components[0])
+     for comp in components[1:]:
+         mod = getattr(mod, comp)
+     return mod
+
+ def conv_init(conv):
+     nn.init.kaiming_normal(conv.weight, mode='fan_out')
+     nn.init.constant(conv.bias, 0)
+
+
+ def bn_init(bn, scale):
+     nn.init.constant(bn.weight, scale)
+     nn.init.constant(bn.bias, 0)
+
+
+ class tcn(nn.Module):
+     def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
+         super(tcn, self).__init__()
+         pad = int((kernel_size - 1) / 2)
+         self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
+                               stride=(stride, 1))
+
+         self.bn = nn.BatchNorm2d(out_channels)
+         self.relu = nn.ReLU()
+         conv_init(self.conv)
+         bn_init(self.bn, 1)
+
+     def forward(self, x):
+         x = self.bn(self.conv(x))
+         return x
+
+
+ class Shift_tcn(nn.Module):
+     def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
+         super(Shift_tcn, self).__init__()
+
+         self.in_channels = in_channels
+         self.out_channels = out_channels
+
+         self.bn = nn.BatchNorm2d(in_channels)
+         self.bn2 = nn.BatchNorm2d(in_channels)
+         bn_init(self.bn2, 1)
+         self.relu = nn.ReLU(inplace=True)
+         self.shift_in = Shift(channel=in_channels, stride=1, init_scale=1)
+         self.shift_out = Shift(channel=out_channels, stride=stride, init_scale=1)
+
+         self.temporal_linear = nn.Conv2d(in_channels, out_channels, 1)
+         nn.init.kaiming_normal(self.temporal_linear.weight, mode='fan_out')
+
+     def forward(self, x):
+         x = self.bn(x)
+         # shift1
+         x = self.shift_in(x)
+         x = self.temporal_linear(x)
+         x = self.relu(x)
+         # shift2
+         x = self.shift_out(x)
+         x = self.bn2(x)
+         return x
+
+
+ class Shift_gcn(nn.Module):
+     def __init__(self, in_channels, out_channels, A, coff_embedding=4, num_subset=3):
+         super(Shift_gcn, self).__init__()
+         self.in_channels = in_channels
+         self.out_channels = out_channels
+         if in_channels != out_channels:
+             self.down = nn.Sequential(
+                 nn.Conv2d(in_channels, out_channels, 1),
+                 nn.BatchNorm2d(out_channels)
+             )
+         else:
+             self.down = lambda x: x
+
+         self.Linear_weight = nn.Parameter(torch.zeros(in_channels, out_channels, requires_grad=True, device='cuda'), requires_grad=True)
+         nn.init.normal_(self.Linear_weight, 0, math.sqrt(1.0/out_channels))
+
+         self.Linear_bias = nn.Parameter(torch.zeros(1, 1, out_channels, requires_grad=True, device='cuda'), requires_grad=True)
+         nn.init.constant(self.Linear_bias, 0)
+
+         self.Feature_Mask = nn.Parameter(torch.ones(1, 25, in_channels, requires_grad=True, device='cuda'), requires_grad=True)
+         nn.init.constant(self.Feature_Mask, 0)
+
+         self.bn = nn.BatchNorm1d(25*out_channels)
+         self.relu = nn.ReLU()
+
+         for m in self.modules():
+             if isinstance(m, nn.Conv2d):
+                 conv_init(m)
+             elif isinstance(m, nn.BatchNorm2d):
+                 bn_init(m, 1)
+
+         index_array = np.empty(25*in_channels).astype(np.int)
+         for i in range(25):
+             for j in range(in_channels):
+                 index_array[i*in_channels + j] = (i*in_channels + j + j*in_channels) % (in_channels*25)
+         self.shift_in = nn.Parameter(torch.from_numpy(index_array), requires_grad=False)
+
+         index_array = np.empty(25*out_channels).astype(np.int)
+         for i in range(25):
+             for j in range(out_channels):
+                 index_array[i*out_channels + j] = (i*out_channels + j - j*out_channels) % (out_channels*25)
+         self.shift_out = nn.Parameter(torch.from_numpy(index_array), requires_grad=False)
+
+
+     def forward(self, x0):
+         n, c, t, v = x0.size()
+         x = x0.permute(0, 2, 3, 1).contiguous()
+
+         # shift1
+         x = x.view(n*t, v*c)
+         x = torch.index_select(x, 1, self.shift_in)
+         x = x.view(n*t, v, c)
+         x = x * (torch.tanh(self.Feature_Mask) + 1)
+
+         x = torch.einsum('nwc,cd->nwd', (x, self.Linear_weight)).contiguous()  # nt,v,c
+         x = x + self.Linear_bias
+
+         # shift2
+         x = x.view(n*t, -1)
+         x = torch.index_select(x, 1, self.shift_out)
+         x = self.bn(x)
+         x = x.view(n, t, v, self.out_channels).permute(0, 3, 1, 2)  # n,c,t,v
+
+         x = x + self.down(x0)
+         x = self.relu(x)
+         return x
+
+
+ class TCN_GCN_unit(nn.Module):
+     def __init__(self, in_channels, out_channels, A, stride=1, residual=True):
+         super(TCN_GCN_unit, self).__init__()
+         self.gcn1 = Shift_gcn(in_channels, out_channels, A)
+         self.tcn1 = Shift_tcn(out_channels, out_channels, stride=stride)
+         self.relu = nn.ReLU()
+
+         if not residual:
+             self.residual = lambda x: 0
+
+         elif (in_channels == out_channels) and (stride == 1):
+             self.residual = lambda x: x
+         else:
+             self.residual = tcn(in_channels, out_channels, kernel_size=1, stride=stride)
+
+     def forward(self, x):
+         x = self.tcn1(self.gcn1(x)) + self.residual(x)
+         return self.relu(x)
+
+
+ class Model(nn.Module):
+     def __init__(self, num_class=60, num_point=25, num_person=2, graph=None, graph_args=dict(), in_channels=3):
+         super(Model, self).__init__()
+
+         if graph is None:
+             raise ValueError()
+         else:
+             Graph = import_class(graph)
+             self.graph = Graph(**graph_args)
+
+         A = self.graph.A
+         self.data_bn = nn.BatchNorm1d(num_person * in_channels * num_point)
+
+         self.l1 = TCN_GCN_unit(3, 64, A, residual=False)
+         self.l2 = TCN_GCN_unit(64, 64, A)
+         self.l3 = TCN_GCN_unit(64, 64, A)
+         self.l4 = TCN_GCN_unit(64, 64, A)
+         self.l5 = TCN_GCN_unit(64, 128, A, stride=2)
+         self.l6 = TCN_GCN_unit(128, 128, A)
+         self.l7 = TCN_GCN_unit(128, 128, A)
+         self.l8 = TCN_GCN_unit(128, 256, A, stride=2)
+         self.l9 = TCN_GCN_unit(256, 256, A)
+         self.l10 = TCN_GCN_unit(256, 256, A)
+
+         self.fc = nn.Linear(256, num_class)
+         nn.init.normal(self.fc.weight, 0, math.sqrt(2. / num_class))
+         bn_init(self.data_bn, 1)
+
+     def forward(self, x):
+         N, C, T, V, M = x.size()
+
+         x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
+         x = self.data_bn(x)
+         x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
+
+         x = self.l1(x)
+         x = self.l2(x)
+         x = self.l3(x)
+         x = self.l4(x)
+         x = self.l5(x)
+         x = self.l6(x)
+         x = self.l7(x)
+         x = self.l8(x)
+         x = self.l9(x)
+         x = self.l10(x)
+
+         # N*M,C,T,V
+         c_new = x.size(1)
+         x = x.view(N, M, c_new, -1)
+         x = x.mean(3).mean(1)
+
+         return self.fc(x)
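The file above is the stock Shift-GCN model definition: it depends on the compiled CUDA Shift op under ./model/Temporal_shift/ and on the repo's graph package, and it uses older PyTorch/NumPy names (nn.init.kaiming_normal, nn.init.constant, np.int) that newer releases warn about or have removed. A minimal, illustrative instantiation sketch (not part of the uploaded files; it assumes the Shift-GCN repo root is on PYTHONPATH and a CUDA device is available, since the parameters are created with device='cuda'):

    import torch
    from model.shift_gcn import Model  # the class referenced by `model: model.shift_gcn.Model` in the configs

    # NTU-style setup: 120 classes, 25 joints, up to 2 persons, spatial graph labeling
    net = Model(num_class=120, num_point=25, num_person=2,
                graph='graph.ntu_rgb_d.Graph',
                graph_args={'labeling_mode': 'spatial'}).cuda()

    x = torch.randn(2, 3, 64, 25, 2).cuda()  # (N, C, T, V, M), the layout Model.forward expects
    logits = net(x)                          # -> (2, 120) class scores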
ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_bone_xsub/config.yaml ADDED
@@ -0,0 +1,56 @@
+ Experiment_name: ntu120_bone_xsub
+ base_lr: 0.1
+ batch_size: 64
+ config: ./config/ntu120_xsub/train_bone.yaml
+ device:
+ - 0
+ - 1
+ eval_interval: 5
+ feeder: feeders.feeder.Feeder
+ ignore_weights: []
+ log_interval: 100
+ model: model.shift_gcn.Model
+ model_args:
+   graph: graph.ntu_rgb_d.Graph
+   graph_args:
+     labeling_mode: spatial
+   num_class: 120
+   num_person: 2
+   num_point: 25
+ model_saved_name: ./save_models/ntu120_bone_xsub
+ nesterov: true
+ num_epoch: 140
+ num_worker: 32
+ only_train_epoch: 1
+ only_train_part: true
+ optimizer: SGD
+ phase: train
+ print_log: true
+ save_interval: 2
+ save_score: false
+ seed: 1
+ show_topk:
+ - 1
+ - 5
+ start_epoch: 0
+ step:
+ - 60
+ - 80
+ - 100
+ test_batch_size: 64
+ test_feeder_args:
+   data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_bone.npy
+   label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl
+ train_feeder_args:
+   data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_bone.npy
+   debug: false
+   label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl
+   normalization: false
+   random_choose: false
+   random_move: false
+   random_shift: false
+   window_size: -1
+ warm_up_epoch: 0
+ weight_decay: 0.0001
+ weights: null
+ work_dir: ./work_dir/ntu120_bone_xsub
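This config is the argument dump written by the training script; the same values appear in the Parameters line of the corresponding log. A quick way to inspect such a file, as an illustrative sketch rather than a script from this repository:

    import yaml

    with open('ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_bone_xsub/config.yaml') as f:
        cfg = yaml.safe_load(f)

    print(cfg['model'])       # model.shift_gcn.Model
    print(cfg['model_args'])  # graph, graph_args, num_class=120, num_person=2, num_point=25
    print(cfg['step'])        # [60, 80, 100] -> epochs at which the learning rate is decayed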
ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_bone_xsub/eval_results/best_acc.pkl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a1052d9d03789bd0448c62585aae1cc2edbab1c92ceaf3e29ecfb558c94dd972
+ size 29946137
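best_acc.pkl is stored as a Git LFS pointer, so the roughly 30 MB pickle itself is only present after `git lfs pull`. In this code family the saved eval results are conventionally a dict mapping sample names to per-class score vectors, but that layout is an assumption here, not something documented in this upload; a hedged loading sketch:

    import pickle

    # assumes the LFS object has been fetched (git lfs pull) and the assumed dict layout
    with open('ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_bone_xsub/eval_results/best_acc.pkl', 'rb') as f:
        scores = pickle.load(f)

    print(len(scores))  # number of evaluated samples (assumed {sample_name: score_vector})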
ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_bone_xsub/log.txt ADDED
@@ -0,0 +1,1043 @@
1
+ [ Wed Sep 14 18:31:34 2022 ] Parameters:
2
+ {'work_dir': './work_dir/ntu120_bone_xsub', 'model_saved_name': './save_models/ntu120_bone_xsub', 'Experiment_name': 'ntu120_bone_xsub', 'config': './config/ntu120_xsub/train_bone.yaml', 'phase': 'train', 'save_score': False, 'seed': 1, 'log_interval': 100, 'save_interval': 2, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_bone.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_bone.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl'}, 'model': 'model.shift_gcn.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.1, 'step': [60, 80, 100], 'device': [0, 1], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 64, 'test_batch_size': 64, 'start_epoch': 0, 'num_epoch': 140, 'weight_decay': 0.0001, 'only_train_part': True, 'only_train_epoch': 1, 'warm_up_epoch': 0}
3
+
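The parameter dump above and the per-epoch "Eval epoch / Top1 / Top5" lines that follow are plain text and easy to post-process. A small sketch that scans such a log for the evaluation accuracies and reports the best epoch; the regular expressions and the default log path are assumptions about this exact layout:

import re

def best_top1(log_path='log.txt'):
    """Return (epoch, top1) for the highest Top1 reported in a Shift-GCN style training log."""
    epoch, results = None, []
    for line in open(log_path):
        m = re.search(r'Eval epoch: (\d+)', line)
        if m:
            epoch = int(m.group(1))
        m = re.search(r'Top1: ([\d.]+)%', line)
        if m and epoch is not None:
            results.append((epoch, float(m.group(1))))
    return max(results, key=lambda r: r[1])

# print(best_top1())  # reports the best evaluation epoch found in the log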
4
+ [ Wed Sep 14 18:31:34 2022 ] Training epoch: 1
5
+ [ Wed Sep 14 18:32:53 2022 ] Batch(99/243) done. Loss: 4.0333 lr:0.100000 network_time: 0.0258
6
+ [ Wed Sep 14 18:34:05 2022 ] Batch(199/243) done. Loss: 2.7616 lr:0.100000 network_time: 0.0307
7
+ [ Wed Sep 14 18:34:37 2022 ] Eval epoch: 1
8
+ [ Wed Sep 14 18:36:11 2022 ] Mean test loss of 796 batches: 5.216921806335449.
9
+ [ Wed Sep 14 18:36:12 2022 ] Top1: 8.18%
10
+ [ Wed Sep 14 18:36:12 2022 ] Top5: 22.12%
11
+ [ Wed Sep 14 18:36:12 2022 ] Training epoch: 2
12
+ [ Wed Sep 14 18:36:57 2022 ] Batch(56/243) done. Loss: 2.9609 lr:0.100000 network_time: 0.0478
13
+ [ Wed Sep 14 18:38:09 2022 ] Batch(156/243) done. Loss: 2.6439 lr:0.100000 network_time: 0.0272
14
+ [ Wed Sep 14 18:39:12 2022 ] Eval epoch: 2
15
+ [ Wed Sep 14 18:40:46 2022 ] Mean test loss of 796 batches: 4.073986053466797.
16
+ [ Wed Sep 14 18:40:46 2022 ] Top1: 16.06%
17
+ [ Wed Sep 14 18:40:46 2022 ] Top5: 34.36%
18
+ [ Wed Sep 14 18:40:47 2022 ] Training epoch: 3
19
+ [ Wed Sep 14 18:41:00 2022 ] Batch(13/243) done. Loss: 1.7683 lr:0.100000 network_time: 0.0276
20
+ [ Wed Sep 14 18:42:13 2022 ] Batch(113/243) done. Loss: 1.9985 lr:0.100000 network_time: 0.0261
21
+ [ Wed Sep 14 18:43:25 2022 ] Batch(213/243) done. Loss: 2.0935 lr:0.100000 network_time: 0.0276
22
+ [ Wed Sep 14 18:43:46 2022 ] Eval epoch: 3
23
+ [ Wed Sep 14 18:45:21 2022 ] Mean test loss of 796 batches: 3.8530473709106445.
24
+ [ Wed Sep 14 18:45:21 2022 ] Top1: 17.67%
25
+ [ Wed Sep 14 18:45:21 2022 ] Top5: 40.53%
26
+ [ Wed Sep 14 18:45:22 2022 ] Training epoch: 4
27
+ [ Wed Sep 14 18:46:16 2022 ] Batch(70/243) done. Loss: 1.3666 lr:0.100000 network_time: 0.0311
28
+ [ Wed Sep 14 18:47:28 2022 ] Batch(170/243) done. Loss: 1.5859 lr:0.100000 network_time: 0.0311
29
+ [ Wed Sep 14 18:48:21 2022 ] Eval epoch: 4
30
+ [ Wed Sep 14 18:49:55 2022 ] Mean test loss of 796 batches: 3.4396579265594482.
31
+ [ Wed Sep 14 18:49:56 2022 ] Top1: 22.98%
32
+ [ Wed Sep 14 18:49:56 2022 ] Top5: 51.04%
33
+ [ Wed Sep 14 18:49:56 2022 ] Training epoch: 5
34
+ [ Wed Sep 14 18:50:20 2022 ] Batch(27/243) done. Loss: 1.4973 lr:0.100000 network_time: 0.0293
35
+ [ Wed Sep 14 18:51:32 2022 ] Batch(127/243) done. Loss: 1.3234 lr:0.100000 network_time: 0.0268
36
+ [ Wed Sep 14 18:52:45 2022 ] Batch(227/243) done. Loss: 1.5324 lr:0.100000 network_time: 0.0314
37
+ [ Wed Sep 14 18:52:56 2022 ] Eval epoch: 5
38
+ [ Wed Sep 14 18:54:30 2022 ] Mean test loss of 796 batches: 3.1425249576568604.
39
+ [ Wed Sep 14 18:54:30 2022 ] Top1: 26.86%
40
+ [ Wed Sep 14 18:54:31 2022 ] Top5: 56.16%
41
+ [ Wed Sep 14 18:54:31 2022 ] Training epoch: 6
42
+ [ Wed Sep 14 18:55:35 2022 ] Batch(84/243) done. Loss: 1.4138 lr:0.100000 network_time: 0.0277
43
+ [ Wed Sep 14 18:56:48 2022 ] Batch(184/243) done. Loss: 1.0573 lr:0.100000 network_time: 0.0303
44
+ [ Wed Sep 14 18:57:30 2022 ] Eval epoch: 6
45
+ [ Wed Sep 14 18:59:05 2022 ] Mean test loss of 796 batches: 3.0790634155273438.
46
+ [ Wed Sep 14 18:59:05 2022 ] Top1: 29.51%
47
+ [ Wed Sep 14 18:59:06 2022 ] Top5: 62.24%
48
+ [ Wed Sep 14 18:59:06 2022 ] Training epoch: 7
49
+ [ Wed Sep 14 18:59:39 2022 ] Batch(41/243) done. Loss: 1.3634 lr:0.100000 network_time: 0.0323
50
+ [ Wed Sep 14 19:00:52 2022 ] Batch(141/243) done. Loss: 0.9326 lr:0.100000 network_time: 0.0267
51
+ [ Wed Sep 14 19:02:05 2022 ] Batch(241/243) done. Loss: 1.1295 lr:0.100000 network_time: 0.0270
52
+ [ Wed Sep 14 19:02:05 2022 ] Eval epoch: 7
53
+ [ Wed Sep 14 19:03:39 2022 ] Mean test loss of 796 batches: 2.927468776702881.
54
+ [ Wed Sep 14 19:03:40 2022 ] Top1: 30.35%
55
+ [ Wed Sep 14 19:03:40 2022 ] Top5: 64.28%
56
+ [ Wed Sep 14 19:03:40 2022 ] Training epoch: 8
57
+ [ Wed Sep 14 19:04:55 2022 ] Batch(98/243) done. Loss: 0.7392 lr:0.100000 network_time: 0.0268
58
+ [ Wed Sep 14 19:06:07 2022 ] Batch(198/243) done. Loss: 0.8407 lr:0.100000 network_time: 0.0263
59
+ [ Wed Sep 14 19:06:40 2022 ] Eval epoch: 8
60
+ [ Wed Sep 14 19:08:14 2022 ] Mean test loss of 796 batches: 2.743701457977295.
61
+ [ Wed Sep 14 19:08:14 2022 ] Top1: 33.74%
62
+ [ Wed Sep 14 19:08:15 2022 ] Top5: 66.94%
63
+ [ Wed Sep 14 19:08:15 2022 ] Training epoch: 9
64
+ [ Wed Sep 14 19:08:58 2022 ] Batch(55/243) done. Loss: 1.3084 lr:0.100000 network_time: 0.0283
65
+ [ Wed Sep 14 19:10:11 2022 ] Batch(155/243) done. Loss: 1.2042 lr:0.100000 network_time: 0.0276
66
+ [ Wed Sep 14 19:11:14 2022 ] Eval epoch: 9
67
+ [ Wed Sep 14 19:12:48 2022 ] Mean test loss of 796 batches: 3.0599684715270996.
68
+ [ Wed Sep 14 19:12:48 2022 ] Top1: 29.85%
69
+ [ Wed Sep 14 19:12:49 2022 ] Top5: 60.50%
70
+ [ Wed Sep 14 19:12:49 2022 ] Training epoch: 10
71
+ [ Wed Sep 14 19:13:01 2022 ] Batch(12/243) done. Loss: 1.1141 lr:0.100000 network_time: 0.0263
72
+ [ Wed Sep 14 19:14:14 2022 ] Batch(112/243) done. Loss: 1.2123 lr:0.100000 network_time: 0.0273
73
+ [ Wed Sep 14 19:15:26 2022 ] Batch(212/243) done. Loss: 1.2073 lr:0.100000 network_time: 0.0266
74
+ [ Wed Sep 14 19:15:48 2022 ] Eval epoch: 10
75
+ [ Wed Sep 14 19:17:23 2022 ] Mean test loss of 796 batches: 2.714928388595581.
76
+ [ Wed Sep 14 19:17:23 2022 ] Top1: 36.63%
77
+ [ Wed Sep 14 19:17:24 2022 ] Top5: 69.90%
78
+ [ Wed Sep 14 19:17:24 2022 ] Training epoch: 11
79
+ [ Wed Sep 14 19:18:17 2022 ] Batch(69/243) done. Loss: 1.0347 lr:0.100000 network_time: 0.0312
80
+ [ Wed Sep 14 19:19:30 2022 ] Batch(169/243) done. Loss: 0.8520 lr:0.100000 network_time: 0.0276
81
+ [ Wed Sep 14 19:20:23 2022 ] Eval epoch: 11
82
+ [ Wed Sep 14 19:21:58 2022 ] Mean test loss of 796 batches: 2.830739736557007.
83
+ [ Wed Sep 14 19:21:58 2022 ] Top1: 32.54%
84
+ [ Wed Sep 14 19:21:59 2022 ] Top5: 67.73%
85
+ [ Wed Sep 14 19:21:59 2022 ] Training epoch: 12
86
+ [ Wed Sep 14 19:22:21 2022 ] Batch(26/243) done. Loss: 0.6660 lr:0.100000 network_time: 0.0264
87
+ [ Wed Sep 14 19:23:34 2022 ] Batch(126/243) done. Loss: 0.9330 lr:0.100000 network_time: 0.0293
88
+ [ Wed Sep 14 19:24:46 2022 ] Batch(226/243) done. Loss: 1.0268 lr:0.100000 network_time: 0.0297
89
+ [ Wed Sep 14 19:24:58 2022 ] Eval epoch: 12
90
+ [ Wed Sep 14 19:26:32 2022 ] Mean test loss of 796 batches: 3.2985918521881104.
91
+ [ Wed Sep 14 19:26:33 2022 ] Top1: 29.22%
92
+ [ Wed Sep 14 19:26:33 2022 ] Top5: 63.29%
93
+ [ Wed Sep 14 19:26:33 2022 ] Training epoch: 13
94
+ [ Wed Sep 14 19:27:37 2022 ] Batch(83/243) done. Loss: 0.7545 lr:0.100000 network_time: 0.0276
95
+ [ Wed Sep 14 19:28:50 2022 ] Batch(183/243) done. Loss: 0.6929 lr:0.100000 network_time: 0.0271
96
+ [ Wed Sep 14 19:29:33 2022 ] Eval epoch: 13
97
+ [ Wed Sep 14 19:31:08 2022 ] Mean test loss of 796 batches: 2.562110424041748.
98
+ [ Wed Sep 14 19:31:08 2022 ] Top1: 38.93%
99
+ [ Wed Sep 14 19:31:09 2022 ] Top5: 72.60%
100
+ [ Wed Sep 14 19:31:09 2022 ] Training epoch: 14
101
+ [ Wed Sep 14 19:31:42 2022 ] Batch(40/243) done. Loss: 0.5393 lr:0.100000 network_time: 0.0281
102
+ [ Wed Sep 14 19:32:54 2022 ] Batch(140/243) done. Loss: 0.9207 lr:0.100000 network_time: 0.0263
103
+ [ Wed Sep 14 19:34:07 2022 ] Batch(240/243) done. Loss: 0.8997 lr:0.100000 network_time: 0.0302
104
+ [ Wed Sep 14 19:34:09 2022 ] Eval epoch: 14
105
+ [ Wed Sep 14 19:35:43 2022 ] Mean test loss of 796 batches: 2.663928508758545.
106
+ [ Wed Sep 14 19:35:43 2022 ] Top1: 37.57%
107
+ [ Wed Sep 14 19:35:44 2022 ] Top5: 71.04%
108
+ [ Wed Sep 14 19:35:44 2022 ] Training epoch: 15
109
+ [ Wed Sep 14 19:36:58 2022 ] Batch(97/243) done. Loss: 0.6897 lr:0.100000 network_time: 0.0320
110
+ [ Wed Sep 14 19:38:10 2022 ] Batch(197/243) done. Loss: 0.6975 lr:0.100000 network_time: 0.0268
111
+ [ Wed Sep 14 19:38:43 2022 ] Eval epoch: 15
112
+ [ Wed Sep 14 19:40:17 2022 ] Mean test loss of 796 batches: 2.4152395725250244.
113
+ [ Wed Sep 14 19:40:18 2022 ] Top1: 40.93%
114
+ [ Wed Sep 14 19:40:18 2022 ] Top5: 75.42%
115
+ [ Wed Sep 14 19:40:18 2022 ] Training epoch: 16
116
+ [ Wed Sep 14 19:41:01 2022 ] Batch(54/243) done. Loss: 0.3969 lr:0.100000 network_time: 0.0268
117
+ [ Wed Sep 14 19:42:14 2022 ] Batch(154/243) done. Loss: 0.7158 lr:0.100000 network_time: 0.0274
118
+ [ Wed Sep 14 19:43:18 2022 ] Eval epoch: 16
119
+ [ Wed Sep 14 19:44:52 2022 ] Mean test loss of 796 batches: 2.5785508155822754.
120
+ [ Wed Sep 14 19:44:53 2022 ] Top1: 38.90%
121
+ [ Wed Sep 14 19:44:53 2022 ] Top5: 72.23%
122
+ [ Wed Sep 14 19:44:53 2022 ] Training epoch: 17
123
+ [ Wed Sep 14 19:45:04 2022 ] Batch(11/243) done. Loss: 0.7059 lr:0.100000 network_time: 0.0301
124
+ [ Wed Sep 14 19:46:17 2022 ] Batch(111/243) done. Loss: 0.7283 lr:0.100000 network_time: 0.0274
125
+ [ Wed Sep 14 19:47:30 2022 ] Batch(211/243) done. Loss: 0.9291 lr:0.100000 network_time: 0.0276
126
+ [ Wed Sep 14 19:47:52 2022 ] Eval epoch: 17
127
+ [ Wed Sep 14 19:49:26 2022 ] Mean test loss of 796 batches: 2.557190179824829.
128
+ [ Wed Sep 14 19:49:27 2022 ] Top1: 40.76%
129
+ [ Wed Sep 14 19:49:27 2022 ] Top5: 74.72%
130
+ [ Wed Sep 14 19:49:27 2022 ] Training epoch: 18
131
+ [ Wed Sep 14 19:50:20 2022 ] Batch(68/243) done. Loss: 0.5587 lr:0.100000 network_time: 0.0295
132
+ [ Wed Sep 14 19:51:32 2022 ] Batch(168/243) done. Loss: 0.3922 lr:0.100000 network_time: 0.0279
133
+ [ Wed Sep 14 19:52:26 2022 ] Eval epoch: 18
134
+ [ Wed Sep 14 19:54:00 2022 ] Mean test loss of 796 batches: 3.573277235031128.
135
+ [ Wed Sep 14 19:54:01 2022 ] Top1: 33.62%
136
+ [ Wed Sep 14 19:54:01 2022 ] Top5: 67.25%
137
+ [ Wed Sep 14 19:54:01 2022 ] Training epoch: 19
138
+ [ Wed Sep 14 19:54:23 2022 ] Batch(25/243) done. Loss: 0.6683 lr:0.100000 network_time: 0.0347
139
+ [ Wed Sep 14 19:55:35 2022 ] Batch(125/243) done. Loss: 0.7018 lr:0.100000 network_time: 0.0299
140
+ [ Wed Sep 14 19:56:48 2022 ] Batch(225/243) done. Loss: 0.6643 lr:0.100000 network_time: 0.0296
141
+ [ Wed Sep 14 19:57:01 2022 ] Eval epoch: 19
142
+ [ Wed Sep 14 19:58:35 2022 ] Mean test loss of 796 batches: 2.8546314239501953.
143
+ [ Wed Sep 14 19:58:35 2022 ] Top1: 39.64%
144
+ [ Wed Sep 14 19:58:35 2022 ] Top5: 72.18%
145
+ [ Wed Sep 14 19:58:36 2022 ] Training epoch: 20
146
+ [ Wed Sep 14 19:59:39 2022 ] Batch(82/243) done. Loss: 0.4966 lr:0.100000 network_time: 0.0263
147
+ [ Wed Sep 14 20:00:51 2022 ] Batch(182/243) done. Loss: 0.4008 lr:0.100000 network_time: 0.0315
148
+ [ Wed Sep 14 20:01:35 2022 ] Eval epoch: 20
149
+ [ Wed Sep 14 20:03:09 2022 ] Mean test loss of 796 batches: 2.6202304363250732.
150
+ [ Wed Sep 14 20:03:10 2022 ] Top1: 40.66%
151
+ [ Wed Sep 14 20:03:10 2022 ] Top5: 74.78%
152
+ [ Wed Sep 14 20:03:10 2022 ] Training epoch: 21
153
+ [ Wed Sep 14 20:03:42 2022 ] Batch(39/243) done. Loss: 0.3239 lr:0.100000 network_time: 0.0299
154
+ [ Wed Sep 14 20:04:55 2022 ] Batch(139/243) done. Loss: 0.4198 lr:0.100000 network_time: 0.0265
155
+ [ Wed Sep 14 20:06:07 2022 ] Batch(239/243) done. Loss: 0.7220 lr:0.100000 network_time: 0.0266
156
+ [ Wed Sep 14 20:06:09 2022 ] Eval epoch: 21
157
+ [ Wed Sep 14 20:07:44 2022 ] Mean test loss of 796 batches: 2.5382893085479736.
158
+ [ Wed Sep 14 20:07:44 2022 ] Top1: 41.41%
159
+ [ Wed Sep 14 20:07:45 2022 ] Top5: 75.24%
160
+ [ Wed Sep 14 20:07:45 2022 ] Training epoch: 22
161
+ [ Wed Sep 14 20:08:58 2022 ] Batch(96/243) done. Loss: 0.4248 lr:0.100000 network_time: 0.0270
162
+ [ Wed Sep 14 20:10:11 2022 ] Batch(196/243) done. Loss: 0.4894 lr:0.100000 network_time: 0.0313
163
+ [ Wed Sep 14 20:10:44 2022 ] Eval epoch: 22
164
+ [ Wed Sep 14 20:12:19 2022 ] Mean test loss of 796 batches: 2.62750244140625.
165
+ [ Wed Sep 14 20:12:19 2022 ] Top1: 42.49%
166
+ [ Wed Sep 14 20:12:20 2022 ] Top5: 75.99%
167
+ [ Wed Sep 14 20:12:20 2022 ] Training epoch: 23
168
+ [ Wed Sep 14 20:13:02 2022 ] Batch(53/243) done. Loss: 0.3055 lr:0.100000 network_time: 0.0299
169
+ [ Wed Sep 14 20:14:15 2022 ] Batch(153/243) done. Loss: 0.6483 lr:0.100000 network_time: 0.0312
170
+ [ Wed Sep 14 20:15:19 2022 ] Eval epoch: 23
171
+ [ Wed Sep 14 20:16:55 2022 ] Mean test loss of 796 batches: 2.8056583404541016.
172
+ [ Wed Sep 14 20:16:55 2022 ] Top1: 39.39%
173
+ [ Wed Sep 14 20:16:56 2022 ] Top5: 72.92%
174
+ [ Wed Sep 14 20:16:56 2022 ] Training epoch: 24
175
+ [ Wed Sep 14 20:17:06 2022 ] Batch(10/243) done. Loss: 0.4323 lr:0.100000 network_time: 0.0284
176
+ [ Wed Sep 14 20:18:19 2022 ] Batch(110/243) done. Loss: 0.8153 lr:0.100000 network_time: 0.0274
177
+ [ Wed Sep 14 20:19:32 2022 ] Batch(210/243) done. Loss: 0.4780 lr:0.100000 network_time: 0.0277
178
+ [ Wed Sep 14 20:19:55 2022 ] Eval epoch: 24
179
+ [ Wed Sep 14 20:21:30 2022 ] Mean test loss of 796 batches: 2.699596405029297.
180
+ [ Wed Sep 14 20:21:30 2022 ] Top1: 42.59%
181
+ [ Wed Sep 14 20:21:31 2022 ] Top5: 74.85%
182
+ [ Wed Sep 14 20:21:31 2022 ] Training epoch: 25
183
+ [ Wed Sep 14 20:22:23 2022 ] Batch(67/243) done. Loss: 0.5537 lr:0.100000 network_time: 0.0284
184
+ [ Wed Sep 14 20:23:36 2022 ] Batch(167/243) done. Loss: 0.4563 lr:0.100000 network_time: 0.0269
185
+ [ Wed Sep 14 20:24:30 2022 ] Eval epoch: 25
186
+ [ Wed Sep 14 20:26:04 2022 ] Mean test loss of 796 batches: 2.6026360988616943.
187
+ [ Wed Sep 14 20:26:05 2022 ] Top1: 40.69%
188
+ [ Wed Sep 14 20:26:06 2022 ] Top5: 74.59%
189
+ [ Wed Sep 14 20:26:06 2022 ] Training epoch: 26
190
+ [ Wed Sep 14 20:26:27 2022 ] Batch(24/243) done. Loss: 0.4626 lr:0.100000 network_time: 0.0276
191
+ [ Wed Sep 14 20:27:39 2022 ] Batch(124/243) done. Loss: 0.4110 lr:0.100000 network_time: 0.0280
192
+ [ Wed Sep 14 20:28:52 2022 ] Batch(224/243) done. Loss: 0.6452 lr:0.100000 network_time: 0.0310
193
+ [ Wed Sep 14 20:29:05 2022 ] Eval epoch: 26
194
+ [ Wed Sep 14 20:30:39 2022 ] Mean test loss of 796 batches: 2.294908046722412.
195
+ [ Wed Sep 14 20:30:40 2022 ] Top1: 45.39%
196
+ [ Wed Sep 14 20:30:40 2022 ] Top5: 78.63%
197
+ [ Wed Sep 14 20:30:40 2022 ] Training epoch: 27
198
+ [ Wed Sep 14 20:31:42 2022 ] Batch(81/243) done. Loss: 0.5657 lr:0.100000 network_time: 0.0266
199
+ [ Wed Sep 14 20:32:55 2022 ] Batch(181/243) done. Loss: 0.4664 lr:0.100000 network_time: 0.0307
200
+ [ Wed Sep 14 20:33:39 2022 ] Eval epoch: 27
201
+ [ Wed Sep 14 20:35:14 2022 ] Mean test loss of 796 batches: 2.6094589233398438.
202
+ [ Wed Sep 14 20:35:14 2022 ] Top1: 39.99%
203
+ [ Wed Sep 14 20:35:14 2022 ] Top5: 73.88%
204
+ [ Wed Sep 14 20:35:15 2022 ] Training epoch: 28
205
+ [ Wed Sep 14 20:35:45 2022 ] Batch(38/243) done. Loss: 0.2889 lr:0.100000 network_time: 0.0319
206
+ [ Wed Sep 14 20:36:58 2022 ] Batch(138/243) done. Loss: 0.5320 lr:0.100000 network_time: 0.0270
207
+ [ Wed Sep 14 20:38:11 2022 ] Batch(238/243) done. Loss: 0.4978 lr:0.100000 network_time: 0.0313
208
+ [ Wed Sep 14 20:38:14 2022 ] Eval epoch: 28
209
+ [ Wed Sep 14 20:39:48 2022 ] Mean test loss of 796 batches: 2.759406089782715.
210
+ [ Wed Sep 14 20:39:48 2022 ] Top1: 40.93%
211
+ [ Wed Sep 14 20:39:49 2022 ] Top5: 73.80%
212
+ [ Wed Sep 14 20:39:49 2022 ] Training epoch: 29
213
+ [ Wed Sep 14 20:41:01 2022 ] Batch(95/243) done. Loss: 0.5033 lr:0.100000 network_time: 0.0310
214
+ [ Wed Sep 14 20:42:14 2022 ] Batch(195/243) done. Loss: 0.6511 lr:0.100000 network_time: 0.0275
215
+ [ Wed Sep 14 20:42:48 2022 ] Eval epoch: 29
216
+ [ Wed Sep 14 20:44:22 2022 ] Mean test loss of 796 batches: 2.587949275970459.
217
+ [ Wed Sep 14 20:44:23 2022 ] Top1: 42.27%
218
+ [ Wed Sep 14 20:44:24 2022 ] Top5: 75.12%
219
+ [ Wed Sep 14 20:44:24 2022 ] Training epoch: 30
220
+ [ Wed Sep 14 20:45:05 2022 ] Batch(52/243) done. Loss: 0.2909 lr:0.100000 network_time: 0.0269
221
+ [ Wed Sep 14 20:46:17 2022 ] Batch(152/243) done. Loss: 0.3604 lr:0.100000 network_time: 0.0255
222
+ [ Wed Sep 14 20:47:23 2022 ] Eval epoch: 30
223
+ [ Wed Sep 14 20:48:57 2022 ] Mean test loss of 796 batches: 2.8285579681396484.
224
+ [ Wed Sep 14 20:48:57 2022 ] Top1: 42.68%
225
+ [ Wed Sep 14 20:48:58 2022 ] Top5: 76.09%
226
+ [ Wed Sep 14 20:48:58 2022 ] Training epoch: 31
227
+ [ Wed Sep 14 20:49:08 2022 ] Batch(9/243) done. Loss: 0.3543 lr:0.100000 network_time: 0.0257
228
+ [ Wed Sep 14 20:50:20 2022 ] Batch(109/243) done. Loss: 0.2743 lr:0.100000 network_time: 0.0277
229
+ [ Wed Sep 14 20:51:33 2022 ] Batch(209/243) done. Loss: 0.5345 lr:0.100000 network_time: 0.0279
230
+ [ Wed Sep 14 20:51:57 2022 ] Eval epoch: 31
231
+ [ Wed Sep 14 20:53:31 2022 ] Mean test loss of 796 batches: 2.879929542541504.
232
+ [ Wed Sep 14 20:53:32 2022 ] Top1: 41.34%
233
+ [ Wed Sep 14 20:53:32 2022 ] Top5: 74.41%
234
+ [ Wed Sep 14 20:53:32 2022 ] Training epoch: 32
235
+ [ Wed Sep 14 20:54:24 2022 ] Batch(66/243) done. Loss: 0.4370 lr:0.100000 network_time: 0.0273
236
+ [ Wed Sep 14 20:55:37 2022 ] Batch(166/243) done. Loss: 0.5667 lr:0.100000 network_time: 0.0312
237
+ [ Wed Sep 14 20:56:32 2022 ] Eval epoch: 32
238
+ [ Wed Sep 14 20:58:06 2022 ] Mean test loss of 796 batches: 2.7939627170562744.
239
+ [ Wed Sep 14 20:58:06 2022 ] Top1: 41.09%
240
+ [ Wed Sep 14 20:58:07 2022 ] Top5: 74.35%
241
+ [ Wed Sep 14 20:58:07 2022 ] Training epoch: 33
242
+ [ Wed Sep 14 20:58:27 2022 ] Batch(23/243) done. Loss: 0.4639 lr:0.100000 network_time: 0.0295
243
+ [ Wed Sep 14 20:59:40 2022 ] Batch(123/243) done. Loss: 0.6343 lr:0.100000 network_time: 0.0281
244
+ [ Wed Sep 14 21:00:52 2022 ] Batch(223/243) done. Loss: 0.6872 lr:0.100000 network_time: 0.0264
245
+ [ Wed Sep 14 21:01:06 2022 ] Eval epoch: 33
246
+ [ Wed Sep 14 21:02:41 2022 ] Mean test loss of 796 batches: 2.557569980621338.
247
+ [ Wed Sep 14 21:02:41 2022 ] Top1: 43.26%
248
+ [ Wed Sep 14 21:02:42 2022 ] Top5: 75.92%
249
+ [ Wed Sep 14 21:02:42 2022 ] Training epoch: 34
250
+ [ Wed Sep 14 21:03:43 2022 ] Batch(80/243) done. Loss: 0.2682 lr:0.100000 network_time: 0.0271
251
+ [ Wed Sep 14 21:04:56 2022 ] Batch(180/243) done. Loss: 0.3253 lr:0.100000 network_time: 0.0424
252
+ [ Wed Sep 14 21:05:41 2022 ] Eval epoch: 34
253
+ [ Wed Sep 14 21:07:15 2022 ] Mean test loss of 796 batches: 2.8215174674987793.
254
+ [ Wed Sep 14 21:07:16 2022 ] Top1: 44.03%
255
+ [ Wed Sep 14 21:07:16 2022 ] Top5: 75.30%
256
+ [ Wed Sep 14 21:07:17 2022 ] Training epoch: 35
257
+ [ Wed Sep 14 21:07:47 2022 ] Batch(37/243) done. Loss: 0.3251 lr:0.100000 network_time: 0.0257
258
+ [ Wed Sep 14 21:08:59 2022 ] Batch(137/243) done. Loss: 0.3628 lr:0.100000 network_time: 0.0301
259
+ [ Wed Sep 14 21:10:12 2022 ] Batch(237/243) done. Loss: 0.4573 lr:0.100000 network_time: 0.0282
260
+ [ Wed Sep 14 21:10:16 2022 ] Eval epoch: 35
261
+ [ Wed Sep 14 21:11:50 2022 ] Mean test loss of 796 batches: 2.5117599964141846.
262
+ [ Wed Sep 14 21:11:51 2022 ] Top1: 46.17%
263
+ [ Wed Sep 14 21:11:51 2022 ] Top5: 78.81%
264
+ [ Wed Sep 14 21:11:51 2022 ] Training epoch: 36
265
+ [ Wed Sep 14 21:13:03 2022 ] Batch(94/243) done. Loss: 0.4862 lr:0.100000 network_time: 0.0277
266
+ [ Wed Sep 14 21:14:16 2022 ] Batch(194/243) done. Loss: 0.3950 lr:0.100000 network_time: 0.0279
267
+ [ Wed Sep 14 21:14:51 2022 ] Eval epoch: 36
268
+ [ Wed Sep 14 21:16:25 2022 ] Mean test loss of 796 batches: 2.567964553833008.
269
+ [ Wed Sep 14 21:16:25 2022 ] Top1: 45.67%
270
+ [ Wed Sep 14 21:16:26 2022 ] Top5: 77.48%
271
+ [ Wed Sep 14 21:16:26 2022 ] Training epoch: 37
272
+ [ Wed Sep 14 21:17:06 2022 ] Batch(51/243) done. Loss: 0.2134 lr:0.100000 network_time: 0.0267
273
+ [ Wed Sep 14 21:18:19 2022 ] Batch(151/243) done. Loss: 0.3702 lr:0.100000 network_time: 0.0268
274
+ [ Wed Sep 14 21:19:25 2022 ] Eval epoch: 37
275
+ [ Wed Sep 14 21:21:00 2022 ] Mean test loss of 796 batches: 2.9781603813171387.
276
+ [ Wed Sep 14 21:21:00 2022 ] Top1: 41.35%
277
+ [ Wed Sep 14 21:21:01 2022 ] Top5: 74.87%
278
+ [ Wed Sep 14 21:21:01 2022 ] Training epoch: 38
279
+ [ Wed Sep 14 21:21:10 2022 ] Batch(8/243) done. Loss: 0.4388 lr:0.100000 network_time: 0.0315
280
+ [ Wed Sep 14 21:22:22 2022 ] Batch(108/243) done. Loss: 0.3741 lr:0.100000 network_time: 0.0425
281
+ [ Wed Sep 14 21:23:35 2022 ] Batch(208/243) done. Loss: 0.3047 lr:0.100000 network_time: 0.0265
282
+ [ Wed Sep 14 21:24:00 2022 ] Eval epoch: 38
283
+ [ Wed Sep 14 21:25:34 2022 ] Mean test loss of 796 batches: 2.579361915588379.
284
+ [ Wed Sep 14 21:25:35 2022 ] Top1: 46.73%
285
+ [ Wed Sep 14 21:25:35 2022 ] Top5: 77.56%
286
+ [ Wed Sep 14 21:25:35 2022 ] Training epoch: 39
287
+ [ Wed Sep 14 21:26:25 2022 ] Batch(65/243) done. Loss: 0.5252 lr:0.100000 network_time: 0.0289
288
+ [ Wed Sep 14 21:27:38 2022 ] Batch(165/243) done. Loss: 0.3791 lr:0.100000 network_time: 0.0277
289
+ [ Wed Sep 14 21:28:34 2022 ] Eval epoch: 39
290
+ [ Wed Sep 14 21:30:08 2022 ] Mean test loss of 796 batches: 3.3219752311706543.
291
+ [ Wed Sep 14 21:30:09 2022 ] Top1: 37.83%
292
+ [ Wed Sep 14 21:30:10 2022 ] Top5: 71.83%
293
+ [ Wed Sep 14 21:30:10 2022 ] Training epoch: 40
294
+ [ Wed Sep 14 21:30:29 2022 ] Batch(22/243) done. Loss: 0.2710 lr:0.100000 network_time: 0.0310
295
+ [ Wed Sep 14 21:31:42 2022 ] Batch(122/243) done. Loss: 0.2538 lr:0.100000 network_time: 0.0274
296
+ [ Wed Sep 14 21:32:54 2022 ] Batch(222/243) done. Loss: 0.2920 lr:0.100000 network_time: 0.0306
297
+ [ Wed Sep 14 21:33:09 2022 ] Eval epoch: 40
298
+ [ Wed Sep 14 21:34:43 2022 ] Mean test loss of 796 batches: 3.5191311836242676.
299
+ [ Wed Sep 14 21:34:44 2022 ] Top1: 38.85%
300
+ [ Wed Sep 14 21:34:44 2022 ] Top5: 71.95%
301
+ [ Wed Sep 14 21:34:45 2022 ] Training epoch: 41
302
+ [ Wed Sep 14 21:35:45 2022 ] Batch(79/243) done. Loss: 0.3196 lr:0.100000 network_time: 0.0346
303
+ [ Wed Sep 14 21:36:58 2022 ] Batch(179/243) done. Loss: 0.3180 lr:0.100000 network_time: 0.0276
304
+ [ Wed Sep 14 21:37:44 2022 ] Eval epoch: 41
305
+ [ Wed Sep 14 21:39:19 2022 ] Mean test loss of 796 batches: 2.699187994003296.
306
+ [ Wed Sep 14 21:39:19 2022 ] Top1: 44.14%
307
+ [ Wed Sep 14 21:39:20 2022 ] Top5: 76.58%
308
+ [ Wed Sep 14 21:39:20 2022 ] Training epoch: 42
309
+ [ Wed Sep 14 21:39:49 2022 ] Batch(36/243) done. Loss: 0.3361 lr:0.100000 network_time: 0.0279
310
+ [ Wed Sep 14 21:41:02 2022 ] Batch(136/243) done. Loss: 0.3384 lr:0.100000 network_time: 0.0274
311
+ [ Wed Sep 14 21:42:15 2022 ] Batch(236/243) done. Loss: 0.3603 lr:0.100000 network_time: 0.0276
312
+ [ Wed Sep 14 21:42:19 2022 ] Eval epoch: 42
313
+ [ Wed Sep 14 21:43:54 2022 ] Mean test loss of 796 batches: 2.974973678588867.
314
+ [ Wed Sep 14 21:43:54 2022 ] Top1: 41.72%
315
+ [ Wed Sep 14 21:43:55 2022 ] Top5: 74.08%
316
+ [ Wed Sep 14 21:43:55 2022 ] Training epoch: 43
317
+ [ Wed Sep 14 21:45:06 2022 ] Batch(93/243) done. Loss: 0.1706 lr:0.100000 network_time: 0.0312
318
+ [ Wed Sep 14 21:46:19 2022 ] Batch(193/243) done. Loss: 0.4081 lr:0.100000 network_time: 0.0270
319
+ [ Wed Sep 14 21:46:54 2022 ] Eval epoch: 43
320
+ [ Wed Sep 14 21:48:28 2022 ] Mean test loss of 796 batches: 3.3875744342803955.
321
+ [ Wed Sep 14 21:48:29 2022 ] Top1: 39.21%
322
+ [ Wed Sep 14 21:48:29 2022 ] Top5: 71.23%
323
+ [ Wed Sep 14 21:48:29 2022 ] Training epoch: 44
324
+ [ Wed Sep 14 21:49:09 2022 ] Batch(50/243) done. Loss: 0.2592 lr:0.100000 network_time: 0.0343
325
+ [ Wed Sep 14 21:50:22 2022 ] Batch(150/243) done. Loss: 0.3864 lr:0.100000 network_time: 0.0309
326
+ [ Wed Sep 14 21:51:29 2022 ] Eval epoch: 44
327
+ [ Wed Sep 14 21:53:03 2022 ] Mean test loss of 796 batches: 2.42459774017334.
328
+ [ Wed Sep 14 21:53:04 2022 ] Top1: 45.90%
329
+ [ Wed Sep 14 21:53:04 2022 ] Top5: 78.51%
330
+ [ Wed Sep 14 21:53:04 2022 ] Training epoch: 45
331
+ [ Wed Sep 14 21:53:12 2022 ] Batch(7/243) done. Loss: 0.2629 lr:0.100000 network_time: 0.0267
332
+ [ Wed Sep 14 21:54:25 2022 ] Batch(107/243) done. Loss: 0.3106 lr:0.100000 network_time: 0.0310
333
+ [ Wed Sep 14 21:55:37 2022 ] Batch(207/243) done. Loss: 0.3254 lr:0.100000 network_time: 0.0313
334
+ [ Wed Sep 14 21:56:03 2022 ] Eval epoch: 45
335
+ [ Wed Sep 14 21:57:38 2022 ] Mean test loss of 796 batches: 2.780808925628662.
336
+ [ Wed Sep 14 21:57:38 2022 ] Top1: 41.82%
337
+ [ Wed Sep 14 21:57:39 2022 ] Top5: 75.46%
338
+ [ Wed Sep 14 21:57:39 2022 ] Training epoch: 46
339
+ [ Wed Sep 14 21:58:29 2022 ] Batch(64/243) done. Loss: 0.1354 lr:0.100000 network_time: 0.0274
340
+ [ Wed Sep 14 21:59:42 2022 ] Batch(164/243) done. Loss: 0.3815 lr:0.100000 network_time: 0.0284
341
+ [ Wed Sep 14 22:00:38 2022 ] Eval epoch: 46
342
+ [ Wed Sep 14 22:02:13 2022 ] Mean test loss of 796 batches: 2.7003347873687744.
343
+ [ Wed Sep 14 22:02:13 2022 ] Top1: 45.25%
344
+ [ Wed Sep 14 22:02:13 2022 ] Top5: 76.78%
345
+ [ Wed Sep 14 22:02:14 2022 ] Training epoch: 47
346
+ [ Wed Sep 14 22:02:32 2022 ] Batch(21/243) done. Loss: 0.1768 lr:0.100000 network_time: 0.0306
347
+ [ Wed Sep 14 22:03:45 2022 ] Batch(121/243) done. Loss: 0.2490 lr:0.100000 network_time: 0.0309
348
+ [ Wed Sep 14 22:04:57 2022 ] Batch(221/243) done. Loss: 0.2552 lr:0.100000 network_time: 0.0270
349
+ [ Wed Sep 14 22:05:13 2022 ] Eval epoch: 47
350
+ [ Wed Sep 14 22:06:47 2022 ] Mean test loss of 796 batches: 2.9509265422821045.
351
+ [ Wed Sep 14 22:06:48 2022 ] Top1: 44.42%
352
+ [ Wed Sep 14 22:06:48 2022 ] Top5: 76.73%
353
+ [ Wed Sep 14 22:06:48 2022 ] Training epoch: 48
354
+ [ Wed Sep 14 22:07:49 2022 ] Batch(78/243) done. Loss: 0.3004 lr:0.100000 network_time: 0.0286
355
+ [ Wed Sep 14 22:09:01 2022 ] Batch(178/243) done. Loss: 0.5320 lr:0.100000 network_time: 0.0322
356
+ [ Wed Sep 14 22:09:48 2022 ] Eval epoch: 48
357
+ [ Wed Sep 14 22:11:22 2022 ] Mean test loss of 796 batches: 2.4312541484832764.
358
+ [ Wed Sep 14 22:11:23 2022 ] Top1: 47.98%
359
+ [ Wed Sep 14 22:11:23 2022 ] Top5: 79.85%
360
+ [ Wed Sep 14 22:11:23 2022 ] Training epoch: 49
361
+ [ Wed Sep 14 22:11:52 2022 ] Batch(35/243) done. Loss: 0.2771 lr:0.100000 network_time: 0.0266
362
+ [ Wed Sep 14 22:13:05 2022 ] Batch(135/243) done. Loss: 0.2653 lr:0.100000 network_time: 0.0279
363
+ [ Wed Sep 14 22:14:18 2022 ] Batch(235/243) done. Loss: 0.4622 lr:0.100000 network_time: 0.0278
364
+ [ Wed Sep 14 22:14:23 2022 ] Eval epoch: 49
365
+ [ Wed Sep 14 22:15:57 2022 ] Mean test loss of 796 batches: 2.64970064163208.
366
+ [ Wed Sep 14 22:15:58 2022 ] Top1: 44.03%
367
+ [ Wed Sep 14 22:15:58 2022 ] Top5: 77.05%
368
+ [ Wed Sep 14 22:15:58 2022 ] Training epoch: 50
369
+ [ Wed Sep 14 22:17:08 2022 ] Batch(92/243) done. Loss: 0.3453 lr:0.100000 network_time: 0.0259
370
+ [ Wed Sep 14 22:18:21 2022 ] Batch(192/243) done. Loss: 0.3537 lr:0.100000 network_time: 0.0268
371
+ [ Wed Sep 14 22:18:58 2022 ] Eval epoch: 50
372
+ [ Wed Sep 14 22:20:32 2022 ] Mean test loss of 796 batches: 2.8408055305480957.
373
+ [ Wed Sep 14 22:20:32 2022 ] Top1: 46.56%
374
+ [ Wed Sep 14 22:20:33 2022 ] Top5: 77.21%
375
+ [ Wed Sep 14 22:20:33 2022 ] Training epoch: 51
376
+ [ Wed Sep 14 22:21:12 2022 ] Batch(49/243) done. Loss: 0.2958 lr:0.100000 network_time: 0.0264
377
+ [ Wed Sep 14 22:22:25 2022 ] Batch(149/243) done. Loss: 0.3390 lr:0.100000 network_time: 0.0269
378
+ [ Wed Sep 14 22:23:32 2022 ] Eval epoch: 51
379
+ [ Wed Sep 14 22:25:07 2022 ] Mean test loss of 796 batches: 2.830892324447632.
380
+ [ Wed Sep 14 22:25:07 2022 ] Top1: 44.75%
381
+ [ Wed Sep 14 22:25:08 2022 ] Top5: 76.30%
382
+ [ Wed Sep 14 22:25:08 2022 ] Training epoch: 52
383
+ [ Wed Sep 14 22:25:15 2022 ] Batch(6/243) done. Loss: 0.3292 lr:0.100000 network_time: 0.0263
384
+ [ Wed Sep 14 22:26:28 2022 ] Batch(106/243) done. Loss: 0.2284 lr:0.100000 network_time: 0.0329
385
+ [ Wed Sep 14 22:27:41 2022 ] Batch(206/243) done. Loss: 0.3900 lr:0.100000 network_time: 0.0271
386
+ [ Wed Sep 14 22:28:07 2022 ] Eval epoch: 52
387
+ [ Wed Sep 14 22:29:42 2022 ] Mean test loss of 796 batches: 2.779292583465576.
388
+ [ Wed Sep 14 22:29:42 2022 ] Top1: 43.91%
389
+ [ Wed Sep 14 22:29:42 2022 ] Top5: 76.34%
390
+ [ Wed Sep 14 22:29:43 2022 ] Training epoch: 53
391
+ [ Wed Sep 14 22:30:32 2022 ] Batch(63/243) done. Loss: 0.1874 lr:0.100000 network_time: 0.0328
392
+ [ Wed Sep 14 22:31:45 2022 ] Batch(163/243) done. Loss: 0.3138 lr:0.100000 network_time: 0.0266
393
+ [ Wed Sep 14 22:32:42 2022 ] Eval epoch: 53
394
+ [ Wed Sep 14 22:34:16 2022 ] Mean test loss of 796 batches: 2.5557665824890137.
395
+ [ Wed Sep 14 22:34:17 2022 ] Top1: 47.09%
396
+ [ Wed Sep 14 22:34:17 2022 ] Top5: 79.82%
397
+ [ Wed Sep 14 22:34:18 2022 ] Training epoch: 54
398
+ [ Wed Sep 14 22:34:35 2022 ] Batch(20/243) done. Loss: 0.3232 lr:0.100000 network_time: 0.0292
399
+ [ Wed Sep 14 22:35:48 2022 ] Batch(120/243) done. Loss: 0.2636 lr:0.100000 network_time: 0.0275
400
+ [ Wed Sep 14 22:37:01 2022 ] Batch(220/243) done. Loss: 0.4322 lr:0.100000 network_time: 0.0311
401
+ [ Wed Sep 14 22:37:17 2022 ] Eval epoch: 54
402
+ [ Wed Sep 14 22:38:51 2022 ] Mean test loss of 796 batches: 2.8570163249969482.
403
+ [ Wed Sep 14 22:38:52 2022 ] Top1: 44.39%
404
+ [ Wed Sep 14 22:38:52 2022 ] Top5: 76.48%
405
+ [ Wed Sep 14 22:38:52 2022 ] Training epoch: 55
406
+ [ Wed Sep 14 22:39:52 2022 ] Batch(77/243) done. Loss: 0.3885 lr:0.100000 network_time: 0.0258
407
+ [ Wed Sep 14 22:41:04 2022 ] Batch(177/243) done. Loss: 0.3764 lr:0.100000 network_time: 0.0309
408
+ [ Wed Sep 14 22:41:52 2022 ] Eval epoch: 55
409
+ [ Wed Sep 14 22:43:26 2022 ] Mean test loss of 796 batches: 2.8412747383117676.
410
+ [ Wed Sep 14 22:43:26 2022 ] Top1: 45.56%
411
+ [ Wed Sep 14 22:43:27 2022 ] Top5: 77.42%
412
+ [ Wed Sep 14 22:43:27 2022 ] Training epoch: 56
413
+ [ Wed Sep 14 22:43:55 2022 ] Batch(34/243) done. Loss: 0.2089 lr:0.100000 network_time: 0.0321
414
+ [ Wed Sep 14 22:45:07 2022 ] Batch(134/243) done. Loss: 0.4471 lr:0.100000 network_time: 0.0274
415
+ [ Wed Sep 14 22:46:20 2022 ] Batch(234/243) done. Loss: 0.3233 lr:0.100000 network_time: 0.0344
416
+ [ Wed Sep 14 22:46:26 2022 ] Eval epoch: 56
417
+ [ Wed Sep 14 22:48:00 2022 ] Mean test loss of 796 batches: 3.0499653816223145.
418
+ [ Wed Sep 14 22:48:01 2022 ] Top1: 42.66%
419
+ [ Wed Sep 14 22:48:01 2022 ] Top5: 74.60%
420
+ [ Wed Sep 14 22:48:01 2022 ] Training epoch: 57
421
+ [ Wed Sep 14 22:49:11 2022 ] Batch(91/243) done. Loss: 0.4092 lr:0.100000 network_time: 0.0315
422
+ [ Wed Sep 14 22:50:24 2022 ] Batch(191/243) done. Loss: 0.2308 lr:0.100000 network_time: 0.0271
423
+ [ Wed Sep 14 22:51:01 2022 ] Eval epoch: 57
424
+ [ Wed Sep 14 22:52:35 2022 ] Mean test loss of 796 batches: 3.305689811706543.
425
+ [ Wed Sep 14 22:52:36 2022 ] Top1: 41.40%
426
+ [ Wed Sep 14 22:52:36 2022 ] Top5: 73.17%
427
+ [ Wed Sep 14 22:52:36 2022 ] Training epoch: 58
428
+ [ Wed Sep 14 22:53:15 2022 ] Batch(48/243) done. Loss: 0.2153 lr:0.100000 network_time: 0.0272
429
+ [ Wed Sep 14 22:54:27 2022 ] Batch(148/243) done. Loss: 0.1952 lr:0.100000 network_time: 0.0259
430
+ [ Wed Sep 14 22:55:36 2022 ] Eval epoch: 58
431
+ [ Wed Sep 14 22:57:10 2022 ] Mean test loss of 796 batches: 2.48647403717041.
432
+ [ Wed Sep 14 22:57:10 2022 ] Top1: 47.80%
433
+ [ Wed Sep 14 22:57:11 2022 ] Top5: 78.56%
434
+ [ Wed Sep 14 22:57:11 2022 ] Training epoch: 59
435
+ [ Wed Sep 14 22:57:18 2022 ] Batch(5/243) done. Loss: 0.2518 lr:0.100000 network_time: 0.0274
436
+ [ Wed Sep 14 22:58:30 2022 ] Batch(105/243) done. Loss: 0.3340 lr:0.100000 network_time: 0.0277
437
+ [ Wed Sep 14 22:59:43 2022 ] Batch(205/243) done. Loss: 0.2099 lr:0.100000 network_time: 0.0283
438
+ [ Wed Sep 14 23:00:10 2022 ] Eval epoch: 59
439
+ [ Wed Sep 14 23:01:45 2022 ] Mean test loss of 796 batches: 3.046830177307129.
440
+ [ Wed Sep 14 23:01:46 2022 ] Top1: 43.71%
441
+ [ Wed Sep 14 23:01:46 2022 ] Top5: 76.29%
442
+ [ Wed Sep 14 23:01:46 2022 ] Training epoch: 60
443
+ [ Wed Sep 14 23:02:34 2022 ] Batch(62/243) done. Loss: 0.2846 lr:0.100000 network_time: 0.0267
444
+ [ Wed Sep 14 23:03:47 2022 ] Batch(162/243) done. Loss: 0.2350 lr:0.100000 network_time: 0.0270
445
+ [ Wed Sep 14 23:04:45 2022 ] Eval epoch: 60
446
+ [ Wed Sep 14 23:06:19 2022 ] Mean test loss of 796 batches: 2.672731399536133.
447
+ [ Wed Sep 14 23:06:20 2022 ] Top1: 45.65%
448
+ [ Wed Sep 14 23:06:20 2022 ] Top5: 77.72%
449
+ [ Wed Sep 14 23:06:21 2022 ] Training epoch: 61
450
+ [ Wed Sep 14 23:06:38 2022 ] Batch(19/243) done. Loss: 0.2963 lr:0.010000 network_time: 0.0297
451
+ [ Wed Sep 14 23:07:50 2022 ] Batch(119/243) done. Loss: 0.1329 lr:0.010000 network_time: 0.0278
452
+ [ Wed Sep 14 23:09:03 2022 ] Batch(219/243) done. Loss: 0.0661 lr:0.010000 network_time: 0.0258
453
+ [ Wed Sep 14 23:09:20 2022 ] Eval epoch: 61
454
+ [ Wed Sep 14 23:10:54 2022 ] Mean test loss of 796 batches: 2.2649903297424316.
455
+ [ Wed Sep 14 23:10:55 2022 ] Top1: 52.45%
456
+ [ Wed Sep 14 23:10:55 2022 ] Top5: 82.87%
457
+ [ Wed Sep 14 23:10:55 2022 ] Training epoch: 62
458
+ [ Wed Sep 14 23:11:54 2022 ] Batch(76/243) done. Loss: 0.0351 lr:0.010000 network_time: 0.0317
459
+ [ Wed Sep 14 23:13:07 2022 ] Batch(176/243) done. Loss: 0.1236 lr:0.010000 network_time: 0.0274
460
+ [ Wed Sep 14 23:13:55 2022 ] Eval epoch: 62
461
+ [ Wed Sep 14 23:15:29 2022 ] Mean test loss of 796 batches: 2.304346799850464.
462
+ [ Wed Sep 14 23:15:30 2022 ] Top1: 52.60%
463
+ [ Wed Sep 14 23:15:30 2022 ] Top5: 82.88%
464
+ [ Wed Sep 14 23:15:30 2022 ] Training epoch: 63
465
+ [ Wed Sep 14 23:15:58 2022 ] Batch(33/243) done. Loss: 0.0665 lr:0.010000 network_time: 0.0267
466
+ [ Wed Sep 14 23:17:10 2022 ] Batch(133/243) done. Loss: 0.0376 lr:0.010000 network_time: 0.0266
467
+ [ Wed Sep 14 23:18:23 2022 ] Batch(233/243) done. Loss: 0.0498 lr:0.010000 network_time: 0.0271
468
+ [ Wed Sep 14 23:18:30 2022 ] Eval epoch: 63
469
+ [ Wed Sep 14 23:20:04 2022 ] Mean test loss of 796 batches: 2.3135626316070557.
470
+ [ Wed Sep 14 23:20:05 2022 ] Top1: 53.06%
471
+ [ Wed Sep 14 23:20:05 2022 ] Top5: 83.14%
472
+ [ Wed Sep 14 23:20:05 2022 ] Training epoch: 64
473
+ [ Wed Sep 14 23:21:14 2022 ] Batch(90/243) done. Loss: 0.0455 lr:0.010000 network_time: 0.0272
474
+ [ Wed Sep 14 23:22:27 2022 ] Batch(190/243) done. Loss: 0.0182 lr:0.010000 network_time: 0.0251
475
+ [ Wed Sep 14 23:23:05 2022 ] Eval epoch: 64
476
+ [ Wed Sep 14 23:24:39 2022 ] Mean test loss of 796 batches: 2.324078321456909.
477
+ [ Wed Sep 14 23:24:40 2022 ] Top1: 53.30%
478
+ [ Wed Sep 14 23:24:40 2022 ] Top5: 83.26%
479
+ [ Wed Sep 14 23:24:41 2022 ] Training epoch: 65
480
+ [ Wed Sep 14 23:25:18 2022 ] Batch(47/243) done. Loss: 0.0222 lr:0.010000 network_time: 0.0284
481
+ [ Wed Sep 14 23:26:31 2022 ] Batch(147/243) done. Loss: 0.1275 lr:0.010000 network_time: 0.0282
482
+ [ Wed Sep 14 23:27:40 2022 ] Eval epoch: 65
483
+ [ Wed Sep 14 23:29:14 2022 ] Mean test loss of 796 batches: 2.2940661907196045.
484
+ [ Wed Sep 14 23:29:15 2022 ] Top1: 53.94%
485
+ [ Wed Sep 14 23:29:15 2022 ] Top5: 83.59%
486
+ [ Wed Sep 14 23:29:15 2022 ] Training epoch: 66
487
+ [ Wed Sep 14 23:29:21 2022 ] Batch(4/243) done. Loss: 0.0503 lr:0.010000 network_time: 0.0273
488
+ [ Wed Sep 14 23:30:34 2022 ] Batch(104/243) done. Loss: 0.0199 lr:0.010000 network_time: 0.0265
489
+ [ Wed Sep 14 23:31:47 2022 ] Batch(204/243) done. Loss: 0.0553 lr:0.010000 network_time: 0.0272
490
+ [ Wed Sep 14 23:32:15 2022 ] Eval epoch: 66
491
+ [ Wed Sep 14 23:33:49 2022 ] Mean test loss of 796 batches: 2.326700448989868.
492
+ [ Wed Sep 14 23:33:50 2022 ] Top1: 53.68%
493
+ [ Wed Sep 14 23:33:50 2022 ] Top5: 83.44%
494
+ [ Wed Sep 14 23:33:50 2022 ] Training epoch: 67
495
+ [ Wed Sep 14 23:34:38 2022 ] Batch(61/243) done. Loss: 0.0315 lr:0.010000 network_time: 0.0284
496
+ [ Wed Sep 14 23:35:51 2022 ] Batch(161/243) done. Loss: 0.0202 lr:0.010000 network_time: 0.0287
497
+ [ Wed Sep 14 23:36:50 2022 ] Eval epoch: 67
498
+ [ Wed Sep 14 23:38:24 2022 ] Mean test loss of 796 batches: 2.369642972946167.
499
+ [ Wed Sep 14 23:38:24 2022 ] Top1: 53.27%
500
+ [ Wed Sep 14 23:38:25 2022 ] Top5: 83.38%
501
+ [ Wed Sep 14 23:38:25 2022 ] Training epoch: 68
502
+ [ Wed Sep 14 23:38:41 2022 ] Batch(18/243) done. Loss: 0.0330 lr:0.010000 network_time: 0.0361
503
+ [ Wed Sep 14 23:39:54 2022 ] Batch(118/243) done. Loss: 0.0168 lr:0.010000 network_time: 0.0333
504
+ [ Wed Sep 14 23:41:07 2022 ] Batch(218/243) done. Loss: 0.0458 lr:0.010000 network_time: 0.0287
505
+ [ Wed Sep 14 23:41:24 2022 ] Eval epoch: 68
506
+ [ Wed Sep 14 23:42:58 2022 ] Mean test loss of 796 batches: 2.3233730792999268.
507
+ [ Wed Sep 14 23:42:58 2022 ] Top1: 53.88%
508
+ [ Wed Sep 14 23:42:59 2022 ] Top5: 83.64%
509
+ [ Wed Sep 14 23:42:59 2022 ] Training epoch: 69
510
+ [ Wed Sep 14 23:43:57 2022 ] Batch(75/243) done. Loss: 0.0249 lr:0.010000 network_time: 0.0281
511
+ [ Wed Sep 14 23:45:10 2022 ] Batch(175/243) done. Loss: 0.0364 lr:0.010000 network_time: 0.0272
512
+ [ Wed Sep 14 23:45:59 2022 ] Eval epoch: 69
513
+ [ Wed Sep 14 23:47:33 2022 ] Mean test loss of 796 batches: 2.3240044116973877.
514
+ [ Wed Sep 14 23:47:34 2022 ] Top1: 53.81%
515
+ [ Wed Sep 14 23:47:34 2022 ] Top5: 83.58%
516
+ [ Wed Sep 14 23:47:34 2022 ] Training epoch: 70
517
+ [ Wed Sep 14 23:48:00 2022 ] Batch(32/243) done. Loss: 0.0142 lr:0.010000 network_time: 0.0316
518
+ [ Wed Sep 14 23:49:13 2022 ] Batch(132/243) done. Loss: 0.0259 lr:0.010000 network_time: 0.0279
519
+ [ Wed Sep 14 23:50:26 2022 ] Batch(232/243) done. Loss: 0.0082 lr:0.010000 network_time: 0.0283
520
+ [ Wed Sep 14 23:50:33 2022 ] Eval epoch: 70
521
+ [ Wed Sep 14 23:52:08 2022 ] Mean test loss of 796 batches: 2.3367578983306885.
522
+ [ Wed Sep 14 23:52:08 2022 ] Top1: 54.09%
523
+ [ Wed Sep 14 23:52:09 2022 ] Top5: 83.63%
524
+ [ Wed Sep 14 23:52:09 2022 ] Training epoch: 71
525
+ [ Wed Sep 14 23:53:17 2022 ] Batch(89/243) done. Loss: 0.0175 lr:0.010000 network_time: 0.0269
526
+ [ Wed Sep 14 23:54:30 2022 ] Batch(189/243) done. Loss: 0.0308 lr:0.010000 network_time: 0.0320
527
+ [ Wed Sep 14 23:55:08 2022 ] Eval epoch: 71
528
+ [ Wed Sep 14 23:56:43 2022 ] Mean test loss of 796 batches: 2.4088292121887207.
529
+ [ Wed Sep 14 23:56:43 2022 ] Top1: 52.97%
530
+ [ Wed Sep 14 23:56:43 2022 ] Top5: 82.94%
531
+ [ Wed Sep 14 23:56:43 2022 ] Training epoch: 72
532
+ [ Wed Sep 14 23:57:20 2022 ] Batch(46/243) done. Loss: 0.0128 lr:0.010000 network_time: 0.0322
533
+ [ Wed Sep 14 23:58:33 2022 ] Batch(146/243) done. Loss: 0.0299 lr:0.010000 network_time: 0.0311
534
+ [ Wed Sep 14 23:59:43 2022 ] Eval epoch: 72
535
+ [ Thu Sep 15 00:01:17 2022 ] Mean test loss of 796 batches: 2.378607749938965.
536
+ [ Thu Sep 15 00:01:18 2022 ] Top1: 53.90%
537
+ [ Thu Sep 15 00:01:18 2022 ] Top5: 83.48%
538
+ [ Thu Sep 15 00:01:18 2022 ] Training epoch: 73
539
+ [ Thu Sep 15 00:01:24 2022 ] Batch(3/243) done. Loss: 0.0092 lr:0.010000 network_time: 0.0333
540
+ [ Thu Sep 15 00:02:36 2022 ] Batch(103/243) done. Loss: 0.0151 lr:0.010000 network_time: 0.0275
541
+ [ Thu Sep 15 00:03:49 2022 ] Batch(203/243) done. Loss: 0.0208 lr:0.010000 network_time: 0.0284
542
+ [ Thu Sep 15 00:04:18 2022 ] Eval epoch: 73
543
+ [ Thu Sep 15 00:05:52 2022 ] Mean test loss of 796 batches: 2.3335354328155518.
544
+ [ Thu Sep 15 00:05:53 2022 ] Top1: 54.40%
545
+ [ Thu Sep 15 00:05:53 2022 ] Top5: 83.74%
546
+ [ Thu Sep 15 00:05:53 2022 ] Training epoch: 74
547
+ [ Thu Sep 15 00:06:40 2022 ] Batch(60/243) done. Loss: 0.0226 lr:0.010000 network_time: 0.0270
548
+ [ Thu Sep 15 00:07:53 2022 ] Batch(160/243) done. Loss: 0.0229 lr:0.010000 network_time: 0.0280
549
+ [ Thu Sep 15 00:08:53 2022 ] Eval epoch: 74
550
+ [ Thu Sep 15 00:10:27 2022 ] Mean test loss of 796 batches: 2.417653799057007.
551
+ [ Thu Sep 15 00:10:27 2022 ] Top1: 53.48%
552
+ [ Thu Sep 15 00:10:28 2022 ] Top5: 83.25%
553
+ [ Thu Sep 15 00:10:29 2022 ] Training epoch: 75
554
+ [ Thu Sep 15 00:10:44 2022 ] Batch(17/243) done. Loss: 0.0171 lr:0.010000 network_time: 0.0279
555
+ [ Thu Sep 15 00:11:57 2022 ] Batch(117/243) done. Loss: 0.0161 lr:0.010000 network_time: 0.0274
556
+ [ Thu Sep 15 00:13:10 2022 ] Batch(217/243) done. Loss: 0.0076 lr:0.010000 network_time: 0.0333
557
+ [ Thu Sep 15 00:13:28 2022 ] Eval epoch: 75
558
+ [ Thu Sep 15 00:15:02 2022 ] Mean test loss of 796 batches: 2.3814995288848877.
559
+ [ Thu Sep 15 00:15:02 2022 ] Top1: 53.89%
560
+ [ Thu Sep 15 00:15:03 2022 ] Top5: 83.51%
561
+ [ Thu Sep 15 00:15:03 2022 ] Training epoch: 76
562
+ [ Thu Sep 15 00:16:00 2022 ] Batch(74/243) done. Loss: 0.0306 lr:0.010000 network_time: 0.0282
563
+ [ Thu Sep 15 00:17:13 2022 ] Batch(174/243) done. Loss: 0.0138 lr:0.010000 network_time: 0.0271
564
+ [ Thu Sep 15 00:18:02 2022 ] Eval epoch: 76
565
+ [ Thu Sep 15 00:19:36 2022 ] Mean test loss of 796 batches: 2.4096171855926514.
566
+ [ Thu Sep 15 00:19:37 2022 ] Top1: 53.70%
567
+ [ Thu Sep 15 00:19:37 2022 ] Top5: 83.48%
568
+ [ Thu Sep 15 00:19:37 2022 ] Training epoch: 77
569
+ [ Thu Sep 15 00:20:03 2022 ] Batch(31/243) done. Loss: 0.0179 lr:0.010000 network_time: 0.0348
570
+ [ Thu Sep 15 00:21:16 2022 ] Batch(131/243) done. Loss: 0.0148 lr:0.010000 network_time: 0.0276
571
+ [ Thu Sep 15 00:22:29 2022 ] Batch(231/243) done. Loss: 0.0238 lr:0.010000 network_time: 0.0473
572
+ [ Thu Sep 15 00:22:37 2022 ] Eval epoch: 77
573
+ [ Thu Sep 15 00:24:11 2022 ] Mean test loss of 796 batches: 2.3831140995025635.
574
+ [ Thu Sep 15 00:24:11 2022 ] Top1: 54.25%
575
+ [ Thu Sep 15 00:24:12 2022 ] Top5: 83.61%
576
+ [ Thu Sep 15 00:24:12 2022 ] Training epoch: 78
577
+ [ Thu Sep 15 00:25:19 2022 ] Batch(88/243) done. Loss: 0.0447 lr:0.010000 network_time: 0.0364
578
+ [ Thu Sep 15 00:26:32 2022 ] Batch(188/243) done. Loss: 0.0100 lr:0.010000 network_time: 0.0273
579
+ [ Thu Sep 15 00:27:12 2022 ] Eval epoch: 78
580
+ [ Thu Sep 15 00:28:46 2022 ] Mean test loss of 796 batches: 2.429424285888672.
581
+ [ Thu Sep 15 00:28:46 2022 ] Top1: 53.82%
582
+ [ Thu Sep 15 00:28:47 2022 ] Top5: 83.37%
583
+ [ Thu Sep 15 00:28:47 2022 ] Training epoch: 79
584
+ [ Thu Sep 15 00:29:23 2022 ] Batch(45/243) done. Loss: 0.0190 lr:0.010000 network_time: 0.0356
585
+ [ Thu Sep 15 00:30:36 2022 ] Batch(145/243) done. Loss: 0.0106 lr:0.010000 network_time: 0.0277
586
+ [ Thu Sep 15 00:31:47 2022 ] Eval epoch: 79
587
+ [ Thu Sep 15 00:33:21 2022 ] Mean test loss of 796 batches: 2.470181465148926.
588
+ [ Thu Sep 15 00:33:21 2022 ] Top1: 53.71%
589
+ [ Thu Sep 15 00:33:22 2022 ] Top5: 83.12%
590
+ [ Thu Sep 15 00:33:22 2022 ] Training epoch: 80
591
+ [ Thu Sep 15 00:33:27 2022 ] Batch(2/243) done. Loss: 0.0179 lr:0.010000 network_time: 0.0323
592
+ [ Thu Sep 15 00:34:39 2022 ] Batch(102/243) done. Loss: 0.0104 lr:0.010000 network_time: 0.0280
593
+ [ Thu Sep 15 00:35:52 2022 ] Batch(202/243) done. Loss: 0.0254 lr:0.010000 network_time: 0.0310
594
+ [ Thu Sep 15 00:36:21 2022 ] Eval epoch: 80
595
+ [ Thu Sep 15 00:37:56 2022 ] Mean test loss of 796 batches: 2.436082601547241.
596
+ [ Thu Sep 15 00:37:57 2022 ] Top1: 53.77%
597
+ [ Thu Sep 15 00:37:57 2022 ] Top5: 83.38%
598
+ [ Thu Sep 15 00:37:58 2022 ] Training epoch: 81
599
+ [ Thu Sep 15 00:38:44 2022 ] Batch(59/243) done. Loss: 0.0275 lr:0.001000 network_time: 0.0314
600
+ [ Thu Sep 15 00:39:56 2022 ] Batch(159/243) done. Loss: 0.0067 lr:0.001000 network_time: 0.0274
601
+ [ Thu Sep 15 00:40:57 2022 ] Eval epoch: 81
602
+ [ Thu Sep 15 00:42:31 2022 ] Mean test loss of 796 batches: 2.405709981918335.
603
+ [ Thu Sep 15 00:42:32 2022 ] Top1: 53.71%
604
+ [ Thu Sep 15 00:42:33 2022 ] Top5: 83.41%
605
+ [ Thu Sep 15 00:42:33 2022 ] Training epoch: 82
606
+ [ Thu Sep 15 00:42:48 2022 ] Batch(16/243) done. Loss: 0.0293 lr:0.001000 network_time: 0.0645
607
+ [ Thu Sep 15 00:44:00 2022 ] Batch(116/243) done. Loss: 0.0058 lr:0.001000 network_time: 0.0276
608
+ [ Thu Sep 15 00:45:13 2022 ] Batch(216/243) done. Loss: 0.0168 lr:0.001000 network_time: 0.0275
609
+ [ Thu Sep 15 00:45:32 2022 ] Eval epoch: 82
610
+ [ Thu Sep 15 00:47:07 2022 ] Mean test loss of 796 batches: 2.4298956394195557.
611
+ [ Thu Sep 15 00:47:07 2022 ] Top1: 53.66%
612
+ [ Thu Sep 15 00:47:07 2022 ] Top5: 83.36%
613
+ [ Thu Sep 15 00:47:08 2022 ] Training epoch: 83
614
+ [ Thu Sep 15 00:48:04 2022 ] Batch(73/243) done. Loss: 0.0350 lr:0.001000 network_time: 0.0306
615
+ [ Thu Sep 15 00:49:16 2022 ] Batch(173/243) done. Loss: 0.0067 lr:0.001000 network_time: 0.0314
616
+ [ Thu Sep 15 00:50:07 2022 ] Eval epoch: 83
617
+ [ Thu Sep 15 00:51:41 2022 ] Mean test loss of 796 batches: 2.4424381256103516.
618
+ [ Thu Sep 15 00:51:41 2022 ] Top1: 53.44%
619
+ [ Thu Sep 15 00:51:41 2022 ] Top5: 83.01%
620
+ [ Thu Sep 15 00:51:42 2022 ] Training epoch: 84
621
+ [ Thu Sep 15 00:52:07 2022 ] Batch(30/243) done. Loss: 0.0322 lr:0.001000 network_time: 0.0310
622
+ [ Thu Sep 15 00:53:19 2022 ] Batch(130/243) done. Loss: 0.0077 lr:0.001000 network_time: 0.0262
623
+ [ Thu Sep 15 00:54:32 2022 ] Batch(230/243) done. Loss: 0.0145 lr:0.001000 network_time: 0.0306
624
+ [ Thu Sep 15 00:54:41 2022 ] Eval epoch: 84
625
+ [ Thu Sep 15 00:56:16 2022 ] Mean test loss of 796 batches: 2.3991408348083496.
626
+ [ Thu Sep 15 00:56:16 2022 ] Top1: 54.34%
627
+ [ Thu Sep 15 00:56:16 2022 ] Top5: 83.64%
628
+ [ Thu Sep 15 00:56:17 2022 ] Training epoch: 85
629
+ [ Thu Sep 15 00:57:23 2022 ] Batch(87/243) done. Loss: 0.0150 lr:0.001000 network_time: 0.0257
630
+ [ Thu Sep 15 00:58:36 2022 ] Batch(187/243) done. Loss: 0.0192 lr:0.001000 network_time: 0.0315
631
+ [ Thu Sep 15 00:59:16 2022 ] Eval epoch: 85
632
+ [ Thu Sep 15 01:00:50 2022 ] Mean test loss of 796 batches: 2.388627290725708.
633
+ [ Thu Sep 15 01:00:51 2022 ] Top1: 54.17%
634
+ [ Thu Sep 15 01:00:51 2022 ] Top5: 83.64%
635
+ [ Thu Sep 15 01:00:51 2022 ] Training epoch: 86
636
+ [ Thu Sep 15 01:01:26 2022 ] Batch(44/243) done. Loss: 0.0126 lr:0.001000 network_time: 0.0272
637
+ [ Thu Sep 15 01:02:39 2022 ] Batch(144/243) done. Loss: 0.0083 lr:0.001000 network_time: 0.0273
638
+ [ Thu Sep 15 01:03:51 2022 ] Eval epoch: 86
639
+ [ Thu Sep 15 01:05:25 2022 ] Mean test loss of 796 batches: 2.4415183067321777.
640
+ [ Thu Sep 15 01:05:25 2022 ] Top1: 53.66%
641
+ [ Thu Sep 15 01:05:25 2022 ] Top5: 83.21%
642
+ [ Thu Sep 15 01:05:26 2022 ] Training epoch: 87
643
+ [ Thu Sep 15 01:05:30 2022 ] Batch(1/243) done. Loss: 0.0151 lr:0.001000 network_time: 0.0328
644
+ [ Thu Sep 15 01:06:42 2022 ] Batch(101/243) done. Loss: 0.0092 lr:0.001000 network_time: 0.0268
645
+ [ Thu Sep 15 01:07:55 2022 ] Batch(201/243) done. Loss: 0.0195 lr:0.001000 network_time: 0.0277
646
+ [ Thu Sep 15 01:08:25 2022 ] Eval epoch: 87
647
+ [ Thu Sep 15 01:09:59 2022 ] Mean test loss of 796 batches: 2.3931989669799805.
648
+ [ Thu Sep 15 01:09:59 2022 ] Top1: 54.22%
649
+ [ Thu Sep 15 01:10:00 2022 ] Top5: 83.68%
650
+ [ Thu Sep 15 01:10:00 2022 ] Training epoch: 88
651
+ [ Thu Sep 15 01:10:45 2022 ] Batch(58/243) done. Loss: 0.0041 lr:0.001000 network_time: 0.0272
652
+ [ Thu Sep 15 01:11:58 2022 ] Batch(158/243) done. Loss: 0.0124 lr:0.001000 network_time: 0.0261
653
+ [ Thu Sep 15 01:12:59 2022 ] Eval epoch: 88
654
+ [ Thu Sep 15 01:14:33 2022 ] Mean test loss of 796 batches: 2.4158730506896973.
655
+ [ Thu Sep 15 01:14:34 2022 ] Top1: 53.63%
656
+ [ Thu Sep 15 01:14:34 2022 ] Top5: 83.43%
657
+ [ Thu Sep 15 01:14:34 2022 ] Training epoch: 89
658
+ [ Thu Sep 15 01:14:49 2022 ] Batch(15/243) done. Loss: 0.0167 lr:0.001000 network_time: 0.0307
659
+ [ Thu Sep 15 01:16:01 2022 ] Batch(115/243) done. Loss: 0.0156 lr:0.001000 network_time: 0.0321
660
+ [ Thu Sep 15 01:17:14 2022 ] Batch(215/243) done. Loss: 0.0198 lr:0.001000 network_time: 0.0273
661
+ [ Thu Sep 15 01:17:34 2022 ] Eval epoch: 89
662
+ [ Thu Sep 15 01:19:08 2022 ] Mean test loss of 796 batches: 2.423422336578369.
663
+ [ Thu Sep 15 01:19:08 2022 ] Top1: 53.97%
664
+ [ Thu Sep 15 01:19:08 2022 ] Top5: 83.38%
665
+ [ Thu Sep 15 01:19:09 2022 ] Training epoch: 90
666
+ [ Thu Sep 15 01:20:04 2022 ] Batch(72/243) done. Loss: 0.0045 lr:0.001000 network_time: 0.0310
667
+ [ Thu Sep 15 01:21:17 2022 ] Batch(172/243) done. Loss: 0.0155 lr:0.001000 network_time: 0.0261
668
+ [ Thu Sep 15 01:22:08 2022 ] Eval epoch: 90
669
+ [ Thu Sep 15 01:23:42 2022 ] Mean test loss of 796 batches: 2.404895782470703.
670
+ [ Thu Sep 15 01:23:43 2022 ] Top1: 54.01%
671
+ [ Thu Sep 15 01:23:43 2022 ] Top5: 83.52%
672
+ [ Thu Sep 15 01:23:43 2022 ] Training epoch: 91
673
+ [ Thu Sep 15 01:24:08 2022 ] Batch(29/243) done. Loss: 0.0065 lr:0.001000 network_time: 0.0261
674
+ [ Thu Sep 15 01:25:21 2022 ] Batch(129/243) done. Loss: 0.0080 lr:0.001000 network_time: 0.0266
675
+ [ Thu Sep 15 01:26:33 2022 ] Batch(229/243) done. Loss: 0.0078 lr:0.001000 network_time: 0.0277
676
+ [ Thu Sep 15 01:26:43 2022 ] Eval epoch: 91
677
+ [ Thu Sep 15 01:28:17 2022 ] Mean test loss of 796 batches: 2.4108152389526367.
678
+ [ Thu Sep 15 01:28:17 2022 ] Top1: 54.08%
679
+ [ Thu Sep 15 01:28:18 2022 ] Top5: 83.47%
680
+ [ Thu Sep 15 01:28:18 2022 ] Training epoch: 92
681
+ [ Thu Sep 15 01:29:24 2022 ] Batch(86/243) done. Loss: 0.0084 lr:0.001000 network_time: 0.0262
682
+ [ Thu Sep 15 01:30:36 2022 ] Batch(186/243) done. Loss: 0.0131 lr:0.001000 network_time: 0.0280
683
+ [ Thu Sep 15 01:31:17 2022 ] Eval epoch: 92
684
+ [ Thu Sep 15 01:32:52 2022 ] Mean test loss of 796 batches: 2.399904727935791.
685
+ [ Thu Sep 15 01:32:52 2022 ] Top1: 54.41%
686
+ [ Thu Sep 15 01:32:53 2022 ] Top5: 83.67%
687
+ [ Thu Sep 15 01:32:53 2022 ] Training epoch: 93
688
+ [ Thu Sep 15 01:33:27 2022 ] Batch(43/243) done. Loss: 0.0037 lr:0.001000 network_time: 0.0279
689
+ [ Thu Sep 15 01:34:40 2022 ] Batch(143/243) done. Loss: 0.0218 lr:0.001000 network_time: 0.0304
690
+ [ Thu Sep 15 01:35:52 2022 ] Eval epoch: 93
691
+ [ Thu Sep 15 01:37:26 2022 ] Mean test loss of 796 batches: 2.442288398742676.
692
+ [ Thu Sep 15 01:37:26 2022 ] Top1: 53.80%
693
+ [ Thu Sep 15 01:37:27 2022 ] Top5: 83.27%
694
+ [ Thu Sep 15 01:37:27 2022 ] Training epoch: 94
695
+ [ Thu Sep 15 01:37:30 2022 ] Batch(0/243) done. Loss: 0.0418 lr:0.001000 network_time: 0.0714
696
+ [ Thu Sep 15 01:38:43 2022 ] Batch(100/243) done. Loss: 0.0049 lr:0.001000 network_time: 0.0263
697
+ [ Thu Sep 15 01:39:56 2022 ] Batch(200/243) done. Loss: 0.0088 lr:0.001000 network_time: 0.0297
698
+ [ Thu Sep 15 01:40:26 2022 ] Eval epoch: 94
699
+ [ Thu Sep 15 01:42:01 2022 ] Mean test loss of 796 batches: 2.4109578132629395.
700
+ [ Thu Sep 15 01:42:02 2022 ] Top1: 53.79%
701
+ [ Thu Sep 15 01:42:02 2022 ] Top5: 83.39%
702
+ [ Thu Sep 15 01:42:02 2022 ] Training epoch: 95
703
+ [ Thu Sep 15 01:42:47 2022 ] Batch(57/243) done. Loss: 0.0056 lr:0.001000 network_time: 0.0256
704
+ [ Thu Sep 15 01:44:00 2022 ] Batch(157/243) done. Loss: 0.0071 lr:0.001000 network_time: 0.0262
705
+ [ Thu Sep 15 01:45:02 2022 ] Eval epoch: 95
706
+ [ Thu Sep 15 01:46:37 2022 ] Mean test loss of 796 batches: 2.416574001312256.
707
+ [ Thu Sep 15 01:46:37 2022 ] Top1: 53.87%
708
+ [ Thu Sep 15 01:46:38 2022 ] Top5: 83.41%
709
+ [ Thu Sep 15 01:46:38 2022 ] Training epoch: 96
710
+ [ Thu Sep 15 01:46:51 2022 ] Batch(14/243) done. Loss: 0.0216 lr:0.001000 network_time: 0.0610
711
+ [ Thu Sep 15 01:48:04 2022 ] Batch(114/243) done. Loss: 0.0123 lr:0.001000 network_time: 0.0275
712
+ [ Thu Sep 15 01:49:17 2022 ] Batch(214/243) done. Loss: 0.0047 lr:0.001000 network_time: 0.0274
713
+ [ Thu Sep 15 01:49:37 2022 ] Eval epoch: 96
714
+ [ Thu Sep 15 01:51:11 2022 ] Mean test loss of 796 batches: 2.443953514099121.
715
+ [ Thu Sep 15 01:51:12 2022 ] Top1: 53.56%
716
+ [ Thu Sep 15 01:51:12 2022 ] Top5: 83.21%
717
+ [ Thu Sep 15 01:51:12 2022 ] Training epoch: 97
718
+ [ Thu Sep 15 01:52:07 2022 ] Batch(71/243) done. Loss: 0.0031 lr:0.001000 network_time: 0.0323
719
+ [ Thu Sep 15 01:53:20 2022 ] Batch(171/243) done. Loss: 0.0060 lr:0.001000 network_time: 0.0280
720
+ [ Thu Sep 15 01:54:12 2022 ] Eval epoch: 97
721
+ [ Thu Sep 15 01:55:47 2022 ] Mean test loss of 796 batches: 2.4506213665008545.
722
+ [ Thu Sep 15 01:55:47 2022 ] Top1: 53.61%
723
+ [ Thu Sep 15 01:55:47 2022 ] Top5: 83.26%
724
+ [ Thu Sep 15 01:55:48 2022 ] Training epoch: 98
725
+ [ Thu Sep 15 01:56:11 2022 ] Batch(28/243) done. Loss: 0.0082 lr:0.001000 network_time: 0.0274
726
+ [ Thu Sep 15 01:57:24 2022 ] Batch(128/243) done. Loss: 0.0251 lr:0.001000 network_time: 0.0305
727
+ [ Thu Sep 15 01:58:37 2022 ] Batch(228/243) done. Loss: 0.0272 lr:0.001000 network_time: 0.0311
728
+ [ Thu Sep 15 01:58:47 2022 ] Eval epoch: 98
729
+ [ Thu Sep 15 02:00:21 2022 ] Mean test loss of 796 batches: 2.4064249992370605.
730
+ [ Thu Sep 15 02:00:22 2022 ] Top1: 54.19%
731
+ [ Thu Sep 15 02:00:22 2022 ] Top5: 83.56%
732
+ [ Thu Sep 15 02:00:22 2022 ] Training epoch: 99
733
+ [ Thu Sep 15 02:01:27 2022 ] Batch(85/243) done. Loss: 0.0195 lr:0.001000 network_time: 0.0282
734
+ [ Thu Sep 15 02:02:40 2022 ] Batch(185/243) done. Loss: 0.0101 lr:0.001000 network_time: 0.0315
735
+ [ Thu Sep 15 02:03:21 2022 ] Eval epoch: 99
736
+ [ Thu Sep 15 02:04:55 2022 ] Mean test loss of 796 batches: 2.4065332412719727.
737
+ [ Thu Sep 15 02:04:56 2022 ] Top1: 54.13%
738
+ [ Thu Sep 15 02:04:56 2022 ] Top5: 83.58%
739
+ [ Thu Sep 15 02:04:56 2022 ] Training epoch: 100
740
+ [ Thu Sep 15 02:05:30 2022 ] Batch(42/243) done. Loss: 0.0187 lr:0.001000 network_time: 0.0292
741
+ [ Thu Sep 15 02:06:43 2022 ] Batch(142/243) done. Loss: 0.0032 lr:0.001000 network_time: 0.0285
742
+ [ Thu Sep 15 02:07:55 2022 ] Batch(242/243) done. Loss: 0.0084 lr:0.001000 network_time: 0.0274
743
+ [ Thu Sep 15 02:07:56 2022 ] Eval epoch: 100
744
+ [ Thu Sep 15 02:09:30 2022 ] Mean test loss of 796 batches: 2.4123075008392334.
745
+ [ Thu Sep 15 02:09:30 2022 ] Top1: 54.21%
746
+ [ Thu Sep 15 02:09:30 2022 ] Top5: 83.67%
747
+ [ Thu Sep 15 02:09:31 2022 ] Training epoch: 101
748
+ [ Thu Sep 15 02:10:46 2022 ] Batch(99/243) done. Loss: 0.0112 lr:0.000100 network_time: 0.0311
749
+ [ Thu Sep 15 02:11:58 2022 ] Batch(199/243) done. Loss: 0.0069 lr:0.000100 network_time: 0.0312
750
+ [ Thu Sep 15 02:12:30 2022 ] Eval epoch: 101
751
+ [ Thu Sep 15 02:14:04 2022 ] Mean test loss of 796 batches: 2.413454055786133.
752
+ [ Thu Sep 15 02:14:04 2022 ] Top1: 54.20%
753
+ [ Thu Sep 15 02:14:05 2022 ] Top5: 83.54%
754
+ [ Thu Sep 15 02:14:05 2022 ] Training epoch: 102
755
+ [ Thu Sep 15 02:14:49 2022 ] Batch(56/243) done. Loss: 0.0052 lr:0.000100 network_time: 0.0307
756
+ [ Thu Sep 15 02:16:01 2022 ] Batch(156/243) done. Loss: 0.0132 lr:0.000100 network_time: 0.0269
757
+ [ Thu Sep 15 02:17:04 2022 ] Eval epoch: 102
758
+ [ Thu Sep 15 02:18:38 2022 ] Mean test loss of 796 batches: 2.4486000537872314.
759
+ [ Thu Sep 15 02:18:39 2022 ] Top1: 53.84%
760
+ [ Thu Sep 15 02:18:39 2022 ] Top5: 83.29%
761
+ [ Thu Sep 15 02:18:39 2022 ] Training epoch: 103
762
+ [ Thu Sep 15 02:18:52 2022 ] Batch(13/243) done. Loss: 0.0056 lr:0.000100 network_time: 0.0281
763
+ [ Thu Sep 15 02:20:05 2022 ] Batch(113/243) done. Loss: 0.0058 lr:0.000100 network_time: 0.0279
764
+ [ Thu Sep 15 02:21:17 2022 ] Batch(213/243) done. Loss: 0.0091 lr:0.000100 network_time: 0.0270
765
+ [ Thu Sep 15 02:21:39 2022 ] Eval epoch: 103
766
+ [ Thu Sep 15 02:23:12 2022 ] Mean test loss of 796 batches: 2.4214422702789307.
767
+ [ Thu Sep 15 02:23:13 2022 ] Top1: 53.80%
768
+ [ Thu Sep 15 02:23:14 2022 ] Top5: 83.33%
769
+ [ Thu Sep 15 02:23:14 2022 ] Training epoch: 104
770
+ [ Thu Sep 15 02:24:08 2022 ] Batch(70/243) done. Loss: 0.0093 lr:0.000100 network_time: 0.0286
771
+ [ Thu Sep 15 02:25:21 2022 ] Batch(170/243) done. Loss: 0.0136 lr:0.000100 network_time: 0.0276
772
+ [ Thu Sep 15 02:26:13 2022 ] Eval epoch: 104
773
+ [ Thu Sep 15 02:27:47 2022 ] Mean test loss of 796 batches: 2.4433953762054443.
774
+ [ Thu Sep 15 02:27:47 2022 ] Top1: 53.95%
775
+ [ Thu Sep 15 02:27:48 2022 ] Top5: 83.43%
776
+ [ Thu Sep 15 02:27:48 2022 ] Training epoch: 105
777
+ [ Thu Sep 15 02:28:11 2022 ] Batch(27/243) done. Loss: 0.0233 lr:0.000100 network_time: 0.0272
778
+ [ Thu Sep 15 02:29:24 2022 ] Batch(127/243) done. Loss: 0.0035 lr:0.000100 network_time: 0.0274
779
+ [ Thu Sep 15 02:30:36 2022 ] Batch(227/243) done. Loss: 0.0237 lr:0.000100 network_time: 0.0299
780
+ [ Thu Sep 15 02:30:47 2022 ] Eval epoch: 105
781
+ [ Thu Sep 15 02:32:21 2022 ] Mean test loss of 796 batches: 2.4214446544647217.
782
+ [ Thu Sep 15 02:32:21 2022 ] Top1: 53.84%
783
+ [ Thu Sep 15 02:32:22 2022 ] Top5: 83.38%
784
+ [ Thu Sep 15 02:32:22 2022 ] Training epoch: 106
785
+ [ Thu Sep 15 02:33:26 2022 ] Batch(84/243) done. Loss: 0.0060 lr:0.000100 network_time: 0.0311
786
+ [ Thu Sep 15 02:34:39 2022 ] Batch(184/243) done. Loss: 0.0091 lr:0.000100 network_time: 0.0271
787
+ [ Thu Sep 15 02:35:21 2022 ] Eval epoch: 106
788
+ [ Thu Sep 15 02:36:55 2022 ] Mean test loss of 796 batches: 2.4069089889526367.
789
+ [ Thu Sep 15 02:36:55 2022 ] Top1: 54.07%
790
+ [ Thu Sep 15 02:36:56 2022 ] Top5: 83.57%
791
+ [ Thu Sep 15 02:36:56 2022 ] Training epoch: 107
792
+ [ Thu Sep 15 02:37:30 2022 ] Batch(41/243) done. Loss: 0.0087 lr:0.000100 network_time: 0.0273
793
+ [ Thu Sep 15 02:38:42 2022 ] Batch(141/243) done. Loss: 0.0113 lr:0.000100 network_time: 0.0438
794
+ [ Thu Sep 15 02:39:55 2022 ] Batch(241/243) done. Loss: 0.0101 lr:0.000100 network_time: 0.0268
795
+ [ Thu Sep 15 02:39:56 2022 ] Eval epoch: 107
796
+ [ Thu Sep 15 02:41:30 2022 ] Mean test loss of 796 batches: 2.4322874546051025.
797
+ [ Thu Sep 15 02:41:31 2022 ] Top1: 53.90%
798
+ [ Thu Sep 15 02:41:31 2022 ] Top5: 83.52%
799
+ [ Thu Sep 15 02:41:31 2022 ] Training epoch: 108
800
+ [ Thu Sep 15 02:42:46 2022 ] Batch(98/243) done. Loss: 0.0044 lr:0.000100 network_time: 0.0276
801
+ [ Thu Sep 15 02:43:58 2022 ] Batch(198/243) done. Loss: 0.0058 lr:0.000100 network_time: 0.0274
802
+ [ Thu Sep 15 02:44:30 2022 ] Eval epoch: 108
803
+ [ Thu Sep 15 02:46:04 2022 ] Mean test loss of 796 batches: 2.3948800563812256.
804
+ [ Thu Sep 15 02:46:04 2022 ] Top1: 54.13%
805
+ [ Thu Sep 15 02:46:05 2022 ] Top5: 83.52%
806
+ [ Thu Sep 15 02:46:05 2022 ] Training epoch: 109
807
+ [ Thu Sep 15 02:46:49 2022 ] Batch(55/243) done. Loss: 0.0107 lr:0.000100 network_time: 0.0332
808
+ [ Thu Sep 15 02:48:01 2022 ] Batch(155/243) done. Loss: 0.0194 lr:0.000100 network_time: 0.0330
809
+ [ Thu Sep 15 02:49:05 2022 ] Eval epoch: 109
810
+ [ Thu Sep 15 02:50:38 2022 ] Mean test loss of 796 batches: 2.4319751262664795.
811
+ [ Thu Sep 15 02:50:39 2022 ] Top1: 53.66%
812
+ [ Thu Sep 15 02:50:40 2022 ] Top5: 83.33%
813
+ [ Thu Sep 15 02:50:40 2022 ] Training epoch: 110
814
+ [ Thu Sep 15 02:50:52 2022 ] Batch(12/243) done. Loss: 0.0097 lr:0.000100 network_time: 0.0330
815
+ [ Thu Sep 15 02:52:05 2022 ] Batch(112/243) done. Loss: 0.0098 lr:0.000100 network_time: 0.0284
816
+ [ Thu Sep 15 02:53:17 2022 ] Batch(212/243) done. Loss: 0.0089 lr:0.000100 network_time: 0.0325
817
+ [ Thu Sep 15 02:53:39 2022 ] Eval epoch: 110
818
+ [ Thu Sep 15 02:55:14 2022 ] Mean test loss of 796 batches: 2.4360392093658447.
819
+ [ Thu Sep 15 02:55:14 2022 ] Top1: 53.55%
820
+ [ Thu Sep 15 02:55:14 2022 ] Top5: 83.17%
821
+ [ Thu Sep 15 02:55:15 2022 ] Training epoch: 111
822
+ [ Thu Sep 15 02:56:08 2022 ] Batch(69/243) done. Loss: 0.0112 lr:0.000100 network_time: 0.0260
823
+ [ Thu Sep 15 02:57:21 2022 ] Batch(169/243) done. Loss: 0.0484 lr:0.000100 network_time: 0.0275
824
+ [ Thu Sep 15 02:58:14 2022 ] Eval epoch: 111
825
+ [ Thu Sep 15 02:59:48 2022 ] Mean test loss of 796 batches: 2.4042062759399414.
826
+ [ Thu Sep 15 02:59:48 2022 ] Top1: 53.91%
827
+ [ Thu Sep 15 02:59:48 2022 ] Top5: 83.49%
828
+ [ Thu Sep 15 02:59:49 2022 ] Training epoch: 112
829
+ [ Thu Sep 15 03:00:11 2022 ] Batch(26/243) done. Loss: 0.0138 lr:0.000100 network_time: 0.0270
830
+ [ Thu Sep 15 03:01:23 2022 ] Batch(126/243) done. Loss: 0.0057 lr:0.000100 network_time: 0.0274
831
+ [ Thu Sep 15 03:02:36 2022 ] Batch(226/243) done. Loss: 0.0137 lr:0.000100 network_time: 0.0321
832
+ [ Thu Sep 15 03:02:48 2022 ] Eval epoch: 112
833
+ [ Thu Sep 15 03:04:22 2022 ] Mean test loss of 796 batches: 2.4613728523254395.
834
+ [ Thu Sep 15 03:04:23 2022 ] Top1: 53.36%
835
+ [ Thu Sep 15 03:04:23 2022 ] Top5: 82.97%
836
+ [ Thu Sep 15 03:04:23 2022 ] Training epoch: 113
837
+ [ Thu Sep 15 03:05:27 2022 ] Batch(83/243) done. Loss: 0.0049 lr:0.000100 network_time: 0.0269
838
+ [ Thu Sep 15 03:06:40 2022 ] Batch(183/243) done. Loss: 0.0038 lr:0.000100 network_time: 0.0254
839
+ [ Thu Sep 15 03:07:23 2022 ] Eval epoch: 113
840
+ [ Thu Sep 15 03:08:56 2022 ] Mean test loss of 796 batches: 2.4727606773376465.
841
+ [ Thu Sep 15 03:08:56 2022 ] Top1: 53.37%
842
+ [ Thu Sep 15 03:08:57 2022 ] Top5: 83.12%
843
+ [ Thu Sep 15 03:08:57 2022 ] Training epoch: 114
844
+ [ Thu Sep 15 03:09:29 2022 ] Batch(40/243) done. Loss: 0.0036 lr:0.000100 network_time: 0.0271
845
+ [ Thu Sep 15 03:10:42 2022 ] Batch(140/243) done. Loss: 0.0052 lr:0.000100 network_time: 0.0302
846
+ [ Thu Sep 15 03:11:54 2022 ] Batch(240/243) done. Loss: 0.0067 lr:0.000100 network_time: 0.0295
847
+ [ Thu Sep 15 03:11:56 2022 ] Eval epoch: 114
848
+ [ Thu Sep 15 03:13:29 2022 ] Mean test loss of 796 batches: 2.4121384620666504.
849
+ [ Thu Sep 15 03:13:30 2022 ] Top1: 53.94%
850
+ [ Thu Sep 15 03:13:30 2022 ] Top5: 83.51%
851
+ [ Thu Sep 15 03:13:30 2022 ] Training epoch: 115
852
+ [ Thu Sep 15 03:14:44 2022 ] Batch(97/243) done. Loss: 0.0103 lr:0.000100 network_time: 0.0335
853
+ [ Thu Sep 15 03:15:57 2022 ] Batch(197/243) done. Loss: 0.0115 lr:0.000100 network_time: 0.0317
854
+ [ Thu Sep 15 03:16:30 2022 ] Eval epoch: 115
855
+ [ Thu Sep 15 03:18:04 2022 ] Mean test loss of 796 batches: 2.38508939743042.
856
+ [ Thu Sep 15 03:18:04 2022 ] Top1: 54.29%
857
+ [ Thu Sep 15 03:18:04 2022 ] Top5: 83.57%
858
+ [ Thu Sep 15 03:18:05 2022 ] Training epoch: 116
859
+ [ Thu Sep 15 03:18:47 2022 ] Batch(54/243) done. Loss: 0.0095 lr:0.000100 network_time: 0.0289
860
+ [ Thu Sep 15 03:20:00 2022 ] Batch(154/243) done. Loss: 0.0051 lr:0.000100 network_time: 0.0321
861
+ [ Thu Sep 15 03:21:04 2022 ] Eval epoch: 116
862
+ [ Thu Sep 15 03:22:38 2022 ] Mean test loss of 796 batches: 2.406360387802124.
863
+ [ Thu Sep 15 03:22:38 2022 ] Top1: 54.08%
864
+ [ Thu Sep 15 03:22:39 2022 ] Top5: 83.59%
865
+ [ Thu Sep 15 03:22:39 2022 ] Training epoch: 117
866
+ [ Thu Sep 15 03:22:50 2022 ] Batch(11/243) done. Loss: 0.0072 lr:0.000100 network_time: 0.0277
867
+ [ Thu Sep 15 03:24:03 2022 ] Batch(111/243) done. Loss: 0.0053 lr:0.000100 network_time: 0.0322
868
+ [ Thu Sep 15 03:25:16 2022 ] Batch(211/243) done. Loss: 0.0159 lr:0.000100 network_time: 0.0300
869
+ [ Thu Sep 15 03:25:38 2022 ] Eval epoch: 117
870
+ [ Thu Sep 15 03:27:12 2022 ] Mean test loss of 796 batches: 2.4006166458129883.
871
+ [ Thu Sep 15 03:27:12 2022 ] Top1: 54.11%
872
+ [ Thu Sep 15 03:27:13 2022 ] Top5: 83.51%
873
+ [ Thu Sep 15 03:27:13 2022 ] Training epoch: 118
874
+ [ Thu Sep 15 03:28:06 2022 ] Batch(68/243) done. Loss: 0.0068 lr:0.000100 network_time: 0.0282
875
+ [ Thu Sep 15 03:29:18 2022 ] Batch(168/243) done. Loss: 0.0164 lr:0.000100 network_time: 0.0269
876
+ [ Thu Sep 15 03:30:12 2022 ] Eval epoch: 118
877
+ [ Thu Sep 15 03:31:46 2022 ] Mean test loss of 796 batches: 2.438443183898926.
878
+ [ Thu Sep 15 03:31:47 2022 ] Top1: 53.62%
879
+ [ Thu Sep 15 03:31:47 2022 ] Top5: 83.11%
880
+ [ Thu Sep 15 03:31:47 2022 ] Training epoch: 119
881
+ [ Thu Sep 15 03:32:09 2022 ] Batch(25/243) done. Loss: 0.0219 lr:0.000100 network_time: 0.0284
882
+ [ Thu Sep 15 03:33:21 2022 ] Batch(125/243) done. Loss: 0.0118 lr:0.000100 network_time: 0.0332
883
+ [ Thu Sep 15 03:34:34 2022 ] Batch(225/243) done. Loss: 0.0198 lr:0.000100 network_time: 0.0322
884
+ [ Thu Sep 15 03:34:47 2022 ] Eval epoch: 119
885
+ [ Thu Sep 15 03:36:21 2022 ] Mean test loss of 796 batches: 2.43454909324646.
886
+ [ Thu Sep 15 03:36:21 2022 ] Top1: 53.57%
887
+ [ Thu Sep 15 03:36:22 2022 ] Top5: 83.23%
888
+ [ Thu Sep 15 03:36:22 2022 ] Training epoch: 120
889
+ [ Thu Sep 15 03:37:25 2022 ] Batch(82/243) done. Loss: 0.0118 lr:0.000100 network_time: 0.0273
890
+ [ Thu Sep 15 03:38:37 2022 ] Batch(182/243) done. Loss: 0.0137 lr:0.000100 network_time: 0.0315
891
+ [ Thu Sep 15 03:39:21 2022 ] Eval epoch: 120
892
+ [ Thu Sep 15 03:40:55 2022 ] Mean test loss of 796 batches: 2.455756902694702.
893
+ [ Thu Sep 15 03:40:56 2022 ] Top1: 53.77%
894
+ [ Thu Sep 15 03:40:56 2022 ] Top5: 83.27%
895
+ [ Thu Sep 15 03:40:57 2022 ] Training epoch: 121
896
+ [ Thu Sep 15 03:41:28 2022 ] Batch(39/243) done. Loss: 0.0172 lr:0.000100 network_time: 0.0269
897
+ [ Thu Sep 15 03:42:41 2022 ] Batch(139/243) done. Loss: 0.0125 lr:0.000100 network_time: 0.0266
898
+ [ Thu Sep 15 03:43:53 2022 ] Batch(239/243) done. Loss: 0.0273 lr:0.000100 network_time: 0.0302
899
+ [ Thu Sep 15 03:43:56 2022 ] Eval epoch: 121
900
+ [ Thu Sep 15 03:45:30 2022 ] Mean test loss of 796 batches: 2.4042556285858154.
901
+ [ Thu Sep 15 03:45:30 2022 ] Top1: 54.06%
902
+ [ Thu Sep 15 03:45:30 2022 ] Top5: 83.58%
903
+ [ Thu Sep 15 03:45:31 2022 ] Training epoch: 122
904
+ [ Thu Sep 15 03:46:44 2022 ] Batch(96/243) done. Loss: 0.0129 lr:0.000100 network_time: 0.0270
905
+ [ Thu Sep 15 03:47:56 2022 ] Batch(196/243) done. Loss: 0.0073 lr:0.000100 network_time: 0.0320
906
+ [ Thu Sep 15 03:48:30 2022 ] Eval epoch: 122
907
+ [ Thu Sep 15 03:50:04 2022 ] Mean test loss of 796 batches: 2.415961980819702.
908
+ [ Thu Sep 15 03:50:04 2022 ] Top1: 54.06%
909
+ [ Thu Sep 15 03:50:05 2022 ] Top5: 83.58%
910
+ [ Thu Sep 15 03:50:05 2022 ] Training epoch: 123
911
+ [ Thu Sep 15 03:50:47 2022 ] Batch(53/243) done. Loss: 0.0038 lr:0.000100 network_time: 0.0278
912
+ [ Thu Sep 15 03:51:59 2022 ] Batch(153/243) done. Loss: 0.0056 lr:0.000100 network_time: 0.0275
913
+ [ Thu Sep 15 03:53:04 2022 ] Eval epoch: 123
914
+ [ Thu Sep 15 03:54:38 2022 ] Mean test loss of 796 batches: 2.43465518951416.
915
+ [ Thu Sep 15 03:54:39 2022 ] Top1: 53.76%
916
+ [ Thu Sep 15 03:54:39 2022 ] Top5: 83.39%
917
+ [ Thu Sep 15 03:54:39 2022 ] Training epoch: 124
918
+ [ Thu Sep 15 03:54:50 2022 ] Batch(10/243) done. Loss: 0.0133 lr:0.000100 network_time: 0.0273
919
+ [ Thu Sep 15 03:56:02 2022 ] Batch(110/243) done. Loss: 0.0074 lr:0.000100 network_time: 0.0279
920
+ [ Thu Sep 15 03:57:15 2022 ] Batch(210/243) done. Loss: 0.0071 lr:0.000100 network_time: 0.0278
921
+ [ Thu Sep 15 03:57:39 2022 ] Eval epoch: 124
922
+ [ Thu Sep 15 03:59:13 2022 ] Mean test loss of 796 batches: 2.3815648555755615.
923
+ [ Thu Sep 15 03:59:13 2022 ] Top1: 54.19%
924
+ [ Thu Sep 15 03:59:13 2022 ] Top5: 83.69%
925
+ [ Thu Sep 15 03:59:14 2022 ] Training epoch: 125
926
+ [ Thu Sep 15 04:00:06 2022 ] Batch(67/243) done. Loss: 0.0084 lr:0.000100 network_time: 0.0275
927
+ [ Thu Sep 15 04:01:18 2022 ] Batch(167/243) done. Loss: 0.0476 lr:0.000100 network_time: 0.0261
928
+ [ Thu Sep 15 04:02:13 2022 ] Eval epoch: 125
929
+ [ Thu Sep 15 04:03:47 2022 ] Mean test loss of 796 batches: 2.405188798904419.
930
+ [ Thu Sep 15 04:03:47 2022 ] Top1: 53.87%
931
+ [ Thu Sep 15 04:03:48 2022 ] Top5: 83.34%
932
+ [ Thu Sep 15 04:03:48 2022 ] Training epoch: 126
933
+ [ Thu Sep 15 04:04:08 2022 ] Batch(24/243) done. Loss: 0.0119 lr:0.000100 network_time: 0.0279
934
+ [ Thu Sep 15 04:05:21 2022 ] Batch(124/243) done. Loss: 0.0069 lr:0.000100 network_time: 0.0319
935
+ [ Thu Sep 15 04:06:34 2022 ] Batch(224/243) done. Loss: 0.0133 lr:0.000100 network_time: 0.0270
936
+ [ Thu Sep 15 04:06:47 2022 ] Eval epoch: 126
937
+ [ Thu Sep 15 04:08:21 2022 ] Mean test loss of 796 batches: 2.470874309539795.
938
+ [ Thu Sep 15 04:08:21 2022 ] Top1: 53.76%
939
+ [ Thu Sep 15 04:08:22 2022 ] Top5: 83.41%
940
+ [ Thu Sep 15 04:08:22 2022 ] Training epoch: 127
941
+ [ Thu Sep 15 04:09:24 2022 ] Batch(81/243) done. Loss: 0.0116 lr:0.000100 network_time: 0.0272
942
+ [ Thu Sep 15 04:10:37 2022 ] Batch(181/243) done. Loss: 0.0150 lr:0.000100 network_time: 0.0280
943
+ [ Thu Sep 15 04:11:21 2022 ] Eval epoch: 127
944
+ [ Thu Sep 15 04:12:54 2022 ] Mean test loss of 796 batches: 2.4488444328308105.
945
+ [ Thu Sep 15 04:12:55 2022 ] Top1: 53.71%
946
+ [ Thu Sep 15 04:12:56 2022 ] Top5: 83.29%
947
+ [ Thu Sep 15 04:12:56 2022 ] Training epoch: 128
948
+ [ Thu Sep 15 04:13:27 2022 ] Batch(38/243) done. Loss: 0.0214 lr:0.000100 network_time: 0.0290
949
+ [ Thu Sep 15 04:14:40 2022 ] Batch(138/243) done. Loss: 0.0040 lr:0.000100 network_time: 0.0275
950
+ [ Thu Sep 15 04:15:52 2022 ] Batch(238/243) done. Loss: 0.0129 lr:0.000100 network_time: 0.0233
951
+ [ Thu Sep 15 04:15:55 2022 ] Eval epoch: 128
952
+ [ Thu Sep 15 04:17:30 2022 ] Mean test loss of 796 batches: 2.4242472648620605.
953
+ [ Thu Sep 15 04:17:30 2022 ] Top1: 53.89%
954
+ [ Thu Sep 15 04:17:31 2022 ] Top5: 83.42%
955
+ [ Thu Sep 15 04:17:31 2022 ] Training epoch: 129
956
+ [ Thu Sep 15 04:18:43 2022 ] Batch(95/243) done. Loss: 0.0464 lr:0.000100 network_time: 0.0281
957
+ [ Thu Sep 15 04:19:56 2022 ] Batch(195/243) done. Loss: 0.0134 lr:0.000100 network_time: 0.0312
958
+ [ Thu Sep 15 04:20:30 2022 ] Eval epoch: 129
959
+ [ Thu Sep 15 04:22:05 2022 ] Mean test loss of 796 batches: 2.4200432300567627.
960
+ [ Thu Sep 15 04:22:05 2022 ] Top1: 53.90%
961
+ [ Thu Sep 15 04:22:06 2022 ] Top5: 83.39%
962
+ [ Thu Sep 15 04:22:06 2022 ] Training epoch: 130
963
+ [ Thu Sep 15 04:22:47 2022 ] Batch(52/243) done. Loss: 0.0061 lr:0.000100 network_time: 0.0272
964
+ [ Thu Sep 15 04:24:00 2022 ] Batch(152/243) done. Loss: 0.0041 lr:0.000100 network_time: 0.0273
965
+ [ Thu Sep 15 04:25:05 2022 ] Eval epoch: 130
966
+ [ Thu Sep 15 04:26:39 2022 ] Mean test loss of 796 batches: 2.39705491065979.
967
+ [ Thu Sep 15 04:26:39 2022 ] Top1: 54.33%
968
+ [ Thu Sep 15 04:26:40 2022 ] Top5: 83.68%
969
+ [ Thu Sep 15 04:26:40 2022 ] Training epoch: 131
970
+ [ Thu Sep 15 04:26:50 2022 ] Batch(9/243) done. Loss: 0.0103 lr:0.000100 network_time: 0.0366
971
+ [ Thu Sep 15 04:28:03 2022 ] Batch(109/243) done. Loss: 0.0039 lr:0.000100 network_time: 0.0299
972
+ [ Thu Sep 15 04:29:15 2022 ] Batch(209/243) done. Loss: 0.0089 lr:0.000100 network_time: 0.0311
973
+ [ Thu Sep 15 04:29:39 2022 ] Eval epoch: 131
974
+ [ Thu Sep 15 04:31:13 2022 ] Mean test loss of 796 batches: 2.4424407482147217.
975
+ [ Thu Sep 15 04:31:13 2022 ] Top1: 53.93%
976
+ [ Thu Sep 15 04:31:14 2022 ] Top5: 83.32%
977
+ [ Thu Sep 15 04:31:14 2022 ] Training epoch: 132
978
+ [ Thu Sep 15 04:32:05 2022 ] Batch(66/243) done. Loss: 0.0062 lr:0.000100 network_time: 0.0279
979
+ [ Thu Sep 15 04:33:18 2022 ] Batch(166/243) done. Loss: 0.0148 lr:0.000100 network_time: 0.0305
980
+ [ Thu Sep 15 04:34:13 2022 ] Eval epoch: 132
981
+ [ Thu Sep 15 04:35:47 2022 ] Mean test loss of 796 batches: 2.369658946990967.
982
+ [ Thu Sep 15 04:35:47 2022 ] Top1: 54.38%
983
+ [ Thu Sep 15 04:35:48 2022 ] Top5: 83.72%
984
+ [ Thu Sep 15 04:35:48 2022 ] Training epoch: 133
985
+ [ Thu Sep 15 04:36:08 2022 ] Batch(23/243) done. Loss: 0.0091 lr:0.000100 network_time: 0.0276
986
+ [ Thu Sep 15 04:37:20 2022 ] Batch(123/243) done. Loss: 0.0055 lr:0.000100 network_time: 0.0268
987
+ [ Thu Sep 15 04:38:33 2022 ] Batch(223/243) done. Loss: 0.0116 lr:0.000100 network_time: 0.0262
988
+ [ Thu Sep 15 04:38:47 2022 ] Eval epoch: 133
989
+ [ Thu Sep 15 04:40:21 2022 ] Mean test loss of 796 batches: 2.389495611190796.
990
+ [ Thu Sep 15 04:40:22 2022 ] Top1: 53.83%
991
+ [ Thu Sep 15 04:40:22 2022 ] Top5: 83.35%
992
+ [ Thu Sep 15 04:40:23 2022 ] Training epoch: 134
993
+ [ Thu Sep 15 04:41:24 2022 ] Batch(80/243) done. Loss: 0.0105 lr:0.000100 network_time: 0.0282
994
+ [ Thu Sep 15 04:42:37 2022 ] Batch(180/243) done. Loss: 0.0188 lr:0.000100 network_time: 0.0320
995
+ [ Thu Sep 15 04:43:22 2022 ] Eval epoch: 134
996
+ [ Thu Sep 15 04:44:56 2022 ] Mean test loss of 796 batches: 2.4094290733337402.
997
+ [ Thu Sep 15 04:44:57 2022 ] Top1: 54.14%
998
+ [ Thu Sep 15 04:44:58 2022 ] Top5: 83.63%
999
+ [ Thu Sep 15 04:44:58 2022 ] Training epoch: 135
1000
+ [ Thu Sep 15 04:45:28 2022 ] Batch(37/243) done. Loss: 0.0060 lr:0.000100 network_time: 0.0298
1001
+ [ Thu Sep 15 04:46:40 2022 ] Batch(137/243) done. Loss: 0.0224 lr:0.000100 network_time: 0.0307
1002
+ [ Thu Sep 15 04:47:53 2022 ] Batch(237/243) done. Loss: 0.0060 lr:0.000100 network_time: 0.0272
1003
+ [ Thu Sep 15 04:47:57 2022 ] Eval epoch: 135
1004
+ [ Thu Sep 15 04:49:32 2022 ] Mean test loss of 796 batches: 2.4336893558502197.
1005
+ [ Thu Sep 15 04:49:32 2022 ] Top1: 53.72%
1006
+ [ Thu Sep 15 04:49:33 2022 ] Top5: 83.31%
1007
+ [ Thu Sep 15 04:49:33 2022 ] Training epoch: 136
1008
+ [ Thu Sep 15 04:50:45 2022 ] Batch(94/243) done. Loss: 0.0068 lr:0.000100 network_time: 0.0270
1009
+ [ Thu Sep 15 04:51:57 2022 ] Batch(194/243) done. Loss: 0.0108 lr:0.000100 network_time: 0.0319
1010
+ [ Thu Sep 15 04:52:33 2022 ] Eval epoch: 136
1011
+ [ Thu Sep 15 04:54:07 2022 ] Mean test loss of 796 batches: 2.4274051189422607.
1012
+ [ Thu Sep 15 04:54:07 2022 ] Top1: 53.80%
1013
+ [ Thu Sep 15 04:54:07 2022 ] Top5: 83.48%
1014
+ [ Thu Sep 15 04:54:08 2022 ] Training epoch: 137
1015
+ [ Thu Sep 15 04:54:48 2022 ] Batch(51/243) done. Loss: 0.0082 lr:0.000100 network_time: 0.0320
1016
+ [ Thu Sep 15 04:56:01 2022 ] Batch(151/243) done. Loss: 0.0078 lr:0.000100 network_time: 0.0274
1017
+ [ Thu Sep 15 04:57:07 2022 ] Eval epoch: 137
1018
+ [ Thu Sep 15 04:58:41 2022 ] Mean test loss of 796 batches: 2.395327091217041.
1019
+ [ Thu Sep 15 04:58:41 2022 ] Top1: 54.43%
1020
+ [ Thu Sep 15 04:58:42 2022 ] Top5: 83.79%
1021
+ [ Thu Sep 15 04:58:42 2022 ] Training epoch: 138
1022
+ [ Thu Sep 15 04:58:51 2022 ] Batch(8/243) done. Loss: 0.0090 lr:0.000100 network_time: 0.0316
1023
+ [ Thu Sep 15 05:00:04 2022 ] Batch(108/243) done. Loss: 0.0243 lr:0.000100 network_time: 0.0277
1024
+ [ Thu Sep 15 05:01:17 2022 ] Batch(208/243) done. Loss: 0.0360 lr:0.000100 network_time: 0.0318
1025
+ [ Thu Sep 15 05:01:42 2022 ] Eval epoch: 138
1026
+ [ Thu Sep 15 05:03:16 2022 ] Mean test loss of 796 batches: 2.4107162952423096.
1027
+ [ Thu Sep 15 05:03:16 2022 ] Top1: 53.67%
1028
+ [ Thu Sep 15 05:03:17 2022 ] Top5: 83.25%
1029
+ [ Thu Sep 15 05:03:17 2022 ] Training epoch: 139
1030
+ [ Thu Sep 15 05:04:07 2022 ] Batch(65/243) done. Loss: 0.0301 lr:0.000100 network_time: 0.0274
1031
+ [ Thu Sep 15 05:05:20 2022 ] Batch(165/243) done. Loss: 0.0074 lr:0.000100 network_time: 0.0282
1032
+ [ Thu Sep 15 05:06:16 2022 ] Eval epoch: 139
1033
+ [ Thu Sep 15 05:07:50 2022 ] Mean test loss of 796 batches: 2.4515626430511475.
1034
+ [ Thu Sep 15 05:07:50 2022 ] Top1: 53.41%
1035
+ [ Thu Sep 15 05:07:51 2022 ] Top5: 83.25%
1036
+ [ Thu Sep 15 05:07:51 2022 ] Training epoch: 140
1037
+ [ Thu Sep 15 05:08:11 2022 ] Batch(22/243) done. Loss: 0.0049 lr:0.000100 network_time: 0.0266
1038
+ [ Thu Sep 15 05:09:23 2022 ] Batch(122/243) done. Loss: 0.0135 lr:0.000100 network_time: 0.0280
1039
+ [ Thu Sep 15 05:10:36 2022 ] Batch(222/243) done. Loss: 0.0049 lr:0.000100 network_time: 0.0306
1040
+ [ Thu Sep 15 05:10:51 2022 ] Eval epoch: 140
1041
+ [ Thu Sep 15 05:12:25 2022 ] Mean test loss of 796 batches: 2.40610671043396.
1042
+ [ Thu Sep 15 05:12:26 2022 ] Top1: 54.18%
1043
+ [ Thu Sep 15 05:12:26 2022 ] Top5: 83.66%
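In the log excerpt above the learning rate has already been decayed to 1e-4 and the test accuracy plateaus, peaking in this excerpt at 54.43% Top1 / 83.79% Top5 at the epoch-137 evaluation. If you want to pull the best evaluation epoch out of a log.txt written in this format, a small parser along the following lines is enough; it is an illustrative sketch, not part of the upload, and the log path is an assumption.

# Illustrative sketch (not part of this upload): find the best Top1 epoch in a
# Shift-GCN log.txt of the format shown above. The path is an assumption.
import re

def best_top1(log_path):
    eval_re = re.compile(r"Eval epoch: (\d+)")
    top1_re = re.compile(r"Top1: ([\d.]+)%")
    epoch, best = None, None
    with open(log_path) as f:
        for line in f:
            m = eval_re.search(line)
            if m:
                epoch = int(m.group(1))
            m = top1_re.search(line)
            if m and epoch is not None:
                acc = float(m.group(1))
                if best is None or acc > best[0]:
                    best = (acc, epoch)
    return best  # (top1_percentage, epoch) or None

print(best_top1("log.txt"))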
ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_bone_xsub/shift_gcn.py ADDED
@@ -0,0 +1,216 @@
1
+ import torch
2
+ import torch.nn as nn
3
+ import torch.nn.functional as F
4
+ from torch.autograd import Variable
5
+ import numpy as np
6
+ import math
7
+
8
+ import sys
9
+ sys.path.append("./model/Temporal_shift/")
10
+
11
+ from cuda.shift import Shift
12
+
13
+
14
+ def import_class(name):
15
+ components = name.split('.')
16
+ mod = __import__(components[0])
17
+ for comp in components[1:]:
18
+ mod = getattr(mod, comp)
19
+ return mod
20
+
21
+ def conv_init(conv):
22
+ nn.init.kaiming_normal(conv.weight, mode='fan_out')
23
+ nn.init.constant(conv.bias, 0)
24
+
25
+
26
+ def bn_init(bn, scale):
27
+ nn.init.constant(bn.weight, scale)
28
+ nn.init.constant(bn.bias, 0)
29
+
30
+
31
+ class tcn(nn.Module):
32
+ def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
33
+ super(tcn, self).__init__()
34
+ pad = int((kernel_size - 1) / 2)
35
+ self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
36
+ stride=(stride, 1))
37
+
38
+ self.bn = nn.BatchNorm2d(out_channels)
39
+ self.relu = nn.ReLU()
40
+ conv_init(self.conv)
41
+ bn_init(self.bn, 1)
42
+
43
+ def forward(self, x):
44
+ x = self.bn(self.conv(x))
45
+ return x
46
+
47
+
48
+ class Shift_tcn(nn.Module):
49
+ def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
50
+ super(Shift_tcn, self).__init__()
51
+
52
+ self.in_channels = in_channels
53
+ self.out_channels = out_channels
54
+
55
+ self.bn = nn.BatchNorm2d(in_channels)
56
+ self.bn2 = nn.BatchNorm2d(in_channels)
57
+ bn_init(self.bn2, 1)
58
+ self.relu = nn.ReLU(inplace=True)
59
+ self.shift_in = Shift(channel=in_channels, stride=1, init_scale=1)
60
+ self.shift_out = Shift(channel=out_channels, stride=stride, init_scale=1)
61
+
62
+ self.temporal_linear = nn.Conv2d(in_channels, out_channels, 1)
63
+ nn.init.kaiming_normal(self.temporal_linear.weight, mode='fan_out')
64
+
65
+ def forward(self, x):
66
+ x = self.bn(x)
67
+ # shift1
68
+ x = self.shift_in(x)
69
+ x = self.temporal_linear(x)
70
+ x = self.relu(x)
71
+ # shift2
72
+ x = self.shift_out(x)
73
+ x = self.bn2(x)
74
+ return x
75
+
76
+
77
+ class Shift_gcn(nn.Module):
78
+ def __init__(self, in_channels, out_channels, A, coff_embedding=4, num_subset=3):
79
+ super(Shift_gcn, self).__init__()
80
+ self.in_channels = in_channels
81
+ self.out_channels = out_channels
82
+ if in_channels != out_channels:
83
+ self.down = nn.Sequential(
84
+ nn.Conv2d(in_channels, out_channels, 1),
85
+ nn.BatchNorm2d(out_channels)
86
+ )
87
+ else:
88
+ self.down = lambda x: x
89
+
90
+ self.Linear_weight = nn.Parameter(torch.zeros(in_channels, out_channels, requires_grad=True, device='cuda'), requires_grad=True)
91
+ nn.init.normal_(self.Linear_weight, 0,math.sqrt(1.0/out_channels))
92
+
93
+ self.Linear_bias = nn.Parameter(torch.zeros(1,1,out_channels,requires_grad=True,device='cuda'),requires_grad=True)
94
+ nn.init.constant(self.Linear_bias, 0)
95
+
96
+ self.Feature_Mask = nn.Parameter(torch.ones(1,25,in_channels, requires_grad=True,device='cuda'),requires_grad=True)
97
+ nn.init.constant(self.Feature_Mask, 0)
98
+
99
+ self.bn = nn.BatchNorm1d(25*out_channels)
100
+ self.relu = nn.ReLU()
101
+
102
+ for m in self.modules():
103
+ if isinstance(m, nn.Conv2d):
104
+ conv_init(m)
105
+ elif isinstance(m, nn.BatchNorm2d):
106
+ bn_init(m, 1)
107
+
108
+ index_array = np.empty(25*in_channels).astype(np.int)
109
+ for i in range(25):
110
+ for j in range(in_channels):
111
+ index_array[i*in_channels + j] = (i*in_channels + j + j*in_channels)%(in_channels*25)
112
+ self.shift_in = nn.Parameter(torch.from_numpy(index_array),requires_grad=False)
113
+
114
+ index_array = np.empty(25*out_channels).astype(np.int)
115
+ for i in range(25):
116
+ for j in range(out_channels):
117
+ index_array[i*out_channels + j] = (i*out_channels + j - j*out_channels)%(out_channels*25)
118
+ self.shift_out = nn.Parameter(torch.from_numpy(index_array),requires_grad=False)
119
+
120
+
121
+ def forward(self, x0):
122
+ n, c, t, v = x0.size()
123
+ x = x0.permute(0,2,3,1).contiguous()
124
+
125
+ # shift1
126
+ x = x.view(n*t,v*c)
127
+ x = torch.index_select(x, 1, self.shift_in)
128
+ x = x.view(n*t,v,c)
129
+ x = x * (torch.tanh(self.Feature_Mask)+1)
130
+
131
+ x = torch.einsum('nwc,cd->nwd', (x, self.Linear_weight)).contiguous() # nt,v,c
132
+ x = x + self.Linear_bias
133
+
134
+ # shift2
135
+ x = x.view(n*t,-1)
136
+ x = torch.index_select(x, 1, self.shift_out)
137
+ x = self.bn(x)
138
+ x = x.view(n,t,v,self.out_channels).permute(0,3,1,2) # n,c,t,v
139
+
140
+ x = x + self.down(x0)
141
+ x = self.relu(x)
142
+ return x
143
+
144
+
145
+ class TCN_GCN_unit(nn.Module):
146
+ def __init__(self, in_channels, out_channels, A, stride=1, residual=True):
147
+ super(TCN_GCN_unit, self).__init__()
148
+ self.gcn1 = Shift_gcn(in_channels, out_channels, A)
149
+ self.tcn1 = Shift_tcn(out_channels, out_channels, stride=stride)
150
+ self.relu = nn.ReLU()
151
+
152
+ if not residual:
153
+ self.residual = lambda x: 0
154
+
155
+ elif (in_channels == out_channels) and (stride == 1):
156
+ self.residual = lambda x: x
157
+ else:
158
+ self.residual = tcn(in_channels, out_channels, kernel_size=1, stride=stride)
159
+
160
+ def forward(self, x):
161
+ x = self.tcn1(self.gcn1(x)) + self.residual(x)
162
+ return self.relu(x)
163
+
164
+
165
+ class Model(nn.Module):
166
+ def __init__(self, num_class=60, num_point=25, num_person=2, graph=None, graph_args=dict(), in_channels=3):
167
+ super(Model, self).__init__()
168
+
169
+ if graph is None:
170
+ raise ValueError()
171
+ else:
172
+ Graph = import_class(graph)
173
+ self.graph = Graph(**graph_args)
174
+
175
+ A = self.graph.A
176
+ self.data_bn = nn.BatchNorm1d(num_person * in_channels * num_point)
177
+
178
+ self.l1 = TCN_GCN_unit(3, 64, A, residual=False)
179
+ self.l2 = TCN_GCN_unit(64, 64, A)
180
+ self.l3 = TCN_GCN_unit(64, 64, A)
181
+ self.l4 = TCN_GCN_unit(64, 64, A)
182
+ self.l5 = TCN_GCN_unit(64, 128, A, stride=2)
183
+ self.l6 = TCN_GCN_unit(128, 128, A)
184
+ self.l7 = TCN_GCN_unit(128, 128, A)
185
+ self.l8 = TCN_GCN_unit(128, 256, A, stride=2)
186
+ self.l9 = TCN_GCN_unit(256, 256, A)
187
+ self.l10 = TCN_GCN_unit(256, 256, A)
188
+
189
+ self.fc = nn.Linear(256, num_class)
190
+ nn.init.normal(self.fc.weight, 0, math.sqrt(2. / num_class))
191
+ bn_init(self.data_bn, 1)
192
+
193
+ def forward(self, x):
194
+ N, C, T, V, M = x.size()
195
+
196
+ x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
197
+ x = self.data_bn(x)
198
+ x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
199
+
200
+ x = self.l1(x)
201
+ x = self.l2(x)
202
+ x = self.l3(x)
203
+ x = self.l4(x)
204
+ x = self.l5(x)
205
+ x = self.l6(x)
206
+ x = self.l7(x)
207
+ x = self.l8(x)
208
+ x = self.l9(x)
209
+ x = self.l10(x)
210
+
211
+ # N*M,C,T,V
212
+ c_new = x.size(1)
213
+ x = x.view(N, M, c_new, -1)
214
+ x = x.mean(3).mean(1)
215
+
216
+ return self.fc(x)
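The trickiest part of the Shift_gcn module above is the flat shift_in/shift_out index arithmetic. The following self-contained sketch (illustrative only; it uses a small channel count and imports nothing from the repository) rebuilds the shift_in index exactly as in the constructor and checks that it amounts to rolling channel j along the joint dimension by j positions, i.e. channel j of joint i is read from joint (i + j) mod 25; shift_out applies the inverse roll.

# Illustrative sketch: what the shift_in index built in Shift_gcn above computes.
# V is the 25 NTU joints; C is kept small here purely for readability.
import numpy as np
import torch

V, C = 25, 4
index_array = np.empty(V * C, dtype=np.int64)
for i in range(V):
    for j in range(C):
        # same formula as in Shift_gcn.__init__ (np.int64 used here instead of np.int)
        index_array[i * C + j] = (i * C + j + j * C) % (C * V)
shift_in = torch.from_numpy(index_array)

x = torch.arange(V * C, dtype=torch.float32).view(1, V, C)      # one frame: (n*t, v, c)
shifted = torch.index_select(x.view(1, V * C), 1, shift_in).view(1, V, C)

# Reference: roll channel j by j joints, so joint i reads joint (i + j) % V.
reference = torch.stack([torch.roll(x[0, :, j], shifts=-j) for j in range(C)], dim=1)
assert torch.equal(shifted[0], reference)
print(shifted[0])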
ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_joint_motion_xsub/config.yaml ADDED
@@ -0,0 +1,56 @@
1
+ Experiment_name: ntu120_joint_motion_xsub
2
+ base_lr: 0.1
3
+ batch_size: 64
4
+ config: ./config/ntu120_xsub/train_joint_motion.yaml
5
+ device:
6
+ - 6
7
+ - 7
8
+ eval_interval: 5
9
+ feeder: feeders.feeder.Feeder
10
+ ignore_weights: []
11
+ log_interval: 100
12
+ model: model.shift_gcn.Model
13
+ model_args:
14
+ graph: graph.ntu_rgb_d.Graph
15
+ graph_args:
16
+ labeling_mode: spatial
17
+ num_class: 120
18
+ num_person: 2
19
+ num_point: 25
20
+ model_saved_name: ./save_models/ntu120_joint_motion_xsub
21
+ nesterov: true
22
+ num_epoch: 140
23
+ num_worker: 32
24
+ only_train_epoch: 1
25
+ only_train_part: true
26
+ optimizer: SGD
27
+ phase: train
28
+ print_log: true
29
+ save_interval: 2
30
+ save_score: false
31
+ seed: 1
32
+ show_topk:
33
+ - 1
34
+ - 5
35
+ start_epoch: 0
36
+ step:
37
+ - 60
38
+ - 80
39
+ - 100
40
+ test_batch_size: 64
41
+ test_feeder_args:
42
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_joint_motion.npy
43
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl
44
+ train_feeder_args:
45
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_joint_motion.npy
46
+ debug: false
47
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl
48
+ normalization: false
49
+ random_choose: false
50
+ random_move: false
51
+ random_shift: false
52
+ window_size: -1
53
+ warm_up_epoch: 0
54
+ weight_decay: 0.0001
55
+ weights: null
56
+ work_dir: ./work_dir/ntu120_joint_motion_xsub
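The config above is what selects the network (model: model.shift_gcn.Model) and its constructor arguments (model_args). As a rough, hedged illustration of how such a file maps onto the Model class shown earlier, the sketch below loads the YAML and instantiates the model; it is not the repository's actual training entry point, and it assumes PyYAML is installed, that it is run from the Shift-GCN code root so the model and graph packages are importable, and that the config sits at the path given.

# Illustrative sketch (not the repository's entry point): build the model
# named in a config.yaml like the one above from its model_args section.
import importlib
import yaml

def import_class(name):
    # same dotted-path convention as the import_class helper in shift_gcn.py
    module, _, cls = name.rpartition('.')
    return getattr(importlib.import_module(module), cls)

with open('config.yaml') as f:                 # path is an assumption
    cfg = yaml.safe_load(f)

Model = import_class(cfg['model'])             # -> model.shift_gcn.Model
model = Model(**cfg['model_args'])             # num_class=120, num_point=25, graph=...
print(type(model).__name__, 'with', cfg['model_args']['num_class'], 'classes')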
ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_joint_motion_xsub/eval_results/best_acc.pkl ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:de10912eab25311307f44b8b05e3e3d54ffcf6211fc9bef161e5f600473a2510
3
+ size 29946137
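As with the other checkpoints in this upload, eval_results/best_acc.pkl is stored through Git LFS, so the three lines above are only the pointer; the roughly 30 MB pickle itself has to be fetched (for example with git lfs pull) before it can be opened. Its internal layout is not documented here, so the sketch below only loads it and reports what kind of object it contains; the path is taken from the file header above.

# Minimal inspection sketch; assumes the LFS object has been pulled locally.
import pickle

path = 'ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_joint_motion_xsub/eval_results/best_acc.pkl'
with open(path, 'rb') as f:
    obj = pickle.load(f)

print(type(obj))
try:
    print(len(obj), 'entries')                 # structure is undocumented in this upload
except TypeError:
    print(obj)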
ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_joint_motion_xsub/log.txt ADDED
@@ -0,0 +1,1043 @@
1
+ [ Wed Sep 14 18:31:51 2022 ] Parameters:
2
+ {'work_dir': './work_dir/ntu120_joint_motion_xsub', 'model_saved_name': './save_models/ntu120_joint_motion_xsub', 'Experiment_name': 'ntu120_joint_motion_xsub', 'config': './config/ntu120_xsub/train_joint_motion.yaml', 'phase': 'train', 'save_score': False, 'seed': 1, 'log_interval': 100, 'save_interval': 2, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_joint_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_joint_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl'}, 'model': 'model.shift_gcn.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.1, 'step': [60, 80, 100], 'device': [6, 7], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 64, 'test_batch_size': 64, 'start_epoch': 0, 'num_epoch': 140, 'weight_decay': 0.0001, 'only_train_part': True, 'only_train_epoch': 1, 'warm_up_epoch': 0}
3
+
4
+ [ Wed Sep 14 18:31:51 2022 ] Training epoch: 1
5
+ [ Wed Sep 14 18:33:10 2022 ] Batch(99/243) done. Loss: 4.0202 lr:0.100000 network_time: 0.0268
6
+ [ Wed Sep 14 18:34:23 2022 ] Batch(199/243) done. Loss: 3.0476 lr:0.100000 network_time: 0.0268
7
+ [ Wed Sep 14 18:34:55 2022 ] Eval epoch: 1
8
+ [ Wed Sep 14 18:36:30 2022 ] Mean test loss of 796 batches: 5.69941520690918.
9
+ [ Wed Sep 14 18:36:30 2022 ] Top1: 8.02%
10
+ [ Wed Sep 14 18:36:31 2022 ] Top5: 20.03%
11
+ [ Wed Sep 14 18:36:31 2022 ] Training epoch: 2
12
+ [ Wed Sep 14 18:37:15 2022 ] Batch(56/243) done. Loss: 2.7135 lr:0.100000 network_time: 0.0278
13
+ [ Wed Sep 14 18:38:28 2022 ] Batch(156/243) done. Loss: 2.2763 lr:0.100000 network_time: 0.0318
14
+ [ Wed Sep 14 18:39:31 2022 ] Eval epoch: 2
15
+ [ Wed Sep 14 18:41:06 2022 ] Mean test loss of 796 batches: 4.1878180503845215.
16
+ [ Wed Sep 14 18:41:06 2022 ] Top1: 16.12%
17
+ [ Wed Sep 14 18:41:06 2022 ] Top5: 36.34%
18
+ [ Wed Sep 14 18:41:07 2022 ] Training epoch: 3
19
+ [ Wed Sep 14 18:41:20 2022 ] Batch(13/243) done. Loss: 1.7547 lr:0.100000 network_time: 0.0262
20
+ [ Wed Sep 14 18:42:33 2022 ] Batch(113/243) done. Loss: 1.4097 lr:0.100000 network_time: 0.0317
21
+ [ Wed Sep 14 18:43:45 2022 ] Batch(213/243) done. Loss: 1.7054 lr:0.100000 network_time: 0.0262
22
+ [ Wed Sep 14 18:44:07 2022 ] Eval epoch: 3
23
+ [ Wed Sep 14 18:45:41 2022 ] Mean test loss of 796 batches: 4.271505355834961.
24
+ [ Wed Sep 14 18:45:41 2022 ] Top1: 18.78%
25
+ [ Wed Sep 14 18:45:42 2022 ] Top5: 44.19%
26
+ [ Wed Sep 14 18:45:42 2022 ] Training epoch: 4
27
+ [ Wed Sep 14 18:46:37 2022 ] Batch(70/243) done. Loss: 1.4182 lr:0.100000 network_time: 0.0281
28
+ [ Wed Sep 14 18:47:49 2022 ] Batch(170/243) done. Loss: 1.4677 lr:0.100000 network_time: 0.0263
29
+ [ Wed Sep 14 18:48:42 2022 ] Eval epoch: 4
30
+ [ Wed Sep 14 18:50:17 2022 ] Mean test loss of 796 batches: 3.8030338287353516.
31
+ [ Wed Sep 14 18:50:17 2022 ] Top1: 23.18%
32
+ [ Wed Sep 14 18:50:17 2022 ] Top5: 49.43%
33
+ [ Wed Sep 14 18:50:18 2022 ] Training epoch: 5
34
+ [ Wed Sep 14 18:50:41 2022 ] Batch(27/243) done. Loss: 1.4289 lr:0.100000 network_time: 0.0280
35
+ [ Wed Sep 14 18:51:54 2022 ] Batch(127/243) done. Loss: 1.2314 lr:0.100000 network_time: 0.0284
36
+ [ Wed Sep 14 18:53:07 2022 ] Batch(227/243) done. Loss: 1.4400 lr:0.100000 network_time: 0.0274
37
+ [ Wed Sep 14 18:53:18 2022 ] Eval epoch: 5
38
+ [ Wed Sep 14 18:54:52 2022 ] Mean test loss of 796 batches: 3.845407247543335.
39
+ [ Wed Sep 14 18:54:53 2022 ] Top1: 21.89%
40
+ [ Wed Sep 14 18:54:53 2022 ] Top5: 49.36%
41
+ [ Wed Sep 14 18:54:53 2022 ] Training epoch: 6
42
+ [ Wed Sep 14 18:55:58 2022 ] Batch(84/243) done. Loss: 1.2397 lr:0.100000 network_time: 0.0279
43
+ [ Wed Sep 14 18:57:11 2022 ] Batch(184/243) done. Loss: 1.0354 lr:0.100000 network_time: 0.0275
44
+ [ Wed Sep 14 18:57:53 2022 ] Eval epoch: 6
45
+ [ Wed Sep 14 18:59:28 2022 ] Mean test loss of 796 batches: 3.845848560333252.
46
+ [ Wed Sep 14 18:59:28 2022 ] Top1: 26.16%
47
+ [ Wed Sep 14 18:59:29 2022 ] Top5: 56.40%
48
+ [ Wed Sep 14 18:59:29 2022 ] Training epoch: 7
49
+ [ Wed Sep 14 19:00:02 2022 ] Batch(41/243) done. Loss: 1.1685 lr:0.100000 network_time: 0.0262
50
+ [ Wed Sep 14 19:01:15 2022 ] Batch(141/243) done. Loss: 0.8339 lr:0.100000 network_time: 0.0276
51
+ [ Wed Sep 14 19:02:28 2022 ] Batch(241/243) done. Loss: 0.9553 lr:0.100000 network_time: 0.0281
52
+ [ Wed Sep 14 19:02:29 2022 ] Eval epoch: 7
53
+ [ Wed Sep 14 19:04:04 2022 ] Mean test loss of 796 batches: 3.6132612228393555.
54
+ [ Wed Sep 14 19:04:04 2022 ] Top1: 26.16%
55
+ [ Wed Sep 14 19:04:05 2022 ] Top5: 57.86%
56
+ [ Wed Sep 14 19:04:05 2022 ] Training epoch: 8
57
+ [ Wed Sep 14 19:05:20 2022 ] Batch(98/243) done. Loss: 0.7015 lr:0.100000 network_time: 0.0276
58
+ [ Wed Sep 14 19:06:33 2022 ] Batch(198/243) done. Loss: 0.8786 lr:0.100000 network_time: 0.0285
59
+ [ Wed Sep 14 19:07:05 2022 ] Eval epoch: 8
60
+ [ Wed Sep 14 19:08:40 2022 ] Mean test loss of 796 batches: 3.2343382835388184.
61
+ [ Wed Sep 14 19:08:40 2022 ] Top1: 29.52%
62
+ [ Wed Sep 14 19:08:41 2022 ] Top5: 62.62%
63
+ [ Wed Sep 14 19:08:41 2022 ] Training epoch: 9
64
+ [ Wed Sep 14 19:09:24 2022 ] Batch(55/243) done. Loss: 1.0992 lr:0.100000 network_time: 0.0314
65
+ [ Wed Sep 14 19:10:37 2022 ] Batch(155/243) done. Loss: 0.8874 lr:0.100000 network_time: 0.0276
66
+ [ Wed Sep 14 19:11:41 2022 ] Eval epoch: 9
67
+ [ Wed Sep 14 19:13:16 2022 ] Mean test loss of 796 batches: 3.12418270111084.
68
+ [ Wed Sep 14 19:13:16 2022 ] Top1: 34.95%
69
+ [ Wed Sep 14 19:13:16 2022 ] Top5: 66.12%
70
+ [ Wed Sep 14 19:13:17 2022 ] Training epoch: 10
71
+ [ Wed Sep 14 19:13:29 2022 ] Batch(12/243) done. Loss: 1.0292 lr:0.100000 network_time: 0.0220
72
+ [ Wed Sep 14 19:14:42 2022 ] Batch(112/243) done. Loss: 0.8647 lr:0.100000 network_time: 0.0256
73
+ [ Wed Sep 14 19:15:55 2022 ] Batch(212/243) done. Loss: 1.3004 lr:0.100000 network_time: 0.0311
74
+ [ Wed Sep 14 19:16:17 2022 ] Eval epoch: 10
75
+ [ Wed Sep 14 19:17:51 2022 ] Mean test loss of 796 batches: 3.8285813331604004.
76
+ [ Wed Sep 14 19:17:51 2022 ] Top1: 30.90%
77
+ [ Wed Sep 14 19:17:52 2022 ] Top5: 63.59%
78
+ [ Wed Sep 14 19:17:52 2022 ] Training epoch: 11
79
+ [ Wed Sep 14 19:18:46 2022 ] Batch(69/243) done. Loss: 0.8339 lr:0.100000 network_time: 0.0323
80
+ [ Wed Sep 14 19:19:59 2022 ] Batch(169/243) done. Loss: 0.7797 lr:0.100000 network_time: 0.0273
81
+ [ Wed Sep 14 19:20:52 2022 ] Eval epoch: 11
82
+ [ Wed Sep 14 19:22:26 2022 ] Mean test loss of 796 batches: 3.172834634780884.
83
+ [ Wed Sep 14 19:22:27 2022 ] Top1: 36.06%
84
+ [ Wed Sep 14 19:22:27 2022 ] Top5: 70.82%
85
+ [ Wed Sep 14 19:22:27 2022 ] Training epoch: 12
86
+ [ Wed Sep 14 19:22:50 2022 ] Batch(26/243) done. Loss: 0.3926 lr:0.100000 network_time: 0.0273
87
+ [ Wed Sep 14 19:24:03 2022 ] Batch(126/243) done. Loss: 0.9194 lr:0.100000 network_time: 0.0269
88
+ [ Wed Sep 14 19:25:16 2022 ] Batch(226/243) done. Loss: 0.9495 lr:0.100000 network_time: 0.0307
89
+ [ Wed Sep 14 19:25:27 2022 ] Eval epoch: 12
90
+ [ Wed Sep 14 19:27:02 2022 ] Mean test loss of 796 batches: 3.429459810256958.
91
+ [ Wed Sep 14 19:27:02 2022 ] Top1: 33.83%
92
+ [ Wed Sep 14 19:27:02 2022 ] Top5: 67.91%
93
+ [ Wed Sep 14 19:27:03 2022 ] Training epoch: 13
94
+ [ Wed Sep 14 19:28:07 2022 ] Batch(83/243) done. Loss: 0.5501 lr:0.100000 network_time: 0.0260
95
+ [ Wed Sep 14 19:29:20 2022 ] Batch(183/243) done. Loss: 0.8154 lr:0.100000 network_time: 0.0303
96
+ [ Wed Sep 14 19:30:03 2022 ] Eval epoch: 13
97
+ [ Wed Sep 14 19:31:37 2022 ] Mean test loss of 796 batches: 3.606862783432007.
98
+ [ Wed Sep 14 19:31:37 2022 ] Top1: 27.25%
99
+ [ Wed Sep 14 19:31:38 2022 ] Top5: 57.85%
100
+ [ Wed Sep 14 19:31:38 2022 ] Training epoch: 14
101
+ [ Wed Sep 14 19:32:11 2022 ] Batch(40/243) done. Loss: 0.4214 lr:0.100000 network_time: 0.0271
102
+ [ Wed Sep 14 19:33:24 2022 ] Batch(140/243) done. Loss: 0.8487 lr:0.100000 network_time: 0.0275
103
+ [ Wed Sep 14 19:34:37 2022 ] Batch(240/243) done. Loss: 0.6233 lr:0.100000 network_time: 0.0263
104
+ [ Wed Sep 14 19:34:38 2022 ] Eval epoch: 14
105
+ [ Wed Sep 14 19:36:13 2022 ] Mean test loss of 796 batches: 4.171570777893066.
106
+ [ Wed Sep 14 19:36:13 2022 ] Top1: 29.66%
107
+ [ Wed Sep 14 19:36:14 2022 ] Top5: 58.28%
108
+ [ Wed Sep 14 19:36:14 2022 ] Training epoch: 15
109
+ [ Wed Sep 14 19:37:28 2022 ] Batch(97/243) done. Loss: 0.5808 lr:0.100000 network_time: 0.0312
110
+ [ Wed Sep 14 19:38:41 2022 ] Batch(197/243) done. Loss: 0.5147 lr:0.100000 network_time: 0.0266
111
+ [ Wed Sep 14 19:39:14 2022 ] Eval epoch: 15
112
+ [ Wed Sep 14 19:40:48 2022 ] Mean test loss of 796 batches: 3.827831506729126.
113
+ [ Wed Sep 14 19:40:49 2022 ] Top1: 33.09%
114
+ [ Wed Sep 14 19:40:49 2022 ] Top5: 68.44%
115
+ [ Wed Sep 14 19:40:49 2022 ] Training epoch: 16
116
+ [ Wed Sep 14 19:41:32 2022 ] Batch(54/243) done. Loss: 0.4489 lr:0.100000 network_time: 0.0278
117
+ [ Wed Sep 14 19:42:45 2022 ] Batch(154/243) done. Loss: 0.5374 lr:0.100000 network_time: 0.0310
118
+ [ Wed Sep 14 19:43:50 2022 ] Eval epoch: 16
119
+ [ Wed Sep 14 19:45:24 2022 ] Mean test loss of 796 batches: 3.337961196899414.
120
+ [ Wed Sep 14 19:45:25 2022 ] Top1: 36.75%
121
+ [ Wed Sep 14 19:45:25 2022 ] Top5: 68.71%
122
+ [ Wed Sep 14 19:45:25 2022 ] Training epoch: 17
123
+ [ Wed Sep 14 19:45:37 2022 ] Batch(11/243) done. Loss: 0.6355 lr:0.100000 network_time: 0.0260
124
+ [ Wed Sep 14 19:46:50 2022 ] Batch(111/243) done. Loss: 0.5497 lr:0.100000 network_time: 0.0262
125
+ [ Wed Sep 14 19:48:02 2022 ] Batch(211/243) done. Loss: 0.9412 lr:0.100000 network_time: 0.0284
126
+ [ Wed Sep 14 19:48:25 2022 ] Eval epoch: 17
127
+ [ Wed Sep 14 19:50:00 2022 ] Mean test loss of 796 batches: 3.2908499240875244.
128
+ [ Wed Sep 14 19:50:00 2022 ] Top1: 37.76%
129
+ [ Wed Sep 14 19:50:01 2022 ] Top5: 71.83%
130
+ [ Wed Sep 14 19:50:01 2022 ] Training epoch: 18
131
+ [ Wed Sep 14 19:50:54 2022 ] Batch(68/243) done. Loss: 0.5626 lr:0.100000 network_time: 0.0268
132
+ [ Wed Sep 14 19:52:07 2022 ] Batch(168/243) done. Loss: 0.5032 lr:0.100000 network_time: 0.0250
133
+ [ Wed Sep 14 19:53:01 2022 ] Eval epoch: 18
134
+ [ Wed Sep 14 19:54:35 2022 ] Mean test loss of 796 batches: 3.725764513015747.
135
+ [ Wed Sep 14 19:54:36 2022 ] Top1: 38.57%
136
+ [ Wed Sep 14 19:54:36 2022 ] Top5: 69.67%
137
+ [ Wed Sep 14 19:54:36 2022 ] Training epoch: 19
138
+ [ Wed Sep 14 19:54:58 2022 ] Batch(25/243) done. Loss: 0.5624 lr:0.100000 network_time: 0.0273
139
+ [ Wed Sep 14 19:56:11 2022 ] Batch(125/243) done. Loss: 0.6199 lr:0.100000 network_time: 0.0314
140
+ [ Wed Sep 14 19:57:24 2022 ] Batch(225/243) done. Loss: 0.6703 lr:0.100000 network_time: 0.0268
141
+ [ Wed Sep 14 19:57:36 2022 ] Eval epoch: 19
142
+ [ Wed Sep 14 19:59:11 2022 ] Mean test loss of 796 batches: 3.639695882797241.
143
+ [ Wed Sep 14 19:59:11 2022 ] Top1: 36.78%
144
+ [ Wed Sep 14 19:59:12 2022 ] Top5: 71.43%
145
+ [ Wed Sep 14 19:59:12 2022 ] Training epoch: 20
146
+ [ Wed Sep 14 20:00:16 2022 ] Batch(82/243) done. Loss: 0.4942 lr:0.100000 network_time: 0.0274
147
+ [ Wed Sep 14 20:01:29 2022 ] Batch(182/243) done. Loss: 0.4595 lr:0.100000 network_time: 0.0283
148
+ [ Wed Sep 14 20:02:13 2022 ] Eval epoch: 20
149
+ [ Wed Sep 14 20:03:47 2022 ] Mean test loss of 796 batches: 3.102022886276245.
150
+ [ Wed Sep 14 20:03:47 2022 ] Top1: 37.25%
151
+ [ Wed Sep 14 20:03:48 2022 ] Top5: 69.94%
152
+ [ Wed Sep 14 20:03:48 2022 ] Training epoch: 21
153
+ [ Wed Sep 14 20:04:20 2022 ] Batch(39/243) done. Loss: 0.2484 lr:0.100000 network_time: 0.0263
154
+ [ Wed Sep 14 20:05:33 2022 ] Batch(139/243) done. Loss: 0.5053 lr:0.100000 network_time: 0.0274
155
+ [ Wed Sep 14 20:06:46 2022 ] Batch(239/243) done. Loss: 0.5454 lr:0.100000 network_time: 0.0266
156
+ [ Wed Sep 14 20:06:48 2022 ] Eval epoch: 21
157
+ [ Wed Sep 14 20:08:22 2022 ] Mean test loss of 796 batches: 3.5374462604522705.
158
+ [ Wed Sep 14 20:08:23 2022 ] Top1: 35.02%
159
+ [ Wed Sep 14 20:08:23 2022 ] Top5: 69.82%
160
+ [ Wed Sep 14 20:08:23 2022 ] Training epoch: 22
161
+ [ Wed Sep 14 20:09:37 2022 ] Batch(96/243) done. Loss: 0.5373 lr:0.100000 network_time: 0.0271
162
+ [ Wed Sep 14 20:10:50 2022 ] Batch(196/243) done. Loss: 0.4305 lr:0.100000 network_time: 0.0249
163
+ [ Wed Sep 14 20:11:24 2022 ] Eval epoch: 22
164
+ [ Wed Sep 14 20:12:58 2022 ] Mean test loss of 796 batches: 3.3867008686065674.
165
+ [ Wed Sep 14 20:12:58 2022 ] Top1: 42.28%
166
+ [ Wed Sep 14 20:12:59 2022 ] Top5: 74.57%
167
+ [ Wed Sep 14 20:12:59 2022 ] Training epoch: 23
168
+ [ Wed Sep 14 20:13:41 2022 ] Batch(53/243) done. Loss: 0.4115 lr:0.100000 network_time: 0.0342
169
+ [ Wed Sep 14 20:14:54 2022 ] Batch(153/243) done. Loss: 0.7559 lr:0.100000 network_time: 0.0271
170
+ [ Wed Sep 14 20:15:59 2022 ] Eval epoch: 23
171
+ [ Wed Sep 14 20:17:32 2022 ] Mean test loss of 796 batches: 3.638932228088379.
172
+ [ Wed Sep 14 20:17:33 2022 ] Top1: 38.48%
173
+ [ Wed Sep 14 20:17:33 2022 ] Top5: 71.98%
174
+ [ Wed Sep 14 20:17:33 2022 ] Training epoch: 24
175
+ [ Wed Sep 14 20:17:44 2022 ] Batch(10/243) done. Loss: 0.3779 lr:0.100000 network_time: 0.0281
176
+ [ Wed Sep 14 20:18:57 2022 ] Batch(110/243) done. Loss: 0.4044 lr:0.100000 network_time: 0.0282
177
+ [ Wed Sep 14 20:20:10 2022 ] Batch(210/243) done. Loss: 0.4047 lr:0.100000 network_time: 0.0274
178
+ [ Wed Sep 14 20:20:34 2022 ] Eval epoch: 24
179
+ [ Wed Sep 14 20:22:07 2022 ] Mean test loss of 796 batches: 3.194326162338257.
180
+ [ Wed Sep 14 20:22:08 2022 ] Top1: 41.16%
181
+ [ Wed Sep 14 20:22:08 2022 ] Top5: 73.29%
182
+ [ Wed Sep 14 20:22:08 2022 ] Training epoch: 25
183
+ [ Wed Sep 14 20:23:01 2022 ] Batch(67/243) done. Loss: 0.3711 lr:0.100000 network_time: 0.0270
184
+ [ Wed Sep 14 20:24:13 2022 ] Batch(167/243) done. Loss: 0.4264 lr:0.100000 network_time: 0.0263
185
+ [ Wed Sep 14 20:25:08 2022 ] Eval epoch: 25
186
+ [ Wed Sep 14 20:26:42 2022 ] Mean test loss of 796 batches: 3.711545705795288.
187
+ [ Wed Sep 14 20:26:43 2022 ] Top1: 37.13%
188
+ [ Wed Sep 14 20:26:43 2022 ] Top5: 68.57%
189
+ [ Wed Sep 14 20:26:43 2022 ] Training epoch: 26
190
+ [ Wed Sep 14 20:27:04 2022 ] Batch(24/243) done. Loss: 0.4519 lr:0.100000 network_time: 0.0271
191
+ [ Wed Sep 14 20:28:17 2022 ] Batch(124/243) done. Loss: 0.3969 lr:0.100000 network_time: 0.0267
192
+ [ Wed Sep 14 20:29:30 2022 ] Batch(224/243) done. Loss: 0.2840 lr:0.100000 network_time: 0.0270
193
+ [ Wed Sep 14 20:29:43 2022 ] Eval epoch: 26
194
+ [ Wed Sep 14 20:31:18 2022 ] Mean test loss of 796 batches: 3.380185127258301.
195
+ [ Wed Sep 14 20:31:18 2022 ] Top1: 44.60%
196
+ [ Wed Sep 14 20:31:18 2022 ] Top5: 76.95%
197
+ [ Wed Sep 14 20:31:18 2022 ] Training epoch: 27
198
+ [ Wed Sep 14 20:32:21 2022 ] Batch(81/243) done. Loss: 0.4860 lr:0.100000 network_time: 0.0262
199
+ [ Wed Sep 14 20:33:34 2022 ] Batch(181/243) done. Loss: 0.4455 lr:0.100000 network_time: 0.0433
200
+ [ Wed Sep 14 20:34:19 2022 ] Eval epoch: 27
201
+ [ Wed Sep 14 20:35:53 2022 ] Mean test loss of 796 batches: 3.682363748550415.
202
+ [ Wed Sep 14 20:35:54 2022 ] Top1: 34.85%
203
+ [ Wed Sep 14 20:35:54 2022 ] Top5: 68.94%
204
+ [ Wed Sep 14 20:35:55 2022 ] Training epoch: 28
205
+ [ Wed Sep 14 20:36:26 2022 ] Batch(38/243) done. Loss: 0.2284 lr:0.100000 network_time: 0.0537
206
+ [ Wed Sep 14 20:37:39 2022 ] Batch(138/243) done. Loss: 0.3881 lr:0.100000 network_time: 0.0270
207
+ [ Wed Sep 14 20:38:52 2022 ] Batch(238/243) done. Loss: 0.4989 lr:0.100000 network_time: 0.0272
208
+ [ Wed Sep 14 20:38:55 2022 ] Eval epoch: 28
209
+ [ Wed Sep 14 20:40:29 2022 ] Mean test loss of 796 batches: 4.050222873687744.
210
+ [ Wed Sep 14 20:40:29 2022 ] Top1: 39.72%
211
+ [ Wed Sep 14 20:40:29 2022 ] Top5: 72.02%
212
+ [ Wed Sep 14 20:40:30 2022 ] Training epoch: 29
213
+ [ Wed Sep 14 20:41:42 2022 ] Batch(95/243) done. Loss: 0.2913 lr:0.100000 network_time: 0.0265
214
+ [ Wed Sep 14 20:42:55 2022 ] Batch(195/243) done. Loss: 0.4301 lr:0.100000 network_time: 0.0261
215
+ [ Wed Sep 14 20:43:30 2022 ] Eval epoch: 29
216
+ [ Wed Sep 14 20:45:04 2022 ] Mean test loss of 796 batches: 4.2824883460998535.
217
+ [ Wed Sep 14 20:45:04 2022 ] Top1: 41.94%
218
+ [ Wed Sep 14 20:45:04 2022 ] Top5: 74.24%
219
+ [ Wed Sep 14 20:45:05 2022 ] Training epoch: 30
220
+ [ Wed Sep 14 20:45:46 2022 ] Batch(52/243) done. Loss: 0.3259 lr:0.100000 network_time: 0.0270
221
+ [ Wed Sep 14 20:46:59 2022 ] Batch(152/243) done. Loss: 0.3045 lr:0.100000 network_time: 0.0273
222
+ [ Wed Sep 14 20:48:05 2022 ] Eval epoch: 30
223
+ [ Wed Sep 14 20:49:39 2022 ] Mean test loss of 796 batches: 3.3407092094421387.
224
+ [ Wed Sep 14 20:49:40 2022 ] Top1: 40.77%
225
+ [ Wed Sep 14 20:49:40 2022 ] Top5: 74.75%
226
+ [ Wed Sep 14 20:49:40 2022 ] Training epoch: 31
227
+ [ Wed Sep 14 20:49:50 2022 ] Batch(9/243) done. Loss: 0.4007 lr:0.100000 network_time: 0.0268
228
+ [ Wed Sep 14 20:51:03 2022 ] Batch(109/243) done. Loss: 0.2993 lr:0.100000 network_time: 0.0316
229
+ [ Wed Sep 14 20:52:16 2022 ] Batch(209/243) done. Loss: 0.3673 lr:0.100000 network_time: 0.0272
230
+ [ Wed Sep 14 20:52:41 2022 ] Eval epoch: 31
231
+ [ Wed Sep 14 20:54:15 2022 ] Mean test loss of 796 batches: 3.6521658897399902.
232
+ [ Wed Sep 14 20:54:15 2022 ] Top1: 38.46%
233
+ [ Wed Sep 14 20:54:15 2022 ] Top5: 72.08%
234
+ [ Wed Sep 14 20:54:16 2022 ] Training epoch: 32
235
+ [ Wed Sep 14 20:55:07 2022 ] Batch(66/243) done. Loss: 0.3483 lr:0.100000 network_time: 0.0256
236
+ [ Wed Sep 14 20:56:20 2022 ] Batch(166/243) done. Loss: 0.5043 lr:0.100000 network_time: 0.0265
237
+ [ Wed Sep 14 20:57:16 2022 ] Eval epoch: 32
238
+ [ Wed Sep 14 20:58:49 2022 ] Mean test loss of 796 batches: 3.2773547172546387.
239
+ [ Wed Sep 14 20:58:50 2022 ] Top1: 42.23%
240
+ [ Wed Sep 14 20:58:50 2022 ] Top5: 74.82%
241
+ [ Wed Sep 14 20:58:50 2022 ] Training epoch: 33
242
+ [ Wed Sep 14 20:59:11 2022 ] Batch(23/243) done. Loss: 0.2605 lr:0.100000 network_time: 0.0277
243
+ [ Wed Sep 14 21:00:23 2022 ] Batch(123/243) done. Loss: 0.3847 lr:0.100000 network_time: 0.0264
244
+ [ Wed Sep 14 21:01:36 2022 ] Batch(223/243) done. Loss: 0.5128 lr:0.100000 network_time: 0.0269
245
+ [ Wed Sep 14 21:01:50 2022 ] Eval epoch: 33
246
+ [ Wed Sep 14 21:03:25 2022 ] Mean test loss of 796 batches: 3.2684571743011475.
247
+ [ Wed Sep 14 21:03:25 2022 ] Top1: 41.48%
248
+ [ Wed Sep 14 21:03:26 2022 ] Top5: 73.69%
249
+ [ Wed Sep 14 21:03:26 2022 ] Training epoch: 34
250
+ [ Wed Sep 14 21:04:28 2022 ] Batch(80/243) done. Loss: 0.2067 lr:0.100000 network_time: 0.0268
251
+ [ Wed Sep 14 21:05:41 2022 ] Batch(180/243) done. Loss: 0.3515 lr:0.100000 network_time: 0.0311
252
+ [ Wed Sep 14 21:06:26 2022 ] Eval epoch: 34
253
+ [ Wed Sep 14 21:08:00 2022 ] Mean test loss of 796 batches: 3.645359754562378.
254
+ [ Wed Sep 14 21:08:01 2022 ] Top1: 40.37%
255
+ [ Wed Sep 14 21:08:02 2022 ] Top5: 74.26%
256
+ [ Wed Sep 14 21:08:02 2022 ] Training epoch: 35
257
+ [ Wed Sep 14 21:08:32 2022 ] Batch(37/243) done. Loss: 0.2540 lr:0.100000 network_time: 0.0262
258
+ [ Wed Sep 14 21:09:45 2022 ] Batch(137/243) done. Loss: 0.3441 lr:0.100000 network_time: 0.0319
259
+ [ Wed Sep 14 21:10:58 2022 ] Batch(237/243) done. Loss: 0.4167 lr:0.100000 network_time: 0.0272
260
+ [ Wed Sep 14 21:11:02 2022 ] Eval epoch: 35
261
+ [ Wed Sep 14 21:12:35 2022 ] Mean test loss of 796 batches: 3.4303860664367676.
262
+ [ Wed Sep 14 21:12:36 2022 ] Top1: 40.54%
263
+ [ Wed Sep 14 21:12:36 2022 ] Top5: 74.03%
264
+ [ Wed Sep 14 21:12:36 2022 ] Training epoch: 36
265
+ [ Wed Sep 14 21:13:48 2022 ] Batch(94/243) done. Loss: 0.3030 lr:0.100000 network_time: 0.0314
266
+ [ Wed Sep 14 21:15:01 2022 ] Batch(194/243) done. Loss: 0.3031 lr:0.100000 network_time: 0.0274
267
+ [ Wed Sep 14 21:15:36 2022 ] Eval epoch: 36
268
+ [ Wed Sep 14 21:17:10 2022 ] Mean test loss of 796 batches: 3.0533454418182373.
269
+ [ Wed Sep 14 21:17:10 2022 ] Top1: 42.74%
270
+ [ Wed Sep 14 21:17:11 2022 ] Top5: 74.29%
271
+ [ Wed Sep 14 21:17:11 2022 ] Training epoch: 37
272
+ [ Wed Sep 14 21:17:52 2022 ] Batch(51/243) done. Loss: 0.2052 lr:0.100000 network_time: 0.0289
273
+ [ Wed Sep 14 21:19:05 2022 ] Batch(151/243) done. Loss: 0.3076 lr:0.100000 network_time: 0.0263
274
+ [ Wed Sep 14 21:20:11 2022 ] Eval epoch: 37
275
+ [ Wed Sep 14 21:21:45 2022 ] Mean test loss of 796 batches: 3.8635685443878174.
276
+ [ Wed Sep 14 21:21:46 2022 ] Top1: 38.69%
277
+ [ Wed Sep 14 21:21:46 2022 ] Top5: 73.05%
278
+ [ Wed Sep 14 21:21:46 2022 ] Training epoch: 38
279
+ [ Wed Sep 14 21:21:56 2022 ] Batch(8/243) done. Loss: 0.3111 lr:0.100000 network_time: 0.0273
280
+ [ Wed Sep 14 21:23:09 2022 ] Batch(108/243) done. Loss: 0.3223 lr:0.100000 network_time: 0.0276
281
+ [ Wed Sep 14 21:24:22 2022 ] Batch(208/243) done. Loss: 0.3715 lr:0.100000 network_time: 0.0272
282
+ [ Wed Sep 14 21:24:47 2022 ] Eval epoch: 38
283
+ [ Wed Sep 14 21:26:20 2022 ] Mean test loss of 796 batches: 3.5092737674713135.
284
+ [ Wed Sep 14 21:26:21 2022 ] Top1: 40.92%
285
+ [ Wed Sep 14 21:26:21 2022 ] Top5: 74.36%
286
+ [ Wed Sep 14 21:26:21 2022 ] Training epoch: 39
287
+ [ Wed Sep 14 21:27:12 2022 ] Batch(65/243) done. Loss: 0.3190 lr:0.100000 network_time: 0.0271
288
+ [ Wed Sep 14 21:28:25 2022 ] Batch(165/243) done. Loss: 0.3411 lr:0.100000 network_time: 0.0309
289
+ [ Wed Sep 14 21:29:22 2022 ] Eval epoch: 39
290
+ [ Wed Sep 14 21:30:56 2022 ] Mean test loss of 796 batches: 3.9432618618011475.
291
+ [ Wed Sep 14 21:30:56 2022 ] Top1: 38.59%
292
+ [ Wed Sep 14 21:30:57 2022 ] Top5: 70.74%
293
+ [ Wed Sep 14 21:30:57 2022 ] Training epoch: 40
294
+ [ Wed Sep 14 21:31:17 2022 ] Batch(22/243) done. Loss: 0.2824 lr:0.100000 network_time: 0.0304
295
+ [ Wed Sep 14 21:32:30 2022 ] Batch(122/243) done. Loss: 0.2105 lr:0.100000 network_time: 0.0327
296
+ [ Wed Sep 14 21:33:43 2022 ] Batch(222/243) done. Loss: 0.3019 lr:0.100000 network_time: 0.0264
297
+ [ Wed Sep 14 21:33:57 2022 ] Eval epoch: 40
298
+ [ Wed Sep 14 21:35:31 2022 ] Mean test loss of 796 batches: 3.499183177947998.
299
+ [ Wed Sep 14 21:35:32 2022 ] Top1: 40.04%
300
+ [ Wed Sep 14 21:35:32 2022 ] Top5: 72.85%
301
+ [ Wed Sep 14 21:35:32 2022 ] Training epoch: 41
302
+ [ Wed Sep 14 21:36:33 2022 ] Batch(79/243) done. Loss: 0.1238 lr:0.100000 network_time: 0.0262
303
+ [ Wed Sep 14 21:37:46 2022 ] Batch(179/243) done. Loss: 0.4174 lr:0.100000 network_time: 0.0323
304
+ [ Wed Sep 14 21:38:32 2022 ] Eval epoch: 41
305
+ [ Wed Sep 14 21:40:06 2022 ] Mean test loss of 796 batches: 3.4872095584869385.
306
+ [ Wed Sep 14 21:40:07 2022 ] Top1: 37.58%
307
+ [ Wed Sep 14 21:40:07 2022 ] Top5: 70.06%
308
+ [ Wed Sep 14 21:40:07 2022 ] Training epoch: 42
309
+ [ Wed Sep 14 21:40:37 2022 ] Batch(36/243) done. Loss: 0.2036 lr:0.100000 network_time: 0.0276
310
+ [ Wed Sep 14 21:41:50 2022 ] Batch(136/243) done. Loss: 0.3632 lr:0.100000 network_time: 0.0331
311
+ [ Wed Sep 14 21:43:03 2022 ] Batch(236/243) done. Loss: 0.3208 lr:0.100000 network_time: 0.0316
312
+ [ Wed Sep 14 21:43:08 2022 ] Eval epoch: 42
313
+ [ Wed Sep 14 21:44:42 2022 ] Mean test loss of 796 batches: 3.3474175930023193.
314
+ [ Wed Sep 14 21:44:42 2022 ] Top1: 39.01%
315
+ [ Wed Sep 14 21:44:42 2022 ] Top5: 72.59%
316
+ [ Wed Sep 14 21:44:43 2022 ] Training epoch: 43
317
+ [ Wed Sep 14 21:45:54 2022 ] Batch(93/243) done. Loss: 0.2545 lr:0.100000 network_time: 0.0256
318
+ [ Wed Sep 14 21:47:07 2022 ] Batch(193/243) done. Loss: 0.3229 lr:0.100000 network_time: 0.0299
319
+ [ Wed Sep 14 21:47:43 2022 ] Eval epoch: 43
320
+ [ Wed Sep 14 21:49:17 2022 ] Mean test loss of 796 batches: 3.7657318115234375.
321
+ [ Wed Sep 14 21:49:17 2022 ] Top1: 37.50%
322
+ [ Wed Sep 14 21:49:17 2022 ] Top5: 69.66%
323
+ [ Wed Sep 14 21:49:18 2022 ] Training epoch: 44
324
+ [ Wed Sep 14 21:49:58 2022 ] Batch(50/243) done. Loss: 0.1688 lr:0.100000 network_time: 0.0268
325
+ [ Wed Sep 14 21:51:11 2022 ] Batch(150/243) done. Loss: 0.2368 lr:0.100000 network_time: 0.0270
326
+ [ Wed Sep 14 21:52:18 2022 ] Eval epoch: 44
327
+ [ Wed Sep 14 21:53:52 2022 ] Mean test loss of 796 batches: 3.6248888969421387.
328
+ [ Wed Sep 14 21:53:52 2022 ] Top1: 38.95%
329
+ [ Wed Sep 14 21:53:52 2022 ] Top5: 70.46%
330
+ [ Wed Sep 14 21:53:53 2022 ] Training epoch: 45
331
+ [ Wed Sep 14 21:54:01 2022 ] Batch(7/243) done. Loss: 0.1919 lr:0.100000 network_time: 0.0266
332
+ [ Wed Sep 14 21:55:14 2022 ] Batch(107/243) done. Loss: 0.2056 lr:0.100000 network_time: 0.0267
333
+ [ Wed Sep 14 21:56:27 2022 ] Batch(207/243) done. Loss: 0.2504 lr:0.100000 network_time: 0.0265
334
+ [ Wed Sep 14 21:56:52 2022 ] Eval epoch: 45
335
+ [ Wed Sep 14 21:58:27 2022 ] Mean test loss of 796 batches: 3.2078866958618164.
336
+ [ Wed Sep 14 21:58:27 2022 ] Top1: 39.89%
337
+ [ Wed Sep 14 21:58:27 2022 ] Top5: 71.88%
338
+ [ Wed Sep 14 21:58:27 2022 ] Training epoch: 46
339
+ [ Wed Sep 14 21:59:18 2022 ] Batch(64/243) done. Loss: 0.3002 lr:0.100000 network_time: 0.0321
340
+ [ Wed Sep 14 22:00:31 2022 ] Batch(164/243) done. Loss: 0.2156 lr:0.100000 network_time: 0.0267
341
+ [ Wed Sep 14 22:01:28 2022 ] Eval epoch: 46
342
+ [ Wed Sep 14 22:03:01 2022 ] Mean test loss of 796 batches: 3.4447925090789795.
343
+ [ Wed Sep 14 22:03:01 2022 ] Top1: 43.89%
344
+ [ Wed Sep 14 22:03:02 2022 ] Top5: 75.43%
345
+ [ Wed Sep 14 22:03:02 2022 ] Training epoch: 47
346
+ [ Wed Sep 14 22:03:21 2022 ] Batch(21/243) done. Loss: 0.1526 lr:0.100000 network_time: 0.0279
347
+ [ Wed Sep 14 22:04:34 2022 ] Batch(121/243) done. Loss: 0.3723 lr:0.100000 network_time: 0.0273
348
+ [ Wed Sep 14 22:05:47 2022 ] Batch(221/243) done. Loss: 0.2064 lr:0.100000 network_time: 0.0259
349
+ [ Wed Sep 14 22:06:02 2022 ] Eval epoch: 47
350
+ [ Wed Sep 14 22:07:36 2022 ] Mean test loss of 796 batches: 4.070017337799072.
351
+ [ Wed Sep 14 22:07:37 2022 ] Top1: 43.00%
352
+ [ Wed Sep 14 22:07:37 2022 ] Top5: 74.75%
353
+ [ Wed Sep 14 22:07:38 2022 ] Training epoch: 48
354
+ [ Wed Sep 14 22:08:38 2022 ] Batch(78/243) done. Loss: 0.1903 lr:0.100000 network_time: 0.0329
355
+ [ Wed Sep 14 22:09:51 2022 ] Batch(178/243) done. Loss: 0.3535 lr:0.100000 network_time: 0.0267
356
+ [ Wed Sep 14 22:10:38 2022 ] Eval epoch: 48
357
+ [ Wed Sep 14 22:12:12 2022 ] Mean test loss of 796 batches: 3.5496559143066406.
358
+ [ Wed Sep 14 22:12:12 2022 ] Top1: 39.57%
359
+ [ Wed Sep 14 22:12:12 2022 ] Top5: 71.81%
360
+ [ Wed Sep 14 22:12:13 2022 ] Training epoch: 49
361
+ [ Wed Sep 14 22:12:42 2022 ] Batch(35/243) done. Loss: 0.3369 lr:0.100000 network_time: 0.0299
362
+ [ Wed Sep 14 22:13:54 2022 ] Batch(135/243) done. Loss: 0.2675 lr:0.100000 network_time: 0.0278
363
+ [ Wed Sep 14 22:15:07 2022 ] Batch(235/243) done. Loss: 0.2864 lr:0.100000 network_time: 0.0274
364
+ [ Wed Sep 14 22:15:13 2022 ] Eval epoch: 49
365
+ [ Wed Sep 14 22:16:46 2022 ] Mean test loss of 796 batches: 3.7140417098999023.
366
+ [ Wed Sep 14 22:16:47 2022 ] Top1: 38.81%
367
+ [ Wed Sep 14 22:16:47 2022 ] Top5: 71.16%
368
+ [ Wed Sep 14 22:16:47 2022 ] Training epoch: 50
369
+ [ Wed Sep 14 22:17:58 2022 ] Batch(92/243) done. Loss: 0.2630 lr:0.100000 network_time: 0.0267
370
+ [ Wed Sep 14 22:19:11 2022 ] Batch(192/243) done. Loss: 0.2089 lr:0.100000 network_time: 0.0326
371
+ [ Wed Sep 14 22:19:47 2022 ] Eval epoch: 50
372
+ [ Wed Sep 14 22:21:21 2022 ] Mean test loss of 796 batches: 3.919886350631714.
373
+ [ Wed Sep 14 22:21:21 2022 ] Top1: 40.21%
374
+ [ Wed Sep 14 22:21:22 2022 ] Top5: 73.40%
375
+ [ Wed Sep 14 22:21:22 2022 ] Training epoch: 51
376
+ [ Wed Sep 14 22:22:01 2022 ] Batch(49/243) done. Loss: 0.2332 lr:0.100000 network_time: 0.0306
377
+ [ Wed Sep 14 22:23:14 2022 ] Batch(149/243) done. Loss: 0.4120 lr:0.100000 network_time: 0.0270
378
+ [ Wed Sep 14 22:24:22 2022 ] Eval epoch: 51
379
+ [ Wed Sep 14 22:25:56 2022 ] Mean test loss of 796 batches: 3.310202121734619.
380
+ [ Wed Sep 14 22:25:56 2022 ] Top1: 45.68%
381
+ [ Wed Sep 14 22:25:57 2022 ] Top5: 76.46%
382
+ [ Wed Sep 14 22:25:57 2022 ] Training epoch: 52
383
+ [ Wed Sep 14 22:26:05 2022 ] Batch(6/243) done. Loss: 0.1790 lr:0.100000 network_time: 0.0275
384
+ [ Wed Sep 14 22:27:18 2022 ] Batch(106/243) done. Loss: 0.1615 lr:0.100000 network_time: 0.0282
385
+ [ Wed Sep 14 22:28:31 2022 ] Batch(206/243) done. Loss: 0.2678 lr:0.100000 network_time: 0.0269
386
+ [ Wed Sep 14 22:28:57 2022 ] Eval epoch: 52
387
+ [ Wed Sep 14 22:30:31 2022 ] Mean test loss of 796 batches: 3.7315006256103516.
388
+ [ Wed Sep 14 22:30:32 2022 ] Top1: 38.94%
389
+ [ Wed Sep 14 22:30:32 2022 ] Top5: 71.32%
390
+ [ Wed Sep 14 22:30:32 2022 ] Training epoch: 53
391
+ [ Wed Sep 14 22:31:22 2022 ] Batch(63/243) done. Loss: 0.1572 lr:0.100000 network_time: 0.0271
392
+ [ Wed Sep 14 22:32:35 2022 ] Batch(163/243) done. Loss: 0.2304 lr:0.100000 network_time: 0.0270
393
+ [ Wed Sep 14 22:33:32 2022 ] Eval epoch: 53
394
+ [ Wed Sep 14 22:35:06 2022 ] Mean test loss of 796 batches: 3.6942200660705566.
395
+ [ Wed Sep 14 22:35:07 2022 ] Top1: 42.98%
396
+ [ Wed Sep 14 22:35:07 2022 ] Top5: 74.85%
397
+ [ Wed Sep 14 22:35:08 2022 ] Training epoch: 54
398
+ [ Wed Sep 14 22:35:26 2022 ] Batch(20/243) done. Loss: 0.1527 lr:0.100000 network_time: 0.0262
399
+ [ Wed Sep 14 22:36:39 2022 ] Batch(120/243) done. Loss: 0.1793 lr:0.100000 network_time: 0.0264
400
+ [ Wed Sep 14 22:37:52 2022 ] Batch(220/243) done. Loss: 0.2885 lr:0.100000 network_time: 0.0277
401
+ [ Wed Sep 14 22:38:08 2022 ] Eval epoch: 54
402
+ [ Wed Sep 14 22:39:42 2022 ] Mean test loss of 796 batches: 3.254549026489258.
403
+ [ Wed Sep 14 22:39:42 2022 ] Top1: 41.90%
404
+ [ Wed Sep 14 22:39:43 2022 ] Top5: 72.66%
405
+ [ Wed Sep 14 22:39:43 2022 ] Training epoch: 55
406
+ [ Wed Sep 14 22:40:43 2022 ] Batch(77/243) done. Loss: 0.1745 lr:0.100000 network_time: 0.0272
407
+ [ Wed Sep 14 22:41:56 2022 ] Batch(177/243) done. Loss: 0.2407 lr:0.100000 network_time: 0.0273
408
+ [ Wed Sep 14 22:42:43 2022 ] Eval epoch: 55
409
+ [ Wed Sep 14 22:44:18 2022 ] Mean test loss of 796 batches: 3.397036075592041.
410
+ [ Wed Sep 14 22:44:19 2022 ] Top1: 46.16%
411
+ [ Wed Sep 14 22:44:19 2022 ] Top5: 77.40%
412
+ [ Wed Sep 14 22:44:19 2022 ] Training epoch: 56
413
+ [ Wed Sep 14 22:44:48 2022 ] Batch(34/243) done. Loss: 0.1203 lr:0.100000 network_time: 0.0263
414
+ [ Wed Sep 14 22:46:00 2022 ] Batch(134/243) done. Loss: 0.1849 lr:0.100000 network_time: 0.0397
415
+ [ Wed Sep 14 22:47:13 2022 ] Batch(234/243) done. Loss: 0.1408 lr:0.100000 network_time: 0.0278
416
+ [ Wed Sep 14 22:47:19 2022 ] Eval epoch: 56
417
+ [ Wed Sep 14 22:48:53 2022 ] Mean test loss of 796 batches: 3.7120611667633057.
418
+ [ Wed Sep 14 22:48:54 2022 ] Top1: 39.24%
419
+ [ Wed Sep 14 22:48:54 2022 ] Top5: 71.20%
420
+ [ Wed Sep 14 22:48:54 2022 ] Training epoch: 57
421
+ [ Wed Sep 14 22:50:04 2022 ] Batch(91/243) done. Loss: 0.2682 lr:0.100000 network_time: 0.0271
422
+ [ Wed Sep 14 22:51:17 2022 ] Batch(191/243) done. Loss: 0.4857 lr:0.100000 network_time: 0.0313
423
+ [ Wed Sep 14 22:51:54 2022 ] Eval epoch: 57
424
+ [ Wed Sep 14 22:53:28 2022 ] Mean test loss of 796 batches: 4.152618408203125.
425
+ [ Wed Sep 14 22:53:28 2022 ] Top1: 40.22%
426
+ [ Wed Sep 14 22:53:29 2022 ] Top5: 73.39%
427
+ [ Wed Sep 14 22:53:29 2022 ] Training epoch: 58
428
+ [ Wed Sep 14 22:54:08 2022 ] Batch(48/243) done. Loss: 0.2004 lr:0.100000 network_time: 0.0270
429
+ [ Wed Sep 14 22:55:20 2022 ] Batch(148/243) done. Loss: 0.1205 lr:0.100000 network_time: 0.0265
430
+ [ Wed Sep 14 22:56:29 2022 ] Eval epoch: 58
431
+ [ Wed Sep 14 22:58:03 2022 ] Mean test loss of 796 batches: 3.781721353530884.
432
+ [ Wed Sep 14 22:58:04 2022 ] Top1: 36.52%
433
+ [ Wed Sep 14 22:58:04 2022 ] Top5: 69.00%
434
+ [ Wed Sep 14 22:58:04 2022 ] Training epoch: 59
435
+ [ Wed Sep 14 22:58:12 2022 ] Batch(5/243) done. Loss: 0.3946 lr:0.100000 network_time: 0.0266
436
+ [ Wed Sep 14 22:59:25 2022 ] Batch(105/243) done. Loss: 0.2194 lr:0.100000 network_time: 0.0314
437
+ [ Wed Sep 14 23:00:38 2022 ] Batch(205/243) done. Loss: 0.2770 lr:0.100000 network_time: 0.0280
438
+ [ Wed Sep 14 23:01:05 2022 ] Eval epoch: 59
439
+ [ Wed Sep 14 23:02:39 2022 ] Mean test loss of 796 batches: 3.6728172302246094.
440
+ [ Wed Sep 14 23:02:40 2022 ] Top1: 42.16%
441
+ [ Wed Sep 14 23:02:40 2022 ] Top5: 75.15%
442
+ [ Wed Sep 14 23:02:40 2022 ] Training epoch: 60
443
+ [ Wed Sep 14 23:03:29 2022 ] Batch(62/243) done. Loss: 0.2575 lr:0.100000 network_time: 0.0267
444
+ [ Wed Sep 14 23:04:42 2022 ] Batch(162/243) done. Loss: 0.2812 lr:0.100000 network_time: 0.0264
445
+ [ Wed Sep 14 23:05:40 2022 ] Eval epoch: 60
446
+ [ Wed Sep 14 23:07:15 2022 ] Mean test loss of 796 batches: 3.4849016666412354.
447
+ [ Wed Sep 14 23:07:15 2022 ] Top1: 42.90%
448
+ [ Wed Sep 14 23:07:16 2022 ] Top5: 75.80%
449
+ [ Wed Sep 14 23:07:16 2022 ] Training epoch: 61
450
+ [ Wed Sep 14 23:07:33 2022 ] Batch(19/243) done. Loss: 0.2216 lr:0.010000 network_time: 0.0273
451
+ [ Wed Sep 14 23:08:46 2022 ] Batch(119/243) done. Loss: 0.0335 lr:0.010000 network_time: 0.0273
452
+ [ Wed Sep 14 23:09:59 2022 ] Batch(219/243) done. Loss: 0.0458 lr:0.010000 network_time: 0.0300
453
+ [ Wed Sep 14 23:10:16 2022 ] Eval epoch: 61
454
+ [ Wed Sep 14 23:11:50 2022 ] Mean test loss of 796 batches: 3.100501537322998.
455
+ [ Wed Sep 14 23:11:50 2022 ] Top1: 49.91%
456
+ [ Wed Sep 14 23:11:51 2022 ] Top5: 81.10%
457
+ [ Wed Sep 14 23:11:51 2022 ] Training epoch: 62
458
+ [ Wed Sep 14 23:12:50 2022 ] Batch(76/243) done. Loss: 0.0528 lr:0.010000 network_time: 0.0337
459
+ [ Wed Sep 14 23:14:03 2022 ] Batch(176/243) done. Loss: 0.0395 lr:0.010000 network_time: 0.0262
460
+ [ Wed Sep 14 23:14:51 2022 ] Eval epoch: 62
461
+ [ Wed Sep 14 23:16:25 2022 ] Mean test loss of 796 batches: 2.988133430480957.
462
+ [ Wed Sep 14 23:16:26 2022 ] Top1: 50.26%
463
+ [ Wed Sep 14 23:16:26 2022 ] Top5: 81.17%
464
+ [ Wed Sep 14 23:16:26 2022 ] Training epoch: 63
465
+ [ Wed Sep 14 23:16:54 2022 ] Batch(33/243) done. Loss: 0.0427 lr:0.010000 network_time: 0.0270
466
+ [ Wed Sep 14 23:18:07 2022 ] Batch(133/243) done. Loss: 0.0722 lr:0.010000 network_time: 0.0273
467
+ [ Wed Sep 14 23:19:20 2022 ] Batch(233/243) done. Loss: 0.0502 lr:0.010000 network_time: 0.0274
468
+ [ Wed Sep 14 23:19:27 2022 ] Eval epoch: 63
469
+ [ Wed Sep 14 23:21:01 2022 ] Mean test loss of 796 batches: 2.960383653640747.
470
+ [ Wed Sep 14 23:21:01 2022 ] Top1: 50.49%
471
+ [ Wed Sep 14 23:21:02 2022 ] Top5: 81.23%
472
+ [ Wed Sep 14 23:21:02 2022 ] Training epoch: 64
473
+ [ Wed Sep 14 23:22:11 2022 ] Batch(90/243) done. Loss: 0.0194 lr:0.010000 network_time: 0.0266
474
+ [ Wed Sep 14 23:23:24 2022 ] Batch(190/243) done. Loss: 0.0085 lr:0.010000 network_time: 0.0267
475
+ [ Wed Sep 14 23:24:02 2022 ] Eval epoch: 64
476
+ [ Wed Sep 14 23:25:37 2022 ] Mean test loss of 796 batches: 2.9251861572265625.
477
+ [ Wed Sep 14 23:25:37 2022 ] Top1: 49.86%
478
+ [ Wed Sep 14 23:25:38 2022 ] Top5: 80.80%
479
+ [ Wed Sep 14 23:25:38 2022 ] Training epoch: 65
480
+ [ Wed Sep 14 23:26:16 2022 ] Batch(47/243) done. Loss: 0.0262 lr:0.010000 network_time: 0.0267
481
+ [ Wed Sep 14 23:27:29 2022 ] Batch(147/243) done. Loss: 0.0334 lr:0.010000 network_time: 0.0302
482
+ [ Wed Sep 14 23:28:38 2022 ] Eval epoch: 65
483
+ [ Wed Sep 14 23:30:13 2022 ] Mean test loss of 796 batches: 3.311321973800659.
484
+ [ Wed Sep 14 23:30:13 2022 ] Top1: 50.82%
485
+ [ Wed Sep 14 23:30:13 2022 ] Top5: 81.65%
486
+ [ Wed Sep 14 23:30:14 2022 ] Training epoch: 66
487
+ [ Wed Sep 14 23:30:20 2022 ] Batch(4/243) done. Loss: 0.0525 lr:0.010000 network_time: 0.0270
488
+ [ Wed Sep 14 23:31:33 2022 ] Batch(104/243) done. Loss: 0.0223 lr:0.010000 network_time: 0.0279
489
+ [ Wed Sep 14 23:32:46 2022 ] Batch(204/243) done. Loss: 0.0726 lr:0.010000 network_time: 0.0264
490
+ [ Wed Sep 14 23:33:14 2022 ] Eval epoch: 66
491
+ [ Wed Sep 14 23:34:48 2022 ] Mean test loss of 796 batches: 3.099126100540161.
492
+ [ Wed Sep 14 23:34:48 2022 ] Top1: 50.70%
493
+ [ Wed Sep 14 23:34:49 2022 ] Top5: 81.42%
494
+ [ Wed Sep 14 23:34:49 2022 ] Training epoch: 67
495
+ [ Wed Sep 14 23:35:37 2022 ] Batch(61/243) done. Loss: 0.0213 lr:0.010000 network_time: 0.0261
496
+ [ Wed Sep 14 23:36:50 2022 ] Batch(161/243) done. Loss: 0.0166 lr:0.010000 network_time: 0.0277
497
+ [ Wed Sep 14 23:37:49 2022 ] Eval epoch: 67
498
+ [ Wed Sep 14 23:39:23 2022 ] Mean test loss of 796 batches: 2.9924604892730713.
499
+ [ Wed Sep 14 23:39:23 2022 ] Top1: 50.13%
500
+ [ Wed Sep 14 23:39:24 2022 ] Top5: 81.19%
501
+ [ Wed Sep 14 23:39:24 2022 ] Training epoch: 68
502
+ [ Wed Sep 14 23:39:40 2022 ] Batch(18/243) done. Loss: 0.0176 lr:0.010000 network_time: 0.0314
503
+ [ Wed Sep 14 23:40:53 2022 ] Batch(118/243) done. Loss: 0.0206 lr:0.010000 network_time: 0.0297
504
+ [ Wed Sep 14 23:42:06 2022 ] Batch(218/243) done. Loss: 0.0430 lr:0.010000 network_time: 0.0269
505
+ [ Wed Sep 14 23:42:24 2022 ] Eval epoch: 68
506
+ [ Wed Sep 14 23:43:58 2022 ] Mean test loss of 796 batches: 3.0478153228759766.
507
+ [ Wed Sep 14 23:43:58 2022 ] Top1: 49.25%
508
+ [ Wed Sep 14 23:43:58 2022 ] Top5: 80.62%
509
+ [ Wed Sep 14 23:43:58 2022 ] Training epoch: 69
510
+ [ Wed Sep 14 23:44:57 2022 ] Batch(75/243) done. Loss: 0.0174 lr:0.010000 network_time: 0.0304
511
+ [ Wed Sep 14 23:46:10 2022 ] Batch(175/243) done. Loss: 0.0297 lr:0.010000 network_time: 0.0259
512
+ [ Wed Sep 14 23:46:59 2022 ] Eval epoch: 69
513
+ [ Wed Sep 14 23:48:33 2022 ] Mean test loss of 796 batches: 2.941301107406616.
514
+ [ Wed Sep 14 23:48:33 2022 ] Top1: 50.33%
515
+ [ Wed Sep 14 23:48:33 2022 ] Top5: 81.09%
516
+ [ Wed Sep 14 23:48:34 2022 ] Training epoch: 70
517
+ [ Wed Sep 14 23:49:00 2022 ] Batch(32/243) done. Loss: 0.0141 lr:0.010000 network_time: 0.0259
518
+ [ Wed Sep 14 23:50:13 2022 ] Batch(132/243) done. Loss: 0.0178 lr:0.010000 network_time: 0.0269
519
+ [ Wed Sep 14 23:51:26 2022 ] Batch(232/243) done. Loss: 0.0090 lr:0.010000 network_time: 0.0488
520
+ [ Wed Sep 14 23:51:34 2022 ] Eval epoch: 70
521
+ [ Wed Sep 14 23:53:08 2022 ] Mean test loss of 796 batches: 3.021989107131958.
522
+ [ Wed Sep 14 23:53:08 2022 ] Top1: 50.85%
523
+ [ Wed Sep 14 23:53:09 2022 ] Top5: 81.28%
524
+ [ Wed Sep 14 23:53:09 2022 ] Training epoch: 71
525
+ [ Wed Sep 14 23:54:17 2022 ] Batch(89/243) done. Loss: 0.0106 lr:0.010000 network_time: 0.0276
526
+ [ Wed Sep 14 23:55:30 2022 ] Batch(189/243) done. Loss: 0.0188 lr:0.010000 network_time: 0.0271
527
+ [ Wed Sep 14 23:56:09 2022 ] Eval epoch: 71
528
+ [ Wed Sep 14 23:57:43 2022 ] Mean test loss of 796 batches: 3.085793972015381.
529
+ [ Wed Sep 14 23:57:43 2022 ] Top1: 47.16%
530
+ [ Wed Sep 14 23:57:43 2022 ] Top5: 79.04%
531
+ [ Wed Sep 14 23:57:44 2022 ] Training epoch: 72
532
+ [ Wed Sep 14 23:58:21 2022 ] Batch(46/243) done. Loss: 0.0133 lr:0.010000 network_time: 0.0310
533
+ [ Wed Sep 14 23:59:34 2022 ] Batch(146/243) done. Loss: 0.0632 lr:0.010000 network_time: 0.0265
534
+ [ Thu Sep 15 00:00:44 2022 ] Eval epoch: 72
535
+ [ Thu Sep 15 00:02:17 2022 ] Mean test loss of 796 batches: 2.990631580352783.
536
+ [ Thu Sep 15 00:02:18 2022 ] Top1: 50.22%
537
+ [ Thu Sep 15 00:02:18 2022 ] Top5: 81.02%
538
+ [ Thu Sep 15 00:02:18 2022 ] Training epoch: 73
539
+ [ Thu Sep 15 00:02:24 2022 ] Batch(3/243) done. Loss: 0.0106 lr:0.010000 network_time: 0.0264
540
+ [ Thu Sep 15 00:03:37 2022 ] Batch(103/243) done. Loss: 0.0126 lr:0.010000 network_time: 0.0268
541
+ [ Thu Sep 15 00:04:50 2022 ] Batch(203/243) done. Loss: 0.0205 lr:0.010000 network_time: 0.0325
542
+ [ Thu Sep 15 00:05:19 2022 ] Eval epoch: 73
543
+ [ Thu Sep 15 00:06:52 2022 ] Mean test loss of 796 batches: 3.108754873275757.
544
+ [ Thu Sep 15 00:06:53 2022 ] Top1: 50.36%
545
+ [ Thu Sep 15 00:06:53 2022 ] Top5: 81.13%
546
+ [ Thu Sep 15 00:06:54 2022 ] Training epoch: 74
547
+ [ Thu Sep 15 00:07:41 2022 ] Batch(60/243) done. Loss: 0.0145 lr:0.010000 network_time: 0.0267
548
+ [ Thu Sep 15 00:08:54 2022 ] Batch(160/243) done. Loss: 0.0107 lr:0.010000 network_time: 0.0300
549
+ [ Thu Sep 15 00:09:54 2022 ] Eval epoch: 74
550
+ [ Thu Sep 15 00:11:28 2022 ] Mean test loss of 796 batches: 3.3555028438568115.
551
+ [ Thu Sep 15 00:11:28 2022 ] Top1: 51.40%
552
+ [ Thu Sep 15 00:11:29 2022 ] Top5: 81.48%
553
+ [ Thu Sep 15 00:11:29 2022 ] Training epoch: 75
554
+ [ Thu Sep 15 00:11:45 2022 ] Batch(17/243) done. Loss: 0.0042 lr:0.010000 network_time: 0.0271
555
+ [ Thu Sep 15 00:12:58 2022 ] Batch(117/243) done. Loss: 0.0155 lr:0.010000 network_time: 0.0277
556
+ [ Thu Sep 15 00:14:10 2022 ] Batch(217/243) done. Loss: 0.0068 lr:0.010000 network_time: 0.0319
557
+ [ Thu Sep 15 00:14:29 2022 ] Eval epoch: 75
558
+ [ Thu Sep 15 00:16:03 2022 ] Mean test loss of 796 batches: 3.2424283027648926.
559
+ [ Thu Sep 15 00:16:03 2022 ] Top1: 50.99%
560
+ [ Thu Sep 15 00:16:03 2022 ] Top5: 81.46%
561
+ [ Thu Sep 15 00:16:04 2022 ] Training epoch: 76
562
+ [ Thu Sep 15 00:17:01 2022 ] Batch(74/243) done. Loss: 0.0112 lr:0.010000 network_time: 0.0265
563
+ [ Thu Sep 15 00:18:14 2022 ] Batch(174/243) done. Loss: 0.0177 lr:0.010000 network_time: 0.0265
564
+ [ Thu Sep 15 00:19:04 2022 ] Eval epoch: 76
565
+ [ Thu Sep 15 00:20:38 2022 ] Mean test loss of 796 batches: 2.9780657291412354.
566
+ [ Thu Sep 15 00:20:38 2022 ] Top1: 50.22%
567
+ [ Thu Sep 15 00:20:39 2022 ] Top5: 80.99%
568
+ [ Thu Sep 15 00:20:39 2022 ] Training epoch: 77
569
+ [ Thu Sep 15 00:21:05 2022 ] Batch(31/243) done. Loss: 0.0079 lr:0.010000 network_time: 0.0268
570
+ [ Thu Sep 15 00:22:18 2022 ] Batch(131/243) done. Loss: 0.0068 lr:0.010000 network_time: 0.0275
571
+ [ Thu Sep 15 00:23:31 2022 ] Batch(231/243) done. Loss: 0.0154 lr:0.010000 network_time: 0.0319
572
+ [ Thu Sep 15 00:23:39 2022 ] Eval epoch: 77
573
+ [ Thu Sep 15 00:25:13 2022 ] Mean test loss of 796 batches: 3.0957376956939697.
574
+ [ Thu Sep 15 00:25:13 2022 ] Top1: 49.45%
575
+ [ Thu Sep 15 00:25:14 2022 ] Top5: 80.83%
576
+ [ Thu Sep 15 00:25:14 2022 ] Training epoch: 78
577
+ [ Thu Sep 15 00:26:22 2022 ] Batch(88/243) done. Loss: 0.0191 lr:0.010000 network_time: 0.0254
578
+ [ Thu Sep 15 00:27:34 2022 ] Batch(188/243) done. Loss: 0.0111 lr:0.010000 network_time: 0.0311
579
+ [ Thu Sep 15 00:28:14 2022 ] Eval epoch: 78
580
+ [ Thu Sep 15 00:29:48 2022 ] Mean test loss of 796 batches: 3.047116994857788.
581
+ [ Thu Sep 15 00:29:49 2022 ] Top1: 50.42%
582
+ [ Thu Sep 15 00:29:49 2022 ] Top5: 81.01%
583
+ [ Thu Sep 15 00:29:49 2022 ] Training epoch: 79
584
+ [ Thu Sep 15 00:30:26 2022 ] Batch(45/243) done. Loss: 0.0089 lr:0.010000 network_time: 0.0276
585
+ [ Thu Sep 15 00:31:39 2022 ] Batch(145/243) done. Loss: 0.0070 lr:0.010000 network_time: 0.0274
586
+ [ Thu Sep 15 00:32:49 2022 ] Eval epoch: 79
587
+ [ Thu Sep 15 00:34:23 2022 ] Mean test loss of 796 batches: 3.4280073642730713.
588
+ [ Thu Sep 15 00:34:24 2022 ] Top1: 50.84%
589
+ [ Thu Sep 15 00:34:24 2022 ] Top5: 81.25%
590
+ [ Thu Sep 15 00:34:24 2022 ] Training epoch: 80
591
+ [ Thu Sep 15 00:34:29 2022 ] Batch(2/243) done. Loss: 0.0102 lr:0.010000 network_time: 0.0335
592
+ [ Thu Sep 15 00:35:42 2022 ] Batch(102/243) done. Loss: 0.0069 lr:0.010000 network_time: 0.0271
593
+ [ Thu Sep 15 00:36:55 2022 ] Batch(202/243) done. Loss: 0.0106 lr:0.010000 network_time: 0.0268
594
+ [ Thu Sep 15 00:37:25 2022 ] Eval epoch: 80
595
+ [ Thu Sep 15 00:38:59 2022 ] Mean test loss of 796 batches: 3.1443278789520264.
596
+ [ Thu Sep 15 00:38:59 2022 ] Top1: 50.66%
597
+ [ Thu Sep 15 00:38:59 2022 ] Top5: 81.21%
598
+ [ Thu Sep 15 00:39:00 2022 ] Training epoch: 81
599
+ [ Thu Sep 15 00:39:46 2022 ] Batch(59/243) done. Loss: 0.0107 lr:0.001000 network_time: 0.0325
600
+ [ Thu Sep 15 00:40:59 2022 ] Batch(159/243) done. Loss: 0.0050 lr:0.001000 network_time: 0.0312
601
+ [ Thu Sep 15 00:42:00 2022 ] Eval epoch: 81
602
+ [ Thu Sep 15 00:43:33 2022 ] Mean test loss of 796 batches: 3.0960147380828857.
603
+ [ Thu Sep 15 00:43:34 2022 ] Top1: 50.55%
604
+ [ Thu Sep 15 00:43:34 2022 ] Top5: 81.34%
605
+ [ Thu Sep 15 00:43:34 2022 ] Training epoch: 82
606
+ [ Thu Sep 15 00:43:49 2022 ] Batch(16/243) done. Loss: 0.0087 lr:0.001000 network_time: 0.0273
607
+ [ Thu Sep 15 00:45:02 2022 ] Batch(116/243) done. Loss: 0.0053 lr:0.001000 network_time: 0.0307
608
+ [ Thu Sep 15 00:46:15 2022 ] Batch(216/243) done. Loss: 0.0135 lr:0.001000 network_time: 0.0360
609
+ [ Thu Sep 15 00:46:34 2022 ] Eval epoch: 82
610
+ [ Thu Sep 15 00:48:08 2022 ] Mean test loss of 796 batches: 3.263976812362671.
611
+ [ Thu Sep 15 00:48:08 2022 ] Top1: 50.82%
612
+ [ Thu Sep 15 00:48:09 2022 ] Top5: 81.38%
613
+ [ Thu Sep 15 00:48:09 2022 ] Training epoch: 83
614
+ [ Thu Sep 15 00:49:06 2022 ] Batch(73/243) done. Loss: 0.0114 lr:0.001000 network_time: 0.0270
615
+ [ Thu Sep 15 00:50:19 2022 ] Batch(173/243) done. Loss: 0.0032 lr:0.001000 network_time: 0.0356
616
+ [ Thu Sep 15 00:51:09 2022 ] Eval epoch: 83
617
+ [ Thu Sep 15 00:52:43 2022 ] Mean test loss of 796 batches: 3.1671903133392334.
618
+ [ Thu Sep 15 00:52:44 2022 ] Top1: 51.15%
619
+ [ Thu Sep 15 00:52:44 2022 ] Top5: 81.39%
620
+ [ Thu Sep 15 00:52:44 2022 ] Training epoch: 84
621
+ [ Thu Sep 15 00:53:10 2022 ] Batch(30/243) done. Loss: 0.0066 lr:0.001000 network_time: 0.0278
622
+ [ Thu Sep 15 00:54:22 2022 ] Batch(130/243) done. Loss: 0.0092 lr:0.001000 network_time: 0.0322
623
+ [ Thu Sep 15 00:55:35 2022 ] Batch(230/243) done. Loss: 0.0094 lr:0.001000 network_time: 0.0291
624
+ [ Thu Sep 15 00:55:44 2022 ] Eval epoch: 84
625
+ [ Thu Sep 15 00:57:18 2022 ] Mean test loss of 796 batches: 3.1958792209625244.
626
+ [ Thu Sep 15 00:57:19 2022 ] Top1: 51.77%
627
+ [ Thu Sep 15 00:57:19 2022 ] Top5: 81.87%
628
+ [ Thu Sep 15 00:57:19 2022 ] Training epoch: 85
629
+ [ Thu Sep 15 00:58:26 2022 ] Batch(87/243) done. Loss: 0.0129 lr:0.001000 network_time: 0.0319
630
+ [ Thu Sep 15 00:59:39 2022 ] Batch(187/243) done. Loss: 0.0328 lr:0.001000 network_time: 0.0282
631
+ [ Thu Sep 15 01:00:19 2022 ] Eval epoch: 85
632
+ [ Thu Sep 15 01:01:53 2022 ] Mean test loss of 796 batches: 3.0967118740081787.
633
+ [ Thu Sep 15 01:01:53 2022 ] Top1: 51.37%
634
+ [ Thu Sep 15 01:01:53 2022 ] Top5: 81.54%
635
+ [ Thu Sep 15 01:01:54 2022 ] Training epoch: 86
636
+ [ Thu Sep 15 01:02:29 2022 ] Batch(44/243) done. Loss: 0.0091 lr:0.001000 network_time: 0.0277
637
+ [ Thu Sep 15 01:03:42 2022 ] Batch(144/243) done. Loss: 0.0084 lr:0.001000 network_time: 0.0271
638
+ [ Thu Sep 15 01:04:54 2022 ] Eval epoch: 86
639
+ [ Thu Sep 15 01:06:28 2022 ] Mean test loss of 796 batches: 3.108445644378662.
640
+ [ Thu Sep 15 01:06:28 2022 ] Top1: 51.18%
641
+ [ Thu Sep 15 01:06:28 2022 ] Top5: 81.47%
642
+ [ Thu Sep 15 01:06:29 2022 ] Training epoch: 87
643
+ [ Thu Sep 15 01:06:33 2022 ] Batch(1/243) done. Loss: 0.0121 lr:0.001000 network_time: 0.0312
644
+ [ Thu Sep 15 01:07:46 2022 ] Batch(101/243) done. Loss: 0.0061 lr:0.001000 network_time: 0.0274
645
+ [ Thu Sep 15 01:08:59 2022 ] Batch(201/243) done. Loss: 0.0068 lr:0.001000 network_time: 0.0262
646
+ [ Thu Sep 15 01:09:29 2022 ] Eval epoch: 87
647
+ [ Thu Sep 15 01:11:03 2022 ] Mean test loss of 796 batches: 3.0967729091644287.
648
+ [ Thu Sep 15 01:11:03 2022 ] Top1: 48.86%
649
+ [ Thu Sep 15 01:11:04 2022 ] Top5: 79.71%
650
+ [ Thu Sep 15 01:11:04 2022 ] Training epoch: 88
651
+ [ Thu Sep 15 01:11:50 2022 ] Batch(58/243) done. Loss: 0.0042 lr:0.001000 network_time: 0.0277
652
+ [ Thu Sep 15 01:13:03 2022 ] Batch(158/243) done. Loss: 0.0111 lr:0.001000 network_time: 0.0269
653
+ [ Thu Sep 15 01:14:04 2022 ] Eval epoch: 88
654
+ [ Thu Sep 15 01:15:38 2022 ] Mean test loss of 796 batches: 3.0789506435394287.
655
+ [ Thu Sep 15 01:15:38 2022 ] Top1: 50.65%
656
+ [ Thu Sep 15 01:15:39 2022 ] Top5: 81.25%
657
+ [ Thu Sep 15 01:15:39 2022 ] Training epoch: 89
658
+ [ Thu Sep 15 01:15:53 2022 ] Batch(15/243) done. Loss: 0.0034 lr:0.001000 network_time: 0.0330
659
+ [ Thu Sep 15 01:17:06 2022 ] Batch(115/243) done. Loss: 0.0077 lr:0.001000 network_time: 0.0273
660
+ [ Thu Sep 15 01:18:19 2022 ] Batch(215/243) done. Loss: 0.0049 lr:0.001000 network_time: 0.0344
661
+ [ Thu Sep 15 01:18:39 2022 ] Eval epoch: 89
662
+ [ Thu Sep 15 01:20:12 2022 ] Mean test loss of 796 batches: 3.1689484119415283.
663
+ [ Thu Sep 15 01:20:13 2022 ] Top1: 50.95%
664
+ [ Thu Sep 15 01:20:13 2022 ] Top5: 81.37%
665
+ [ Thu Sep 15 01:20:13 2022 ] Training epoch: 90
666
+ [ Thu Sep 15 01:21:09 2022 ] Batch(72/243) done. Loss: 0.0071 lr:0.001000 network_time: 0.0312
667
+ [ Thu Sep 15 01:22:22 2022 ] Batch(172/243) done. Loss: 0.0092 lr:0.001000 network_time: 0.0312
668
+ [ Thu Sep 15 01:23:14 2022 ] Eval epoch: 90
669
+ [ Thu Sep 15 01:24:47 2022 ] Mean test loss of 796 batches: 3.1945176124572754.
670
+ [ Thu Sep 15 01:24:48 2022 ] Top1: 50.79%
671
+ [ Thu Sep 15 01:24:48 2022 ] Top5: 81.26%
672
+ [ Thu Sep 15 01:24:48 2022 ] Training epoch: 91
673
+ [ Thu Sep 15 01:25:13 2022 ] Batch(29/243) done. Loss: 0.0064 lr:0.001000 network_time: 0.0272
674
+ [ Thu Sep 15 01:26:26 2022 ] Batch(129/243) done. Loss: 0.0096 lr:0.001000 network_time: 0.0309
675
+ [ Thu Sep 15 01:27:39 2022 ] Batch(229/243) done. Loss: 0.0052 lr:0.001000 network_time: 0.0268
676
+ [ Thu Sep 15 01:27:48 2022 ] Eval epoch: 91
677
+ [ Thu Sep 15 01:29:22 2022 ] Mean test loss of 796 batches: 3.0675299167633057.
678
+ [ Thu Sep 15 01:29:22 2022 ] Top1: 50.65%
679
+ [ Thu Sep 15 01:29:23 2022 ] Top5: 81.31%
680
+ [ Thu Sep 15 01:29:23 2022 ] Training epoch: 92
681
+ [ Thu Sep 15 01:30:29 2022 ] Batch(86/243) done. Loss: 0.0084 lr:0.001000 network_time: 0.0284
682
+ [ Thu Sep 15 01:31:42 2022 ] Batch(186/243) done. Loss: 0.0071 lr:0.001000 network_time: 0.0306
683
+ [ Thu Sep 15 01:32:23 2022 ] Eval epoch: 92
684
+ [ Thu Sep 15 01:33:57 2022 ] Mean test loss of 796 batches: 3.313861846923828.
685
+ [ Thu Sep 15 01:33:57 2022 ] Top1: 51.27%
686
+ [ Thu Sep 15 01:33:58 2022 ] Top5: 81.65%
687
+ [ Thu Sep 15 01:33:58 2022 ] Training epoch: 93
688
+ [ Thu Sep 15 01:34:32 2022 ] Batch(43/243) done. Loss: 0.0032 lr:0.001000 network_time: 0.0267
689
+ [ Thu Sep 15 01:35:45 2022 ] Batch(143/243) done. Loss: 0.0093 lr:0.001000 network_time: 0.0270
690
+ [ Thu Sep 15 01:36:58 2022 ] Eval epoch: 93
691
+ [ Thu Sep 15 01:38:31 2022 ] Mean test loss of 796 batches: 3.305858612060547.
692
+ [ Thu Sep 15 01:38:32 2022 ] Top1: 51.20%
693
+ [ Thu Sep 15 01:38:32 2022 ] Top5: 81.47%
694
+ [ Thu Sep 15 01:38:32 2022 ] Training epoch: 94
695
+ [ Thu Sep 15 01:38:36 2022 ] Batch(0/243) done. Loss: 0.0093 lr:0.001000 network_time: 0.0681
696
+ [ Thu Sep 15 01:39:49 2022 ] Batch(100/243) done. Loss: 0.0072 lr:0.001000 network_time: 0.0266
697
+ [ Thu Sep 15 01:41:02 2022 ] Batch(200/243) done. Loss: 0.0085 lr:0.001000 network_time: 0.0262
698
+ [ Thu Sep 15 01:41:33 2022 ] Eval epoch: 94
699
+ [ Thu Sep 15 01:43:06 2022 ] Mean test loss of 796 batches: 3.0634562969207764.
700
+ [ Thu Sep 15 01:43:06 2022 ] Top1: 46.61%
701
+ [ Thu Sep 15 01:43:07 2022 ] Top5: 78.71%
702
+ [ Thu Sep 15 01:43:07 2022 ] Training epoch: 95
703
+ [ Thu Sep 15 01:43:52 2022 ] Batch(57/243) done. Loss: 0.0061 lr:0.001000 network_time: 0.0297
704
+ [ Thu Sep 15 01:45:05 2022 ] Batch(157/243) done. Loss: 0.0087 lr:0.001000 network_time: 0.0282
705
+ [ Thu Sep 15 01:46:07 2022 ] Eval epoch: 95
706
+ [ Thu Sep 15 01:47:41 2022 ] Mean test loss of 796 batches: 3.1093084812164307.
707
+ [ Thu Sep 15 01:47:41 2022 ] Top1: 51.30%
708
+ [ Thu Sep 15 01:47:41 2022 ] Top5: 81.50%
709
+ [ Thu Sep 15 01:47:41 2022 ] Training epoch: 96
710
+ [ Thu Sep 15 01:47:55 2022 ] Batch(14/243) done. Loss: 0.0107 lr:0.001000 network_time: 0.0275
711
+ [ Thu Sep 15 01:49:08 2022 ] Batch(114/243) done. Loss: 0.0090 lr:0.001000 network_time: 0.0309
712
+ [ Thu Sep 15 01:50:21 2022 ] Batch(214/243) done. Loss: 0.0100 lr:0.001000 network_time: 0.0271
713
+ [ Thu Sep 15 01:50:41 2022 ] Eval epoch: 96
714
+ [ Thu Sep 15 01:52:15 2022 ] Mean test loss of 796 batches: 3.2472262382507324.
715
+ [ Thu Sep 15 01:52:15 2022 ] Top1: 50.70%
716
+ [ Thu Sep 15 01:52:15 2022 ] Top5: 81.35%
717
+ [ Thu Sep 15 01:52:16 2022 ] Training epoch: 97
718
+ [ Thu Sep 15 01:53:11 2022 ] Batch(71/243) done. Loss: 0.0099 lr:0.001000 network_time: 0.0254
719
+ [ Thu Sep 15 01:54:24 2022 ] Batch(171/243) done. Loss: 0.0050 lr:0.001000 network_time: 0.0284
720
+ [ Thu Sep 15 01:55:16 2022 ] Eval epoch: 97
721
+ [ Thu Sep 15 01:56:49 2022 ] Mean test loss of 796 batches: 3.1731739044189453.
722
+ [ Thu Sep 15 01:56:50 2022 ] Top1: 50.67%
723
+ [ Thu Sep 15 01:56:50 2022 ] Top5: 81.30%
724
+ [ Thu Sep 15 01:56:50 2022 ] Training epoch: 98
725
+ [ Thu Sep 15 01:57:14 2022 ] Batch(28/243) done. Loss: 0.0051 lr:0.001000 network_time: 0.0312
726
+ [ Thu Sep 15 01:58:27 2022 ] Batch(128/243) done. Loss: 0.0056 lr:0.001000 network_time: 0.0336
727
+ [ Thu Sep 15 01:59:40 2022 ] Batch(228/243) done. Loss: 0.0081 lr:0.001000 network_time: 0.0267
728
+ [ Thu Sep 15 01:59:50 2022 ] Eval epoch: 98
729
+ [ Thu Sep 15 02:01:24 2022 ] Mean test loss of 796 batches: 3.215578556060791.
730
+ [ Thu Sep 15 02:01:25 2022 ] Top1: 51.38%
731
+ [ Thu Sep 15 02:01:25 2022 ] Top5: 81.51%
732
+ [ Thu Sep 15 02:01:26 2022 ] Training epoch: 99
733
+ [ Thu Sep 15 02:02:31 2022 ] Batch(85/243) done. Loss: 0.0067 lr:0.001000 network_time: 0.0292
734
+ [ Thu Sep 15 02:03:44 2022 ] Batch(185/243) done. Loss: 0.0486 lr:0.001000 network_time: 0.0273
735
+ [ Thu Sep 15 02:04:26 2022 ] Eval epoch: 99
736
+ [ Thu Sep 15 02:05:59 2022 ] Mean test loss of 796 batches: 3.295635223388672.
737
+ [ Thu Sep 15 02:06:00 2022 ] Top1: 50.86%
738
+ [ Thu Sep 15 02:06:00 2022 ] Top5: 81.37%
739
+ [ Thu Sep 15 02:06:00 2022 ] Training epoch: 100
740
+ [ Thu Sep 15 02:06:35 2022 ] Batch(42/243) done. Loss: 0.0102 lr:0.001000 network_time: 0.0342
741
+ [ Thu Sep 15 02:07:48 2022 ] Batch(142/243) done. Loss: 0.0025 lr:0.001000 network_time: 0.0255
742
+ [ Thu Sep 15 02:09:01 2022 ] Batch(242/243) done. Loss: 0.0066 lr:0.001000 network_time: 0.0281
743
+ [ Thu Sep 15 02:09:01 2022 ] Eval epoch: 100
744
+ [ Thu Sep 15 02:10:35 2022 ] Mean test loss of 796 batches: 3.2508275508880615.
745
+ [ Thu Sep 15 02:10:35 2022 ] Top1: 51.27%
746
+ [ Thu Sep 15 02:10:36 2022 ] Top5: 81.60%
747
+ [ Thu Sep 15 02:10:36 2022 ] Training epoch: 101
748
+ [ Thu Sep 15 02:11:52 2022 ] Batch(99/243) done. Loss: 0.0055 lr:0.000100 network_time: 0.0261
749
+ [ Thu Sep 15 02:13:05 2022 ] Batch(199/243) done. Loss: 0.0052 lr:0.000100 network_time: 0.0303
750
+ [ Thu Sep 15 02:13:36 2022 ] Eval epoch: 101
751
+ [ Thu Sep 15 02:15:10 2022 ] Mean test loss of 796 batches: 3.3487186431884766.
752
+ [ Thu Sep 15 02:15:10 2022 ] Top1: 51.59%
753
+ [ Thu Sep 15 02:15:10 2022 ] Top5: 81.67%
754
+ [ Thu Sep 15 02:15:11 2022 ] Training epoch: 102
755
+ [ Thu Sep 15 02:15:55 2022 ] Batch(56/243) done. Loss: 0.0076 lr:0.000100 network_time: 0.0260
756
+ [ Thu Sep 15 02:17:08 2022 ] Batch(156/243) done. Loss: 0.0121 lr:0.000100 network_time: 0.0303
757
+ [ Thu Sep 15 02:18:11 2022 ] Eval epoch: 102
758
+ [ Thu Sep 15 02:19:44 2022 ] Mean test loss of 796 batches: 3.118297576904297.
759
+ [ Thu Sep 15 02:19:45 2022 ] Top1: 51.20%
760
+ [ Thu Sep 15 02:19:45 2022 ] Top5: 81.58%
761
+ [ Thu Sep 15 02:19:45 2022 ] Training epoch: 103
762
+ [ Thu Sep 15 02:19:58 2022 ] Batch(13/243) done. Loss: 0.0033 lr:0.000100 network_time: 0.0313
763
+ [ Thu Sep 15 02:21:11 2022 ] Batch(113/243) done. Loss: 0.0070 lr:0.000100 network_time: 0.0277
764
+ [ Thu Sep 15 02:22:24 2022 ] Batch(213/243) done. Loss: 0.0042 lr:0.000100 network_time: 0.0308
765
+ [ Thu Sep 15 02:22:45 2022 ] Eval epoch: 103
766
+ [ Thu Sep 15 02:24:19 2022 ] Mean test loss of 796 batches: 3.057490825653076.
767
+ [ Thu Sep 15 02:24:19 2022 ] Top1: 49.14%
768
+ [ Thu Sep 15 02:24:20 2022 ] Top5: 80.00%
769
+ [ Thu Sep 15 02:24:20 2022 ] Training epoch: 104
770
+ [ Thu Sep 15 02:25:15 2022 ] Batch(70/243) done. Loss: 0.0144 lr:0.000100 network_time: 0.0270
771
+ [ Thu Sep 15 02:26:27 2022 ] Batch(170/243) done. Loss: 0.0054 lr:0.000100 network_time: 0.0302
772
+ [ Thu Sep 15 02:27:20 2022 ] Eval epoch: 104
773
+ [ Thu Sep 15 02:28:53 2022 ] Mean test loss of 796 batches: 3.3775713443756104.
774
+ [ Thu Sep 15 02:28:54 2022 ] Top1: 51.52%
775
+ [ Thu Sep 15 02:28:54 2022 ] Top5: 81.56%
776
+ [ Thu Sep 15 02:28:54 2022 ] Training epoch: 105
777
+ [ Thu Sep 15 02:29:18 2022 ] Batch(27/243) done. Loss: 0.0240 lr:0.000100 network_time: 0.0318
778
+ [ Thu Sep 15 02:30:30 2022 ] Batch(127/243) done. Loss: 0.0145 lr:0.000100 network_time: 0.0280
779
+ [ Thu Sep 15 02:31:43 2022 ] Batch(227/243) done. Loss: 0.0197 lr:0.000100 network_time: 0.0282
780
+ [ Thu Sep 15 02:31:54 2022 ] Eval epoch: 105
781
+ [ Thu Sep 15 02:33:28 2022 ] Mean test loss of 796 batches: 3.1972925662994385.
782
+ [ Thu Sep 15 02:33:28 2022 ] Top1: 50.32%
783
+ [ Thu Sep 15 02:33:29 2022 ] Top5: 81.15%
784
+ [ Thu Sep 15 02:33:29 2022 ] Training epoch: 106
785
+ [ Thu Sep 15 02:34:34 2022 ] Batch(84/243) done. Loss: 0.0108 lr:0.000100 network_time: 0.0267
786
+ [ Thu Sep 15 02:35:47 2022 ] Batch(184/243) done. Loss: 0.0083 lr:0.000100 network_time: 0.0280
787
+ [ Thu Sep 15 02:36:29 2022 ] Eval epoch: 106
788
+ [ Thu Sep 15 02:38:02 2022 ] Mean test loss of 796 batches: 3.2501347064971924.
789
+ [ Thu Sep 15 02:38:03 2022 ] Top1: 51.36%
790
+ [ Thu Sep 15 02:38:03 2022 ] Top5: 81.57%
791
+ [ Thu Sep 15 02:38:03 2022 ] Training epoch: 107
792
+ [ Thu Sep 15 02:38:36 2022 ] Batch(41/243) done. Loss: 0.0084 lr:0.000100 network_time: 0.0275
793
+ [ Thu Sep 15 02:39:49 2022 ] Batch(141/243) done. Loss: 0.0061 lr:0.000100 network_time: 0.0265
794
+ [ Thu Sep 15 02:41:02 2022 ] Batch(241/243) done. Loss: 0.0130 lr:0.000100 network_time: 0.0270
795
+ [ Thu Sep 15 02:41:03 2022 ] Eval epoch: 107
796
+ [ Thu Sep 15 02:42:37 2022 ] Mean test loss of 796 batches: 2.9681849479675293.
797
+ [ Thu Sep 15 02:42:37 2022 ] Top1: 50.28%
798
+ [ Thu Sep 15 02:42:38 2022 ] Top5: 81.15%
799
+ [ Thu Sep 15 02:42:38 2022 ] Training epoch: 108
800
+ [ Thu Sep 15 02:43:53 2022 ] Batch(98/243) done. Loss: 0.0038 lr:0.000100 network_time: 0.0279
801
+ [ Thu Sep 15 02:45:06 2022 ] Batch(198/243) done. Loss: 0.0093 lr:0.000100 network_time: 0.0259
802
+ [ Thu Sep 15 02:45:38 2022 ] Eval epoch: 108
803
+ [ Thu Sep 15 02:47:12 2022 ] Mean test loss of 796 batches: 3.1740822792053223.
804
+ [ Thu Sep 15 02:47:12 2022 ] Top1: 50.97%
805
+ [ Thu Sep 15 02:47:13 2022 ] Top5: 81.63%
806
+ [ Thu Sep 15 02:47:13 2022 ] Training epoch: 109
807
+ [ Thu Sep 15 02:47:56 2022 ] Batch(55/243) done. Loss: 0.0059 lr:0.000100 network_time: 0.0299
808
+ [ Thu Sep 15 02:49:09 2022 ] Batch(155/243) done. Loss: 0.0042 lr:0.000100 network_time: 0.0309
809
+ [ Thu Sep 15 02:50:13 2022 ] Eval epoch: 109
810
+ [ Thu Sep 15 02:51:47 2022 ] Mean test loss of 796 batches: 3.140623092651367.
811
+ [ Thu Sep 15 02:51:47 2022 ] Top1: 50.99%
812
+ [ Thu Sep 15 02:51:48 2022 ] Top5: 81.59%
813
+ [ Thu Sep 15 02:51:48 2022 ] Training epoch: 110
814
+ [ Thu Sep 15 02:52:00 2022 ] Batch(12/243) done. Loss: 0.0076 lr:0.000100 network_time: 0.0300
815
+ [ Thu Sep 15 02:53:13 2022 ] Batch(112/243) done. Loss: 0.0082 lr:0.000100 network_time: 0.0272
816
+ [ Thu Sep 15 02:54:26 2022 ] Batch(212/243) done. Loss: 0.0047 lr:0.000100 network_time: 0.0286
817
+ [ Thu Sep 15 02:54:48 2022 ] Eval epoch: 110
818
+ [ Thu Sep 15 02:56:23 2022 ] Mean test loss of 796 batches: 3.0745203495025635.
819
+ [ Thu Sep 15 02:56:23 2022 ] Top1: 48.30%
820
+ [ Thu Sep 15 02:56:24 2022 ] Top5: 79.79%
821
+ [ Thu Sep 15 02:56:24 2022 ] Training epoch: 111
822
+ [ Thu Sep 15 02:57:18 2022 ] Batch(69/243) done. Loss: 0.0094 lr:0.000100 network_time: 0.0275
823
+ [ Thu Sep 15 02:58:30 2022 ] Batch(169/243) done. Loss: 0.0085 lr:0.000100 network_time: 0.0260
824
+ [ Thu Sep 15 02:59:24 2022 ] Eval epoch: 111
825
+ [ Thu Sep 15 03:00:57 2022 ] Mean test loss of 796 batches: 3.1857941150665283.
826
+ [ Thu Sep 15 03:00:58 2022 ] Top1: 51.34%
827
+ [ Thu Sep 15 03:00:58 2022 ] Top5: 81.53%
828
+ [ Thu Sep 15 03:00:58 2022 ] Training epoch: 112
829
+ [ Thu Sep 15 03:01:21 2022 ] Batch(26/243) done. Loss: 0.0065 lr:0.000100 network_time: 0.0269
830
+ [ Thu Sep 15 03:02:33 2022 ] Batch(126/243) done. Loss: 0.0094 lr:0.000100 network_time: 0.0304
831
+ [ Thu Sep 15 03:03:46 2022 ] Batch(226/243) done. Loss: 0.0069 lr:0.000100 network_time: 0.0303
832
+ [ Thu Sep 15 03:03:58 2022 ] Eval epoch: 112
833
+ [ Thu Sep 15 03:05:32 2022 ] Mean test loss of 796 batches: 3.084721088409424.
834
+ [ Thu Sep 15 03:05:32 2022 ] Top1: 51.01%
835
+ [ Thu Sep 15 03:05:32 2022 ] Top5: 81.38%
836
+ [ Thu Sep 15 03:05:33 2022 ] Training epoch: 113
837
+ [ Thu Sep 15 03:06:37 2022 ] Batch(83/243) done. Loss: 0.0072 lr:0.000100 network_time: 0.0278
838
+ [ Thu Sep 15 03:07:50 2022 ] Batch(183/243) done. Loss: 0.0037 lr:0.000100 network_time: 0.0289
839
+ [ Thu Sep 15 03:08:33 2022 ] Eval epoch: 113
840
+ [ Thu Sep 15 03:10:06 2022 ] Mean test loss of 796 batches: 3.158820867538452.
841
+ [ Thu Sep 15 03:10:06 2022 ] Top1: 50.64%
842
+ [ Thu Sep 15 03:10:07 2022 ] Top5: 81.21%
843
+ [ Thu Sep 15 03:10:07 2022 ] Training epoch: 114
844
+ [ Thu Sep 15 03:10:40 2022 ] Batch(40/243) done. Loss: 0.0080 lr:0.000100 network_time: 0.0276
845
+ [ Thu Sep 15 03:11:53 2022 ] Batch(140/243) done. Loss: 0.0049 lr:0.000100 network_time: 0.0271
846
+ [ Thu Sep 15 03:13:06 2022 ] Batch(240/243) done. Loss: 0.0060 lr:0.000100 network_time: 0.0269
847
+ [ Thu Sep 15 03:13:07 2022 ] Eval epoch: 114
848
+ [ Thu Sep 15 03:14:41 2022 ] Mean test loss of 796 batches: 3.095377206802368.
849
+ [ Thu Sep 15 03:14:41 2022 ] Top1: 51.16%
850
+ [ Thu Sep 15 03:14:42 2022 ] Top5: 81.58%
851
+ [ Thu Sep 15 03:14:42 2022 ] Training epoch: 115
852
+ [ Thu Sep 15 03:15:56 2022 ] Batch(97/243) done. Loss: 0.0078 lr:0.000100 network_time: 0.0262
853
+ [ Thu Sep 15 03:17:09 2022 ] Batch(197/243) done. Loss: 0.0063 lr:0.000100 network_time: 0.0259
854
+ [ Thu Sep 15 03:17:42 2022 ] Eval epoch: 115
855
+ [ Thu Sep 15 03:19:16 2022 ] Mean test loss of 796 batches: 3.1977145671844482.
856
+ [ Thu Sep 15 03:19:17 2022 ] Top1: 51.42%
857
+ [ Thu Sep 15 03:19:17 2022 ] Top5: 81.56%
858
+ [ Thu Sep 15 03:19:17 2022 ] Training epoch: 116
859
+ [ Thu Sep 15 03:20:00 2022 ] Batch(54/243) done. Loss: 0.0090 lr:0.000100 network_time: 0.0260
860
+ [ Thu Sep 15 03:21:13 2022 ] Batch(154/243) done. Loss: 0.0051 lr:0.000100 network_time: 0.0450
861
+ [ Thu Sep 15 03:22:17 2022 ] Eval epoch: 116
862
+ [ Thu Sep 15 03:23:51 2022 ] Mean test loss of 796 batches: 2.949658155441284.
863
+ [ Thu Sep 15 03:23:51 2022 ] Top1: 51.18%
864
+ [ Thu Sep 15 03:23:51 2022 ] Top5: 81.60%
865
+ [ Thu Sep 15 03:23:52 2022 ] Training epoch: 117
866
+ [ Thu Sep 15 03:24:03 2022 ] Batch(11/243) done. Loss: 0.0100 lr:0.000100 network_time: 0.0270
867
+ [ Thu Sep 15 03:25:16 2022 ] Batch(111/243) done. Loss: 0.0060 lr:0.000100 network_time: 0.0269
868
+ [ Thu Sep 15 03:26:29 2022 ] Batch(211/243) done. Loss: 0.0068 lr:0.000100 network_time: 0.0275
869
+ [ Thu Sep 15 03:26:52 2022 ] Eval epoch: 117
870
+ [ Thu Sep 15 03:28:25 2022 ] Mean test loss of 796 batches: 3.296842575073242.
871
+ [ Thu Sep 15 03:28:26 2022 ] Top1: 51.69%
872
+ [ Thu Sep 15 03:28:26 2022 ] Top5: 81.48%
873
+ [ Thu Sep 15 03:28:27 2022 ] Training epoch: 118
874
+ [ Thu Sep 15 03:29:20 2022 ] Batch(68/243) done. Loss: 0.0049 lr:0.000100 network_time: 0.0272
875
+ [ Thu Sep 15 03:30:33 2022 ] Batch(168/243) done. Loss: 0.0060 lr:0.000100 network_time: 0.0320
876
+ [ Thu Sep 15 03:31:27 2022 ] Eval epoch: 118
877
+ [ Thu Sep 15 03:33:00 2022 ] Mean test loss of 796 batches: 3.214509963989258.
878
+ [ Thu Sep 15 03:33:01 2022 ] Top1: 49.90%
879
+ [ Thu Sep 15 03:33:01 2022 ] Top5: 80.65%
880
+ [ Thu Sep 15 03:33:01 2022 ] Training epoch: 119
881
+ [ Thu Sep 15 03:33:23 2022 ] Batch(25/243) done. Loss: 0.0057 lr:0.000100 network_time: 0.0270
882
+ [ Thu Sep 15 03:34:36 2022 ] Batch(125/243) done. Loss: 0.0098 lr:0.000100 network_time: 0.0308
883
+ [ Thu Sep 15 03:35:49 2022 ] Batch(225/243) done. Loss: 0.0066 lr:0.000100 network_time: 0.0346
884
+ [ Thu Sep 15 03:36:02 2022 ] Eval epoch: 119
885
+ [ Thu Sep 15 03:37:36 2022 ] Mean test loss of 796 batches: 3.0812954902648926.
886
+ [ Thu Sep 15 03:37:36 2022 ] Top1: 50.24%
887
+ [ Thu Sep 15 03:37:36 2022 ] Top5: 81.20%
888
+ [ Thu Sep 15 03:37:37 2022 ] Training epoch: 120
889
+ [ Thu Sep 15 03:38:40 2022 ] Batch(82/243) done. Loss: 0.0040 lr:0.000100 network_time: 0.0282
890
+ [ Thu Sep 15 03:39:53 2022 ] Batch(182/243) done. Loss: 0.0082 lr:0.000100 network_time: 0.0272
891
+ [ Thu Sep 15 03:40:37 2022 ] Eval epoch: 120
892
+ [ Thu Sep 15 03:42:10 2022 ] Mean test loss of 796 batches: 3.254603624343872.
893
+ [ Thu Sep 15 03:42:10 2022 ] Top1: 51.25%
894
+ [ Thu Sep 15 03:42:11 2022 ] Top5: 81.58%
895
+ [ Thu Sep 15 03:42:11 2022 ] Training epoch: 121
896
+ [ Thu Sep 15 03:42:43 2022 ] Batch(39/243) done. Loss: 0.0165 lr:0.000100 network_time: 0.0294
897
+ [ Thu Sep 15 03:43:56 2022 ] Batch(139/243) done. Loss: 0.0096 lr:0.000100 network_time: 0.0262
898
+ [ Thu Sep 15 03:45:09 2022 ] Batch(239/243) done. Loss: 0.0031 lr:0.000100 network_time: 0.0307
899
+ [ Thu Sep 15 03:45:11 2022 ] Eval epoch: 121
900
+ [ Thu Sep 15 03:46:44 2022 ] Mean test loss of 796 batches: 3.079324245452881.
901
+ [ Thu Sep 15 03:46:45 2022 ] Top1: 51.25%
902
+ [ Thu Sep 15 03:46:45 2022 ] Top5: 81.72%
903
+ [ Thu Sep 15 03:46:45 2022 ] Training epoch: 122
904
+ [ Thu Sep 15 03:47:59 2022 ] Batch(96/243) done. Loss: 0.0052 lr:0.000100 network_time: 0.0274
905
+ [ Thu Sep 15 03:49:12 2022 ] Batch(196/243) done. Loss: 0.0104 lr:0.000100 network_time: 0.0276
906
+ [ Thu Sep 15 03:49:45 2022 ] Eval epoch: 122
907
+ [ Thu Sep 15 03:51:19 2022 ] Mean test loss of 796 batches: 3.0649008750915527.
908
+ [ Thu Sep 15 03:51:19 2022 ] Top1: 50.67%
909
+ [ Thu Sep 15 03:51:20 2022 ] Top5: 81.14%
910
+ [ Thu Sep 15 03:51:20 2022 ] Training epoch: 123
911
+ [ Thu Sep 15 03:52:02 2022 ] Batch(53/243) done. Loss: 0.0048 lr:0.000100 network_time: 0.0263
912
+ [ Thu Sep 15 03:53:15 2022 ] Batch(153/243) done. Loss: 0.0069 lr:0.000100 network_time: 0.0296
913
+ [ Thu Sep 15 03:54:20 2022 ] Eval epoch: 123
914
+ [ Thu Sep 15 03:55:54 2022 ] Mean test loss of 796 batches: 3.03798508644104.
915
+ [ Thu Sep 15 03:55:54 2022 ] Top1: 50.79%
916
+ [ Thu Sep 15 03:55:55 2022 ] Top5: 81.41%
917
+ [ Thu Sep 15 03:55:55 2022 ] Training epoch: 124
918
+ [ Thu Sep 15 03:56:06 2022 ] Batch(10/243) done. Loss: 0.0064 lr:0.000100 network_time: 0.0280
919
+ [ Thu Sep 15 03:57:18 2022 ] Batch(110/243) done. Loss: 0.0061 lr:0.000100 network_time: 0.0284
920
+ [ Thu Sep 15 03:58:31 2022 ] Batch(210/243) done. Loss: 0.0060 lr:0.000100 network_time: 0.0274
921
+ [ Thu Sep 15 03:58:55 2022 ] Eval epoch: 124
922
+ [ Thu Sep 15 04:00:29 2022 ] Mean test loss of 796 batches: 3.2211194038391113.
923
+ [ Thu Sep 15 04:00:29 2022 ] Top1: 51.45%
924
+ [ Thu Sep 15 04:00:30 2022 ] Top5: 81.65%
925
+ [ Thu Sep 15 04:00:30 2022 ] Training epoch: 125
926
+ [ Thu Sep 15 04:01:22 2022 ] Batch(67/243) done. Loss: 0.0040 lr:0.000100 network_time: 0.0279
927
+ [ Thu Sep 15 04:02:35 2022 ] Batch(167/243) done. Loss: 0.0201 lr:0.000100 network_time: 0.0277
928
+ [ Thu Sep 15 04:03:30 2022 ] Eval epoch: 125
929
+ [ Thu Sep 15 04:05:03 2022 ] Mean test loss of 796 batches: 3.167156219482422.
930
+ [ Thu Sep 15 04:05:04 2022 ] Top1: 48.77%
931
+ [ Thu Sep 15 04:05:04 2022 ] Top5: 79.97%
932
+ [ Thu Sep 15 04:05:04 2022 ] Training epoch: 126
933
+ [ Thu Sep 15 04:05:26 2022 ] Batch(24/243) done. Loss: 0.0096 lr:0.000100 network_time: 0.0275
934
+ [ Thu Sep 15 04:06:38 2022 ] Batch(124/243) done. Loss: 0.0086 lr:0.000100 network_time: 0.0274
935
+ [ Thu Sep 15 04:07:51 2022 ] Batch(224/243) done. Loss: 0.0037 lr:0.000100 network_time: 0.0263
936
+ [ Thu Sep 15 04:08:05 2022 ] Eval epoch: 126
937
+ [ Thu Sep 15 04:09:38 2022 ] Mean test loss of 796 batches: 3.327735424041748.
938
+ [ Thu Sep 15 04:09:39 2022 ] Top1: 50.98%
939
+ [ Thu Sep 15 04:09:39 2022 ] Top5: 81.23%
940
+ [ Thu Sep 15 04:09:39 2022 ] Training epoch: 127
941
+ [ Thu Sep 15 04:10:42 2022 ] Batch(81/243) done. Loss: 0.0064 lr:0.000100 network_time: 0.0268
942
+ [ Thu Sep 15 04:11:55 2022 ] Batch(181/243) done. Loss: 0.0091 lr:0.000100 network_time: 0.0277
943
+ [ Thu Sep 15 04:12:40 2022 ] Eval epoch: 127
944
+ [ Thu Sep 15 04:14:13 2022 ] Mean test loss of 796 batches: 3.1817829608917236.
945
+ [ Thu Sep 15 04:14:14 2022 ] Top1: 50.90%
946
+ [ Thu Sep 15 04:14:14 2022 ] Top5: 81.36%
947
+ [ Thu Sep 15 04:14:14 2022 ] Training epoch: 128
948
+ [ Thu Sep 15 04:14:46 2022 ] Batch(38/243) done. Loss: 0.0044 lr:0.000100 network_time: 0.0259
949
+ [ Thu Sep 15 04:15:59 2022 ] Batch(138/243) done. Loss: 0.0055 lr:0.000100 network_time: 0.0424
950
+ [ Thu Sep 15 04:17:12 2022 ] Batch(238/243) done. Loss: 0.0065 lr:0.000100 network_time: 0.0269
951
+ [ Thu Sep 15 04:17:15 2022 ] Eval epoch: 128
952
+ [ Thu Sep 15 04:18:49 2022 ] Mean test loss of 796 batches: 3.022956609725952.
953
+ [ Thu Sep 15 04:18:49 2022 ] Top1: 51.15%
954
+ [ Thu Sep 15 04:18:49 2022 ] Top5: 81.64%
955
+ [ Thu Sep 15 04:18:50 2022 ] Training epoch: 129
956
+ [ Thu Sep 15 04:20:02 2022 ] Batch(95/243) done. Loss: 0.0065 lr:0.000100 network_time: 0.0269
957
+ [ Thu Sep 15 04:21:15 2022 ] Batch(195/243) done. Loss: 0.0073 lr:0.000100 network_time: 0.0268
958
+ [ Thu Sep 15 04:21:50 2022 ] Eval epoch: 129
959
+ [ Thu Sep 15 04:23:23 2022 ] Mean test loss of 796 batches: 3.0225670337677.
960
+ [ Thu Sep 15 04:23:24 2022 ] Top1: 49.34%
961
+ [ Thu Sep 15 04:23:24 2022 ] Top5: 80.52%
962
+ [ Thu Sep 15 04:23:24 2022 ] Training epoch: 130
963
+ [ Thu Sep 15 04:24:06 2022 ] Batch(52/243) done. Loss: 0.0038 lr:0.000100 network_time: 0.0269
964
+ [ Thu Sep 15 04:25:19 2022 ] Batch(152/243) done. Loss: 0.0050 lr:0.000100 network_time: 0.0273
965
+ [ Thu Sep 15 04:26:25 2022 ] Eval epoch: 130
966
+ [ Thu Sep 15 04:27:59 2022 ] Mean test loss of 796 batches: 3.0677361488342285.
967
+ [ Thu Sep 15 04:27:59 2022 ] Top1: 51.32%
968
+ [ Thu Sep 15 04:28:00 2022 ] Top5: 81.61%
969
+ [ Thu Sep 15 04:28:00 2022 ] Training epoch: 131
970
+ [ Thu Sep 15 04:28:10 2022 ] Batch(9/243) done. Loss: 0.0053 lr:0.000100 network_time: 0.0280
971
+ [ Thu Sep 15 04:29:23 2022 ] Batch(109/243) done. Loss: 0.0057 lr:0.000100 network_time: 0.0277
972
+ [ Thu Sep 15 04:30:36 2022 ] Batch(209/243) done. Loss: 0.0192 lr:0.000100 network_time: 0.0273
973
+ [ Thu Sep 15 04:31:00 2022 ] Eval epoch: 131
974
+ [ Thu Sep 15 04:32:34 2022 ] Mean test loss of 796 batches: 3.1217525005340576.
975
+ [ Thu Sep 15 04:32:34 2022 ] Top1: 50.86%
976
+ [ Thu Sep 15 04:32:35 2022 ] Top5: 81.28%
977
+ [ Thu Sep 15 04:32:35 2022 ] Training epoch: 132
978
+ [ Thu Sep 15 04:33:26 2022 ] Batch(66/243) done. Loss: 0.0034 lr:0.000100 network_time: 0.0272
979
+ [ Thu Sep 15 04:34:39 2022 ] Batch(166/243) done. Loss: 0.0074 lr:0.000100 network_time: 0.0277
980
+ [ Thu Sep 15 04:35:35 2022 ] Eval epoch: 132
981
+ [ Thu Sep 15 04:37:09 2022 ] Mean test loss of 796 batches: 2.9388933181762695.
982
+ [ Thu Sep 15 04:37:09 2022 ] Top1: 51.00%
983
+ [ Thu Sep 15 04:37:09 2022 ] Top5: 81.54%
984
+ [ Thu Sep 15 04:37:10 2022 ] Training epoch: 133
985
+ [ Thu Sep 15 04:37:30 2022 ] Batch(23/243) done. Loss: 0.0048 lr:0.000100 network_time: 0.0279
986
+ [ Thu Sep 15 04:38:43 2022 ] Batch(123/243) done. Loss: 0.0090 lr:0.000100 network_time: 0.0533
987
+ [ Thu Sep 15 04:39:56 2022 ] Batch(223/243) done. Loss: 0.0128 lr:0.000100 network_time: 0.0302
988
+ [ Thu Sep 15 04:40:10 2022 ] Eval epoch: 133
989
+ [ Thu Sep 15 04:41:44 2022 ] Mean test loss of 796 batches: 3.247342109680176.
990
+ [ Thu Sep 15 04:41:44 2022 ] Top1: 46.12%
991
+ [ Thu Sep 15 04:41:45 2022 ] Top5: 78.11%
992
+ [ Thu Sep 15 04:41:45 2022 ] Training epoch: 134
993
+ [ Thu Sep 15 04:42:46 2022 ] Batch(80/243) done. Loss: 0.0072 lr:0.000100 network_time: 0.0276
994
+ [ Thu Sep 15 04:43:59 2022 ] Batch(180/243) done. Loss: 0.0043 lr:0.000100 network_time: 0.0273
995
+ [ Thu Sep 15 04:44:45 2022 ] Eval epoch: 134
996
+ [ Thu Sep 15 04:46:19 2022 ] Mean test loss of 796 batches: 3.0710413455963135.
997
+ [ Thu Sep 15 04:46:19 2022 ] Top1: 51.16%
998
+ [ Thu Sep 15 04:46:19 2022 ] Top5: 81.39%
999
+ [ Thu Sep 15 04:46:19 2022 ] Training epoch: 135
1000
+ [ Thu Sep 15 04:46:50 2022 ] Batch(37/243) done. Loss: 0.0047 lr:0.000100 network_time: 0.0276
1001
+ [ Thu Sep 15 04:48:03 2022 ] Batch(137/243) done. Loss: 0.0090 lr:0.000100 network_time: 0.0312
1002
+ [ Thu Sep 15 04:49:16 2022 ] Batch(237/243) done. Loss: 0.0177 lr:0.000100 network_time: 0.0272
1003
+ [ Thu Sep 15 04:49:20 2022 ] Eval epoch: 135
1004
+ [ Thu Sep 15 04:50:53 2022 ] Mean test loss of 796 batches: 3.1598618030548096.
1005
+ [ Thu Sep 15 04:50:53 2022 ] Top1: 51.52%
1006
+ [ Thu Sep 15 04:50:53 2022 ] Top5: 81.73%
1007
+ [ Thu Sep 15 04:50:54 2022 ] Training epoch: 136
1008
+ [ Thu Sep 15 04:52:06 2022 ] Batch(94/243) done. Loss: 0.0199 lr:0.000100 network_time: 0.0277
1009
+ [ Thu Sep 15 04:53:19 2022 ] Batch(194/243) done. Loss: 0.0069 lr:0.000100 network_time: 0.0288
1010
+ [ Thu Sep 15 04:53:54 2022 ] Eval epoch: 136
1011
+ [ Thu Sep 15 04:55:27 2022 ] Mean test loss of 796 batches: 3.3396480083465576.
1012
+ [ Thu Sep 15 04:55:27 2022 ] Top1: 51.16%
1013
+ [ Thu Sep 15 04:55:28 2022 ] Top5: 81.43%
1014
+ [ Thu Sep 15 04:55:28 2022 ] Training epoch: 137
1015
+ [ Thu Sep 15 04:56:09 2022 ] Batch(51/243) done. Loss: 0.0076 lr:0.000100 network_time: 0.0283
1016
+ [ Thu Sep 15 04:57:22 2022 ] Batch(151/243) done. Loss: 0.0110 lr:0.000100 network_time: 0.0278
1017
+ [ Thu Sep 15 04:58:28 2022 ] Eval epoch: 137
1018
+ [ Thu Sep 15 05:00:03 2022 ] Mean test loss of 796 batches: 3.2537786960601807.
1019
+ [ Thu Sep 15 05:00:03 2022 ] Top1: 51.74%
1020
+ [ Thu Sep 15 05:00:03 2022 ] Top5: 81.74%
1021
+ [ Thu Sep 15 05:00:03 2022 ] Training epoch: 138
1022
+ [ Thu Sep 15 05:00:13 2022 ] Batch(8/243) done. Loss: 0.0059 lr:0.000100 network_time: 0.0269
1023
+ [ Thu Sep 15 05:01:26 2022 ] Batch(108/243) done. Loss: 0.0089 lr:0.000100 network_time: 0.0274
1024
+ [ Thu Sep 15 05:02:39 2022 ] Batch(208/243) done. Loss: 0.0198 lr:0.000100 network_time: 0.0267
1025
+ [ Thu Sep 15 05:03:04 2022 ] Eval epoch: 138
1026
+ [ Thu Sep 15 05:04:37 2022 ] Mean test loss of 796 batches: 3.079983711242676.
1027
+ [ Thu Sep 15 05:04:37 2022 ] Top1: 50.78%
1028
+ [ Thu Sep 15 05:04:38 2022 ] Top5: 81.26%
1029
+ [ Thu Sep 15 05:04:38 2022 ] Training epoch: 139
1030
+ [ Thu Sep 15 05:05:29 2022 ] Batch(65/243) done. Loss: 0.0237 lr:0.000100 network_time: 0.0445
1031
+ [ Thu Sep 15 05:06:42 2022 ] Batch(165/243) done. Loss: 0.0038 lr:0.000100 network_time: 0.0308
1032
+ [ Thu Sep 15 05:07:38 2022 ] Eval epoch: 139
1033
+ [ Thu Sep 15 05:09:12 2022 ] Mean test loss of 796 batches: 3.1228296756744385.
1034
+ [ Thu Sep 15 05:09:12 2022 ] Top1: 51.04%
1035
+ [ Thu Sep 15 05:09:13 2022 ] Top5: 81.03%
1036
+ [ Thu Sep 15 05:09:13 2022 ] Training epoch: 140
1037
+ [ Thu Sep 15 05:09:32 2022 ] Batch(22/243) done. Loss: 0.0093 lr:0.000100 network_time: 0.0322
1038
+ [ Thu Sep 15 05:10:45 2022 ] Batch(122/243) done. Loss: 0.0093 lr:0.000100 network_time: 0.0295
1039
+ [ Thu Sep 15 05:11:58 2022 ] Batch(222/243) done. Loss: 0.0060 lr:0.000100 network_time: 0.0276
1040
+ [ Thu Sep 15 05:12:13 2022 ] Eval epoch: 140
1041
+ [ Thu Sep 15 05:13:47 2022 ] Mean test loss of 796 batches: 3.206451892852783.
1042
+ [ Thu Sep 15 05:13:47 2022 ] Top1: 51.22%
1043
+ [ Thu Sep 15 05:13:48 2022 ] Top5: 81.53%
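The log above ends with the evaluation after epoch 140 (Top1 51.22%, Top5 81.53%). Every evaluation block in this log follows the same three-line pattern (mean test loss, Top1, Top5), so the best epoch can be recovered by parsing it. A minimal sketch, not part of the uploaded files, assuming the log is saved locally as log.txt (placeholder path) and Python 3.8+:

import re

# Sketch only: extract (epoch, Top1, Top5) triples from the log format shown above.
epoch_pat = re.compile(r"Eval epoch: (\d+)")
top1_pat = re.compile(r"Top1: ([\d.]+)%")
top5_pat = re.compile(r"Top5: ([\d.]+)%")

results = []                      # list of (epoch, top1, top5)
epoch, top1 = None, None
with open("log.txt") as f:        # placeholder path
    for line in f:
        if m := epoch_pat.search(line):
            epoch = int(m.group(1))
        elif m := top1_pat.search(line):
            top1 = float(m.group(1))
        elif (m := top5_pat.search(line)) and epoch is not None and top1 is not None:
            results.append((epoch, top1, float(m.group(1))))

best = max(results, key=lambda r: r[1])
print(f"best Top1 {best[1]:.2f}% (Top5 {best[2]:.2f}%) at epoch {best[0]}")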
ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_joint_motion_xsub/shift_gcn.py ADDED
@@ -0,0 +1,216 @@
1
+ import torch
2
+ import torch.nn as nn
3
+ import torch.nn.functional as F
4
+ from torch.autograd import Variable
5
+ import numpy as np
6
+ import math
7
+
8
+ import sys
9
+ sys.path.append("./model/Temporal_shift/")
10
+
11
+ from cuda.shift import Shift
12
+
13
+
14
+ def import_class(name):
15
+ components = name.split('.')
16
+ mod = __import__(components[0])
17
+ for comp in components[1:]:
18
+ mod = getattr(mod, comp)
19
+ return mod
20
+
21
+ def conv_init(conv):
22
+ nn.init.kaiming_normal_(conv.weight, mode='fan_out')
23
+ nn.init.constant_(conv.bias, 0)
24
+
25
+
26
+ def bn_init(bn, scale):
27
+ nn.init.constant_(bn.weight, scale)
28
+ nn.init.constant_(bn.bias, 0)
29
+
30
+
31
+ class tcn(nn.Module):
32
+ def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
33
+ super(tcn, self).__init__()
34
+ pad = int((kernel_size - 1) / 2)
35
+ self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
36
+ stride=(stride, 1))
37
+
38
+ self.bn = nn.BatchNorm2d(out_channels)
39
+ self.relu = nn.ReLU()
40
+ conv_init(self.conv)
41
+ bn_init(self.bn, 1)
42
+
43
+ def forward(self, x):
44
+ x = self.bn(self.conv(x))
45
+ return x
46
+
47
+
48
+ class Shift_tcn(nn.Module):
49
+ def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
50
+ super(Shift_tcn, self).__init__()
51
+
52
+ self.in_channels = in_channels
53
+ self.out_channels = out_channels
54
+
55
+ self.bn = nn.BatchNorm2d(in_channels)
56
+ self.bn2 = nn.BatchNorm2d(in_channels)
57
+ bn_init(self.bn2, 1)
58
+ self.relu = nn.ReLU(inplace=True)
59
+ self.shift_in = Shift(channel=in_channels, stride=1, init_scale=1)
60
+ self.shift_out = Shift(channel=out_channels, stride=stride, init_scale=1)
61
+
62
+ self.temporal_linear = nn.Conv2d(in_channels, out_channels, 1)
63
+ nn.init.kaiming_normal_(self.temporal_linear.weight, mode='fan_out')
64
+
65
+ def forward(self, x):
66
+ x = self.bn(x)
67
+ # shift1
68
+ x = self.shift_in(x)
69
+ x = self.temporal_linear(x)
70
+ x = self.relu(x)
71
+ # shift2
72
+ x = self.shift_out(x)
73
+ x = self.bn2(x)
74
+ return x
75
+
76
+
77
+ class Shift_gcn(nn.Module):
78
+ def __init__(self, in_channels, out_channels, A, coff_embedding=4, num_subset=3):
79
+ super(Shift_gcn, self).__init__()
80
+ self.in_channels = in_channels
81
+ self.out_channels = out_channels
82
+ if in_channels != out_channels:
83
+ self.down = nn.Sequential(
84
+ nn.Conv2d(in_channels, out_channels, 1),
85
+ nn.BatchNorm2d(out_channels)
86
+ )
87
+ else:
88
+ self.down = lambda x: x
89
+
90
+ self.Linear_weight = nn.Parameter(torch.zeros(in_channels, out_channels, requires_grad=True, device='cuda'), requires_grad=True)
91
+ nn.init.normal_(self.Linear_weight, 0,math.sqrt(1.0/out_channels))
92
+
93
+ self.Linear_bias = nn.Parameter(torch.zeros(1,1,out_channels,requires_grad=True,device='cuda'),requires_grad=True)
94
+ nn.init.constant_(self.Linear_bias, 0)
95
+
96
+ self.Feature_Mask = nn.Parameter(torch.ones(1,25,in_channels, requires_grad=True,device='cuda'),requires_grad=True)
97
+ nn.init.constant_(self.Feature_Mask, 0)
98
+
99
+ self.bn = nn.BatchNorm1d(25*out_channels)
100
+ self.relu = nn.ReLU()
101
+
102
+ for m in self.modules():
103
+ if isinstance(m, nn.Conv2d):
104
+ conv_init(m)
105
+ elif isinstance(m, nn.BatchNorm2d):
106
+ bn_init(m, 1)
107
+
108
+ index_array = np.empty(25*in_channels).astype(int)
109
+ for i in range(25):
110
+ for j in range(in_channels):
111
+ index_array[i*in_channels + j] = (i*in_channels + j + j*in_channels)%(in_channels*25)
112
+ self.shift_in = nn.Parameter(torch.from_numpy(index_array),requires_grad=False)
113
+
114
+ index_array = np.empty(25*out_channels).astype(int)
115
+ for i in range(25):
116
+ for j in range(out_channels):
117
+ index_array[i*out_channels + j] = (i*out_channels + j - j*out_channels)%(out_channels*25)
118
+ self.shift_out = nn.Parameter(torch.from_numpy(index_array),requires_grad=False)
119
+
120
+
121
+ def forward(self, x0):
122
+ n, c, t, v = x0.size()
123
+ x = x0.permute(0,2,3,1).contiguous()
124
+
125
+ # shift1
126
+ x = x.view(n*t,v*c)
127
+ x = torch.index_select(x, 1, self.shift_in)
128
+ x = x.view(n*t,v,c)
129
+ x = x * (torch.tanh(self.Feature_Mask)+1)
130
+
131
+ x = torch.einsum('nwc,cd->nwd', (x, self.Linear_weight)).contiguous() # nt,v,c
132
+ x = x + self.Linear_bias
133
+
134
+ # shift2
135
+ x = x.view(n*t,-1)
136
+ x = torch.index_select(x, 1, self.shift_out)
137
+ x = self.bn(x)
138
+ x = x.view(n,t,v,self.out_channels).permute(0,3,1,2) # n,c,t,v
139
+
140
+ x = x + self.down(x0)
141
+ x = self.relu(x)
142
+ return x
143
+
144
+
145
+ class TCN_GCN_unit(nn.Module):
146
+ def __init__(self, in_channels, out_channels, A, stride=1, residual=True):
147
+ super(TCN_GCN_unit, self).__init__()
148
+ self.gcn1 = Shift_gcn(in_channels, out_channels, A)
149
+ self.tcn1 = Shift_tcn(out_channels, out_channels, stride=stride)
150
+ self.relu = nn.ReLU()
151
+
152
+ if not residual:
153
+ self.residual = lambda x: 0
154
+
155
+ elif (in_channels == out_channels) and (stride == 1):
156
+ self.residual = lambda x: x
157
+ else:
158
+ self.residual = tcn(in_channels, out_channels, kernel_size=1, stride=stride)
159
+
160
+ def forward(self, x):
161
+ x = self.tcn1(self.gcn1(x)) + self.residual(x)
162
+ return self.relu(x)
163
+
164
+
165
+ class Model(nn.Module):
166
+ def __init__(self, num_class=60, num_point=25, num_person=2, graph=None, graph_args=dict(), in_channels=3):
167
+ super(Model, self).__init__()
168
+
169
+ if graph is None:
170
+ raise ValueError()
171
+ else:
172
+ Graph = import_class(graph)
173
+ self.graph = Graph(**graph_args)
174
+
175
+ A = self.graph.A
176
+ self.data_bn = nn.BatchNorm1d(num_person * in_channels * num_point)
177
+
178
+ self.l1 = TCN_GCN_unit(3, 64, A, residual=False)
179
+ self.l2 = TCN_GCN_unit(64, 64, A)
180
+ self.l3 = TCN_GCN_unit(64, 64, A)
181
+ self.l4 = TCN_GCN_unit(64, 64, A)
182
+ self.l5 = TCN_GCN_unit(64, 128, A, stride=2)
183
+ self.l6 = TCN_GCN_unit(128, 128, A)
184
+ self.l7 = TCN_GCN_unit(128, 128, A)
185
+ self.l8 = TCN_GCN_unit(128, 256, A, stride=2)
186
+ self.l9 = TCN_GCN_unit(256, 256, A)
187
+ self.l10 = TCN_GCN_unit(256, 256, A)
188
+
189
+ self.fc = nn.Linear(256, num_class)
190
+ nn.init.normal_(self.fc.weight, 0, math.sqrt(2. / num_class))
191
+ bn_init(self.data_bn, 1)
192
+
193
+ def forward(self, x):
194
+ N, C, T, V, M = x.size()
195
+
196
+ x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
197
+ x = self.data_bn(x)
198
+ x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
199
+
200
+ x = self.l1(x)
201
+ x = self.l2(x)
202
+ x = self.l3(x)
203
+ x = self.l4(x)
204
+ x = self.l5(x)
205
+ x = self.l6(x)
206
+ x = self.l7(x)
207
+ x = self.l8(x)
208
+ x = self.l9(x)
209
+ x = self.l10(x)
210
+
211
+ # N*M,C,T,V
212
+ c_new = x.size(1)
213
+ x = x.view(N, M, c_new, -1)
214
+ x = x.mean(3).mean(1)
215
+
216
+ return self.fc(x)
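Editor's note: a minimal usage sketch for the Model class above, not part of the uploaded file. The import path follows the config entry `model: model.shift_gcn.Model`, the constructor arguments mirror the `model_args` in the config that follows, and the input is a dummy batch shaped (N, C, T, V, M) as expected by `forward`. A CUDA device is assumed, since several parameters above are created with device='cuda', and the temporal-shift module used by Shift_tcn (defined earlier in this file) may additionally require the repository's compiled shift operator.

    import torch
    from model.shift_gcn import Model  # path assumed from the config entry `model: model.shift_gcn.Model`

    # Constructor arguments mirror model_args in the config below
    net = Model(num_class=120, num_point=25, num_person=2,
                graph='graph.ntu_rgb_d.Graph',
                graph_args={'labeling_mode': 'spatial'}).cuda()

    # Dummy skeleton batch: N=4 samples, C=3 coordinates, T=64 frames, V=25 joints, M=2 persons
    x = torch.randn(4, 3, 64, 25, 2, device='cuda')
    logits = net(x)  # (4, 120) class scores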
ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_joint_xsub/config.yaml ADDED
@@ -0,0 +1,56 @@
1
+ Experiment_name: ntu120_joint_xsub
2
+ base_lr: 0.1
3
+ batch_size: 64
4
+ config: ./config/ntu120_xsub/train_joint.yaml
5
+ device:
6
+ - 4
7
+ - 5
8
+ eval_interval: 5
9
+ feeder: feeders.feeder.Feeder
10
+ ignore_weights: []
11
+ log_interval: 100
12
+ model: model.shift_gcn.Model
13
+ model_args:
14
+ graph: graph.ntu_rgb_d.Graph
15
+ graph_args:
16
+ labeling_mode: spatial
17
+ num_class: 120
18
+ num_person: 2
19
+ num_point: 25
20
+ model_saved_name: ./save_models/ntu120_joint_xsub
21
+ nesterov: true
22
+ num_epoch: 140
23
+ num_worker: 32
24
+ only_train_epoch: 1
25
+ only_train_part: true
26
+ optimizer: SGD
27
+ phase: train
28
+ print_log: true
29
+ save_interval: 2
30
+ save_score: false
31
+ seed: 1
32
+ show_topk:
33
+ - 1
34
+ - 5
35
+ start_epoch: 0
36
+ step:
37
+ - 60
38
+ - 80
39
+ - 100
40
+ test_batch_size: 64
41
+ test_feeder_args:
42
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_joint.npy
43
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl
44
+ train_feeder_args:
45
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_joint.npy
46
+ debug: false
47
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl
48
+ normalization: false
49
+ random_choose: false
50
+ random_move: false
51
+ random_shift: false
52
+ window_size: -1
53
+ warm_up_epoch: 0
54
+ weight_decay: 0.0001
55
+ weights: null
56
+ work_dir: ./work_dir/ntu120_joint_xsub
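Editor's note: the YAML above is plain key/value data and can be inspected directly. A minimal sketch, assuming PyYAML is installed and the file path from the `config:` entry above:

    import yaml

    with open('./config/ntu120_xsub/train_joint.yaml') as f:  # path from the config: entry above
        cfg = yaml.safe_load(f)

    print(cfg['Experiment_name'])           # ntu120_joint_xsub
    print(cfg['model'], cfg['model_args'])  # model.shift_gcn.Model and its graph/num_class settings
    print(cfg['base_lr'], cfg['step'])      # 0.1 and the decay milestones [60, 80, 100]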
ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_joint_xsub/eval_results/best_acc.pkl ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7032ac5d6594258fa0c42f8bd1e2317d14fdaa2bbd0c22e11038ce33d84f6f1d
3
+ size 29946137
ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_joint_xsub/log.txt ADDED
@@ -0,0 +1,1043 @@
1
+ [ Wed Sep 14 18:31:42 2022 ] Parameters:
2
+ {'work_dir': './work_dir/ntu120_joint_xsub', 'model_saved_name': './save_models/ntu120_joint_xsub', 'Experiment_name': 'ntu120_joint_xsub', 'config': './config/ntu120_xsub/train_joint.yaml', 'phase': 'train', 'save_score': False, 'seed': 1, 'log_interval': 100, 'save_interval': 2, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl'}, 'model': 'model.shift_gcn.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.1, 'step': [60, 80, 100], 'device': [4, 5], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 64, 'test_batch_size': 64, 'start_epoch': 0, 'num_epoch': 140, 'weight_decay': 0.0001, 'only_train_part': True, 'only_train_epoch': 1, 'warm_up_epoch': 0}
3
+
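Editor's note: in the log below, the lr column drops from 0.1 to 0.01 at epoch 61, to 0.001 at epoch 81, and to 0.0001 at epoch 101, matching `base_lr: 0.1` and `step: [60, 80, 100]` in the parameters above with a decay factor of 0.1. A minimal PyTorch sketch of an equivalent optimizer and schedule (not the author's exact training loop; `net` is the Model from shift_gcn.py, and momentum=0.9 is an assumption required for Nesterov but not recorded in the parameters):

    import torch

    optimizer = torch.optim.SGD(net.parameters(), lr=0.1, momentum=0.9,  # momentum assumed; nesterov/weight_decay from the parameters above
                                nesterov=True, weight_decay=0.0001)
    scheduler = torch.optim.lr_scheduler.MultiStepLR(optimizer, milestones=[60, 80, 100], gamma=0.1)

    for epoch in range(140):   # num_epoch: 140
        # ... one pass over the 243 training batches ...
        scheduler.step()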
4
+ [ Wed Sep 14 18:31:42 2022 ] Training epoch: 1
5
+ [ Wed Sep 14 18:33:02 2022 ] Batch(99/243) done. Loss: 3.7763 lr:0.100000 network_time: 0.0264
6
+ [ Wed Sep 14 18:34:15 2022 ] Batch(199/243) done. Loss: 2.6228 lr:0.100000 network_time: 0.0266
7
+ [ Wed Sep 14 18:34:46 2022 ] Eval epoch: 1
8
+ [ Wed Sep 14 18:36:20 2022 ] Mean test loss of 796 batches: 5.046554088592529.
9
+ [ Wed Sep 14 18:36:20 2022 ] Top1: 9.24%
10
+ [ Wed Sep 14 18:36:20 2022 ] Top5: 25.14%
11
+ [ Wed Sep 14 18:36:21 2022 ] Training epoch: 2
12
+ [ Wed Sep 14 18:37:05 2022 ] Batch(56/243) done. Loss: 2.6595 lr:0.100000 network_time: 0.0272
13
+ [ Wed Sep 14 18:38:17 2022 ] Batch(156/243) done. Loss: 2.4710 lr:0.100000 network_time: 0.0266
14
+ [ Wed Sep 14 18:39:20 2022 ] Eval epoch: 2
15
+ [ Wed Sep 14 18:40:54 2022 ] Mean test loss of 796 batches: 3.9119958877563477.
16
+ [ Wed Sep 14 18:40:54 2022 ] Top1: 17.89%
17
+ [ Wed Sep 14 18:40:54 2022 ] Top5: 36.58%
18
+ [ Wed Sep 14 18:40:55 2022 ] Training epoch: 3
19
+ [ Wed Sep 14 18:41:07 2022 ] Batch(13/243) done. Loss: 2.0612 lr:0.100000 network_time: 0.0280
20
+ [ Wed Sep 14 18:42:20 2022 ] Batch(113/243) done. Loss: 1.8304 lr:0.100000 network_time: 0.0263
21
+ [ Wed Sep 14 18:43:33 2022 ] Batch(213/243) done. Loss: 2.0863 lr:0.100000 network_time: 0.0261
22
+ [ Wed Sep 14 18:43:54 2022 ] Eval epoch: 3
23
+ [ Wed Sep 14 18:45:27 2022 ] Mean test loss of 796 batches: 4.149970531463623.
24
+ [ Wed Sep 14 18:45:28 2022 ] Top1: 18.78%
25
+ [ Wed Sep 14 18:45:28 2022 ] Top5: 42.45%
26
+ [ Wed Sep 14 18:45:28 2022 ] Training epoch: 4
27
+ [ Wed Sep 14 18:46:23 2022 ] Batch(70/243) done. Loss: 1.4658 lr:0.100000 network_time: 0.0271
28
+ [ Wed Sep 14 18:47:35 2022 ] Batch(170/243) done. Loss: 1.3747 lr:0.100000 network_time: 0.0272
29
+ [ Wed Sep 14 18:48:28 2022 ] Eval epoch: 4
30
+ [ Wed Sep 14 18:50:01 2022 ] Mean test loss of 796 batches: 3.2994141578674316.
31
+ [ Wed Sep 14 18:50:01 2022 ] Top1: 25.44%
32
+ [ Wed Sep 14 18:50:02 2022 ] Top5: 53.52%
33
+ [ Wed Sep 14 18:50:02 2022 ] Training epoch: 5
34
+ [ Wed Sep 14 18:50:25 2022 ] Batch(27/243) done. Loss: 1.5292 lr:0.100000 network_time: 0.0295
35
+ [ Wed Sep 14 18:51:38 2022 ] Batch(127/243) done. Loss: 1.5052 lr:0.100000 network_time: 0.0278
36
+ [ Wed Sep 14 18:52:51 2022 ] Batch(227/243) done. Loss: 1.6211 lr:0.100000 network_time: 0.0309
37
+ [ Wed Sep 14 18:53:02 2022 ] Eval epoch: 5
38
+ [ Wed Sep 14 18:54:35 2022 ] Mean test loss of 796 batches: 3.1267082691192627.
39
+ [ Wed Sep 14 18:54:36 2022 ] Top1: 27.96%
40
+ [ Wed Sep 14 18:54:36 2022 ] Top5: 56.70%
41
+ [ Wed Sep 14 18:54:36 2022 ] Training epoch: 6
42
+ [ Wed Sep 14 18:55:41 2022 ] Batch(84/243) done. Loss: 1.5147 lr:0.100000 network_time: 0.0307
43
+ [ Wed Sep 14 18:56:53 2022 ] Batch(184/243) done. Loss: 1.0397 lr:0.100000 network_time: 0.0267
44
+ [ Wed Sep 14 18:57:36 2022 ] Eval epoch: 6
45
+ [ Wed Sep 14 18:59:09 2022 ] Mean test loss of 796 batches: 2.968212366104126.
46
+ [ Wed Sep 14 18:59:09 2022 ] Top1: 29.50%
47
+ [ Wed Sep 14 18:59:10 2022 ] Top5: 62.41%
48
+ [ Wed Sep 14 18:59:10 2022 ] Training epoch: 7
49
+ [ Wed Sep 14 18:59:43 2022 ] Batch(41/243) done. Loss: 1.2200 lr:0.100000 network_time: 0.0276
50
+ [ Wed Sep 14 19:00:56 2022 ] Batch(141/243) done. Loss: 0.8820 lr:0.100000 network_time: 0.0316
51
+ [ Wed Sep 14 19:02:08 2022 ] Batch(241/243) done. Loss: 1.0603 lr:0.100000 network_time: 0.0315
52
+ [ Wed Sep 14 19:02:09 2022 ] Eval epoch: 7
53
+ [ Wed Sep 14 19:03:43 2022 ] Mean test loss of 796 batches: 2.7241861820220947.
54
+ [ Wed Sep 14 19:03:43 2022 ] Top1: 33.51%
55
+ [ Wed Sep 14 19:03:44 2022 ] Top5: 67.41%
56
+ [ Wed Sep 14 19:03:44 2022 ] Training epoch: 8
57
+ [ Wed Sep 14 19:04:58 2022 ] Batch(98/243) done. Loss: 0.8214 lr:0.100000 network_time: 0.0272
58
+ [ Wed Sep 14 19:06:11 2022 ] Batch(198/243) done. Loss: 0.8479 lr:0.100000 network_time: 0.0271
59
+ [ Wed Sep 14 19:06:43 2022 ] Eval epoch: 8
60
+ [ Wed Sep 14 19:08:17 2022 ] Mean test loss of 796 batches: 2.666290044784546.
61
+ [ Wed Sep 14 19:08:17 2022 ] Top1: 35.74%
62
+ [ Wed Sep 14 19:08:17 2022 ] Top5: 67.01%
63
+ [ Wed Sep 14 19:08:18 2022 ] Training epoch: 9
64
+ [ Wed Sep 14 19:09:01 2022 ] Batch(55/243) done. Loss: 1.2169 lr:0.100000 network_time: 0.0288
65
+ [ Wed Sep 14 19:10:14 2022 ] Batch(155/243) done. Loss: 1.0688 lr:0.100000 network_time: 0.0333
66
+ [ Wed Sep 14 19:11:17 2022 ] Eval epoch: 9
67
+ [ Wed Sep 14 19:12:51 2022 ] Mean test loss of 796 batches: 2.7376022338867188.
68
+ [ Wed Sep 14 19:12:51 2022 ] Top1: 35.14%
69
+ [ Wed Sep 14 19:12:52 2022 ] Top5: 66.76%
70
+ [ Wed Sep 14 19:12:52 2022 ] Training epoch: 10
71
+ [ Wed Sep 14 19:13:04 2022 ] Batch(12/243) done. Loss: 0.9615 lr:0.100000 network_time: 0.0263
72
+ [ Wed Sep 14 19:14:17 2022 ] Batch(112/243) done. Loss: 1.0427 lr:0.100000 network_time: 0.0269
73
+ [ Wed Sep 14 19:15:30 2022 ] Batch(212/243) done. Loss: 1.2006 lr:0.100000 network_time: 0.0309
74
+ [ Wed Sep 14 19:15:52 2022 ] Eval epoch: 10
75
+ [ Wed Sep 14 19:17:26 2022 ] Mean test loss of 796 batches: 2.6587717533111572.
76
+ [ Wed Sep 14 19:17:26 2022 ] Top1: 36.70%
77
+ [ Wed Sep 14 19:17:26 2022 ] Top5: 70.12%
78
+ [ Wed Sep 14 19:17:27 2022 ] Training epoch: 11
79
+ [ Wed Sep 14 19:18:20 2022 ] Batch(69/243) done. Loss: 0.8898 lr:0.100000 network_time: 0.0301
80
+ [ Wed Sep 14 19:19:33 2022 ] Batch(169/243) done. Loss: 0.7702 lr:0.100000 network_time: 0.0271
81
+ [ Wed Sep 14 19:20:26 2022 ] Eval epoch: 11
82
+ [ Wed Sep 14 19:21:59 2022 ] Mean test loss of 796 batches: 2.6572091579437256.
83
+ [ Wed Sep 14 19:22:00 2022 ] Top1: 35.17%
84
+ [ Wed Sep 14 19:22:00 2022 ] Top5: 70.29%
85
+ [ Wed Sep 14 19:22:00 2022 ] Training epoch: 12
86
+ [ Wed Sep 14 19:22:23 2022 ] Batch(26/243) done. Loss: 0.5439 lr:0.100000 network_time: 0.0277
87
+ [ Wed Sep 14 19:23:36 2022 ] Batch(126/243) done. Loss: 0.9749 lr:0.100000 network_time: 0.0269
88
+ [ Wed Sep 14 19:24:48 2022 ] Batch(226/243) done. Loss: 1.0488 lr:0.100000 network_time: 0.0316
89
+ [ Wed Sep 14 19:25:00 2022 ] Eval epoch: 12
90
+ [ Wed Sep 14 19:26:34 2022 ] Mean test loss of 796 batches: 2.824629783630371.
91
+ [ Wed Sep 14 19:26:34 2022 ] Top1: 35.29%
92
+ [ Wed Sep 14 19:26:35 2022 ] Top5: 67.09%
93
+ [ Wed Sep 14 19:26:35 2022 ] Training epoch: 13
94
+ [ Wed Sep 14 19:27:39 2022 ] Batch(83/243) done. Loss: 0.7571 lr:0.100000 network_time: 0.0294
95
+ [ Wed Sep 14 19:28:52 2022 ] Batch(183/243) done. Loss: 0.8261 lr:0.100000 network_time: 0.0262
96
+ [ Wed Sep 14 19:29:35 2022 ] Eval epoch: 13
97
+ [ Wed Sep 14 19:31:08 2022 ] Mean test loss of 796 batches: 2.3490281105041504.
98
+ [ Wed Sep 14 19:31:09 2022 ] Top1: 41.19%
99
+ [ Wed Sep 14 19:31:09 2022 ] Top5: 74.54%
100
+ [ Wed Sep 14 19:31:10 2022 ] Training epoch: 14
101
+ [ Wed Sep 14 19:31:42 2022 ] Batch(40/243) done. Loss: 0.4956 lr:0.100000 network_time: 0.0235
102
+ [ Wed Sep 14 19:32:55 2022 ] Batch(140/243) done. Loss: 0.9356 lr:0.100000 network_time: 0.0266
103
+ [ Wed Sep 14 19:34:07 2022 ] Batch(240/243) done. Loss: 1.0499 lr:0.100000 network_time: 0.0434
104
+ [ Wed Sep 14 19:34:09 2022 ] Eval epoch: 14
105
+ [ Wed Sep 14 19:35:42 2022 ] Mean test loss of 796 batches: 2.4458417892456055.
106
+ [ Wed Sep 14 19:35:42 2022 ] Top1: 38.41%
107
+ [ Wed Sep 14 19:35:43 2022 ] Top5: 71.95%
108
+ [ Wed Sep 14 19:35:43 2022 ] Training epoch: 15
109
+ [ Wed Sep 14 19:36:57 2022 ] Batch(97/243) done. Loss: 0.9353 lr:0.100000 network_time: 0.0274
110
+ [ Wed Sep 14 19:38:10 2022 ] Batch(197/243) done. Loss: 0.5173 lr:0.100000 network_time: 0.0406
111
+ [ Wed Sep 14 19:38:42 2022 ] Eval epoch: 15
112
+ [ Wed Sep 14 19:40:16 2022 ] Mean test loss of 796 batches: 2.397752523422241.
113
+ [ Wed Sep 14 19:40:17 2022 ] Top1: 41.95%
114
+ [ Wed Sep 14 19:40:17 2022 ] Top5: 75.68%
115
+ [ Wed Sep 14 19:40:17 2022 ] Training epoch: 16
116
+ [ Wed Sep 14 19:41:00 2022 ] Batch(54/243) done. Loss: 0.5062 lr:0.100000 network_time: 0.0263
117
+ [ Wed Sep 14 19:42:13 2022 ] Batch(154/243) done. Loss: 0.6092 lr:0.100000 network_time: 0.0276
118
+ [ Wed Sep 14 19:43:17 2022 ] Eval epoch: 16
119
+ [ Wed Sep 14 19:44:50 2022 ] Mean test loss of 796 batches: 2.802043914794922.
120
+ [ Wed Sep 14 19:44:51 2022 ] Top1: 38.52%
121
+ [ Wed Sep 14 19:44:51 2022 ] Top5: 71.35%
122
+ [ Wed Sep 14 19:44:51 2022 ] Training epoch: 17
123
+ [ Wed Sep 14 19:45:03 2022 ] Batch(11/243) done. Loss: 0.8011 lr:0.100000 network_time: 0.0280
124
+ [ Wed Sep 14 19:46:15 2022 ] Batch(111/243) done. Loss: 0.6675 lr:0.100000 network_time: 0.0254
125
+ [ Wed Sep 14 19:47:28 2022 ] Batch(211/243) done. Loss: 0.8174 lr:0.100000 network_time: 0.0271
126
+ [ Wed Sep 14 19:47:51 2022 ] Eval epoch: 17
127
+ [ Wed Sep 14 19:49:24 2022 ] Mean test loss of 796 batches: 2.7333076000213623.
128
+ [ Wed Sep 14 19:49:24 2022 ] Top1: 38.83%
129
+ [ Wed Sep 14 19:49:25 2022 ] Top5: 71.56%
130
+ [ Wed Sep 14 19:49:25 2022 ] Training epoch: 18
131
+ [ Wed Sep 14 19:50:18 2022 ] Batch(68/243) done. Loss: 0.7092 lr:0.100000 network_time: 0.0274
132
+ [ Wed Sep 14 19:51:31 2022 ] Batch(168/243) done. Loss: 0.6670 lr:0.100000 network_time: 0.0264
133
+ [ Wed Sep 14 19:52:25 2022 ] Eval epoch: 18
134
+ [ Wed Sep 14 19:53:58 2022 ] Mean test loss of 796 batches: 2.7909624576568604.
135
+ [ Wed Sep 14 19:53:59 2022 ] Top1: 39.14%
136
+ [ Wed Sep 14 19:54:00 2022 ] Top5: 72.63%
137
+ [ Wed Sep 14 19:54:00 2022 ] Training epoch: 19
138
+ [ Wed Sep 14 19:54:21 2022 ] Batch(25/243) done. Loss: 0.6762 lr:0.100000 network_time: 0.0326
139
+ [ Wed Sep 14 19:55:34 2022 ] Batch(125/243) done. Loss: 0.5623 lr:0.100000 network_time: 0.0266
140
+ [ Wed Sep 14 19:56:47 2022 ] Batch(225/243) done. Loss: 0.7143 lr:0.100000 network_time: 0.0264
141
+ [ Wed Sep 14 19:56:59 2022 ] Eval epoch: 19
142
+ [ Wed Sep 14 19:58:33 2022 ] Mean test loss of 796 batches: 2.4540889263153076.
143
+ [ Wed Sep 14 19:58:33 2022 ] Top1: 42.82%
144
+ [ Wed Sep 14 19:58:34 2022 ] Top5: 75.10%
145
+ [ Wed Sep 14 19:58:34 2022 ] Training epoch: 20
146
+ [ Wed Sep 14 19:59:37 2022 ] Batch(82/243) done. Loss: 0.5975 lr:0.100000 network_time: 0.0294
147
+ [ Wed Sep 14 20:00:50 2022 ] Batch(182/243) done. Loss: 0.5475 lr:0.100000 network_time: 0.0273
148
+ [ Wed Sep 14 20:01:33 2022 ] Eval epoch: 20
149
+ [ Wed Sep 14 20:03:07 2022 ] Mean test loss of 796 batches: 2.881371259689331.
150
+ [ Wed Sep 14 20:03:07 2022 ] Top1: 41.01%
151
+ [ Wed Sep 14 20:03:07 2022 ] Top5: 71.62%
152
+ [ Wed Sep 14 20:03:08 2022 ] Training epoch: 21
153
+ [ Wed Sep 14 20:03:40 2022 ] Batch(39/243) done. Loss: 0.3485 lr:0.100000 network_time: 0.0327
154
+ [ Wed Sep 14 20:04:52 2022 ] Batch(139/243) done. Loss: 0.5871 lr:0.100000 network_time: 0.0248
155
+ [ Wed Sep 14 20:06:05 2022 ] Batch(239/243) done. Loss: 0.4812 lr:0.100000 network_time: 0.0274
156
+ [ Wed Sep 14 20:06:07 2022 ] Eval epoch: 21
157
+ [ Wed Sep 14 20:07:41 2022 ] Mean test loss of 796 batches: 2.771573543548584.
158
+ [ Wed Sep 14 20:07:42 2022 ] Top1: 38.53%
159
+ [ Wed Sep 14 20:07:42 2022 ] Top5: 72.56%
160
+ [ Wed Sep 14 20:07:42 2022 ] Training epoch: 22
161
+ [ Wed Sep 14 20:08:56 2022 ] Batch(96/243) done. Loss: 0.6043 lr:0.100000 network_time: 0.0308
162
+ [ Wed Sep 14 20:10:08 2022 ] Batch(196/243) done. Loss: 0.6230 lr:0.100000 network_time: 0.0328
163
+ [ Wed Sep 14 20:10:42 2022 ] Eval epoch: 22
164
+ [ Wed Sep 14 20:12:16 2022 ] Mean test loss of 796 batches: 2.2919068336486816.
165
+ [ Wed Sep 14 20:12:16 2022 ] Top1: 46.96%
166
+ [ Wed Sep 14 20:12:17 2022 ] Top5: 78.88%
167
+ [ Wed Sep 14 20:12:17 2022 ] Training epoch: 23
168
+ [ Wed Sep 14 20:12:59 2022 ] Batch(53/243) done. Loss: 0.3300 lr:0.100000 network_time: 0.0280
169
+ [ Wed Sep 14 20:14:11 2022 ] Batch(153/243) done. Loss: 0.7194 lr:0.100000 network_time: 0.0271
170
+ [ Wed Sep 14 20:15:16 2022 ] Eval epoch: 23
171
+ [ Wed Sep 14 20:16:50 2022 ] Mean test loss of 796 batches: 2.9069929122924805.
172
+ [ Wed Sep 14 20:16:50 2022 ] Top1: 38.25%
173
+ [ Wed Sep 14 20:16:51 2022 ] Top5: 70.40%
174
+ [ Wed Sep 14 20:16:51 2022 ] Training epoch: 24
175
+ [ Wed Sep 14 20:17:02 2022 ] Batch(10/243) done. Loss: 0.4030 lr:0.100000 network_time: 0.0263
176
+ [ Wed Sep 14 20:18:14 2022 ] Batch(110/243) done. Loss: 0.5649 lr:0.100000 network_time: 0.0275
177
+ [ Wed Sep 14 20:19:27 2022 ] Batch(210/243) done. Loss: 0.4009 lr:0.100000 network_time: 0.0319
178
+ [ Wed Sep 14 20:19:50 2022 ] Eval epoch: 24
179
+ [ Wed Sep 14 20:21:24 2022 ] Mean test loss of 796 batches: 2.52933931350708.
180
+ [ Wed Sep 14 20:21:24 2022 ] Top1: 43.88%
181
+ [ Wed Sep 14 20:21:25 2022 ] Top5: 75.25%
182
+ [ Wed Sep 14 20:21:25 2022 ] Training epoch: 25
183
+ [ Wed Sep 14 20:22:17 2022 ] Batch(67/243) done. Loss: 0.4913 lr:0.100000 network_time: 0.0322
184
+ [ Wed Sep 14 20:23:29 2022 ] Batch(167/243) done. Loss: 0.4311 lr:0.100000 network_time: 0.0260
185
+ [ Wed Sep 14 20:24:24 2022 ] Eval epoch: 25
186
+ [ Wed Sep 14 20:25:58 2022 ] Mean test loss of 796 batches: 2.5313878059387207.
187
+ [ Wed Sep 14 20:25:58 2022 ] Top1: 45.00%
188
+ [ Wed Sep 14 20:25:58 2022 ] Top5: 77.09%
189
+ [ Wed Sep 14 20:25:59 2022 ] Training epoch: 26
190
+ [ Wed Sep 14 20:26:20 2022 ] Batch(24/243) done. Loss: 0.4224 lr:0.100000 network_time: 0.0263
191
+ [ Wed Sep 14 20:27:32 2022 ] Batch(124/243) done. Loss: 0.3813 lr:0.100000 network_time: 0.0280
192
+ [ Wed Sep 14 20:28:45 2022 ] Batch(224/243) done. Loss: 0.4928 lr:0.100000 network_time: 0.0277
193
+ [ Wed Sep 14 20:28:58 2022 ] Eval epoch: 26
194
+ [ Wed Sep 14 20:30:32 2022 ] Mean test loss of 796 batches: 2.687304973602295.
195
+ [ Wed Sep 14 20:30:32 2022 ] Top1: 43.42%
196
+ [ Wed Sep 14 20:30:33 2022 ] Top5: 75.80%
197
+ [ Wed Sep 14 20:30:33 2022 ] Training epoch: 27
198
+ [ Wed Sep 14 20:31:35 2022 ] Batch(81/243) done. Loss: 0.3775 lr:0.100000 network_time: 0.0259
199
+ [ Wed Sep 14 20:32:48 2022 ] Batch(181/243) done. Loss: 0.4857 lr:0.100000 network_time: 0.0270
200
+ [ Wed Sep 14 20:33:33 2022 ] Eval epoch: 27
201
+ [ Wed Sep 14 20:35:06 2022 ] Mean test loss of 796 batches: 2.4036240577697754.
202
+ [ Wed Sep 14 20:35:06 2022 ] Top1: 44.58%
203
+ [ Wed Sep 14 20:35:06 2022 ] Top5: 78.53%
204
+ [ Wed Sep 14 20:35:06 2022 ] Training epoch: 28
205
+ [ Wed Sep 14 20:35:38 2022 ] Batch(38/243) done. Loss: 0.1812 lr:0.100000 network_time: 0.0274
206
+ [ Wed Sep 14 20:36:50 2022 ] Batch(138/243) done. Loss: 0.4930 lr:0.100000 network_time: 0.0282
207
+ [ Wed Sep 14 20:38:03 2022 ] Batch(238/243) done. Loss: 0.5984 lr:0.100000 network_time: 0.0271
208
+ [ Wed Sep 14 20:38:06 2022 ] Eval epoch: 28
209
+ [ Wed Sep 14 20:39:39 2022 ] Mean test loss of 796 batches: 2.756429433822632.
210
+ [ Wed Sep 14 20:39:40 2022 ] Top1: 41.86%
211
+ [ Wed Sep 14 20:39:40 2022 ] Top5: 74.64%
212
+ [ Wed Sep 14 20:39:40 2022 ] Training epoch: 29
213
+ [ Wed Sep 14 20:40:53 2022 ] Batch(95/243) done. Loss: 0.4065 lr:0.100000 network_time: 0.0305
214
+ [ Wed Sep 14 20:42:06 2022 ] Batch(195/243) done. Loss: 0.6086 lr:0.100000 network_time: 0.0269
215
+ [ Wed Sep 14 20:42:40 2022 ] Eval epoch: 29
216
+ [ Wed Sep 14 20:44:13 2022 ] Mean test loss of 796 batches: 2.7290701866149902.
217
+ [ Wed Sep 14 20:44:13 2022 ] Top1: 42.17%
218
+ [ Wed Sep 14 20:44:14 2022 ] Top5: 73.98%
219
+ [ Wed Sep 14 20:44:14 2022 ] Training epoch: 30
220
+ [ Wed Sep 14 20:44:55 2022 ] Batch(52/243) done. Loss: 0.2625 lr:0.100000 network_time: 0.0307
221
+ [ Wed Sep 14 20:46:08 2022 ] Batch(152/243) done. Loss: 0.4039 lr:0.100000 network_time: 0.0273
222
+ [ Wed Sep 14 20:47:13 2022 ] Eval epoch: 30
223
+ [ Wed Sep 14 20:48:46 2022 ] Mean test loss of 796 batches: 2.819765090942383.
224
+ [ Wed Sep 14 20:48:47 2022 ] Top1: 41.09%
225
+ [ Wed Sep 14 20:48:47 2022 ] Top5: 74.95%
226
+ [ Wed Sep 14 20:48:47 2022 ] Training epoch: 31
227
+ [ Wed Sep 14 20:48:57 2022 ] Batch(9/243) done. Loss: 0.3648 lr:0.100000 network_time: 0.0270
228
+ [ Wed Sep 14 20:50:10 2022 ] Batch(109/243) done. Loss: 0.2452 lr:0.100000 network_time: 0.0319
229
+ [ Wed Sep 14 20:51:23 2022 ] Batch(209/243) done. Loss: 0.4705 lr:0.100000 network_time: 0.0275
230
+ [ Wed Sep 14 20:51:47 2022 ] Eval epoch: 31
231
+ [ Wed Sep 14 20:53:20 2022 ] Mean test loss of 796 batches: 2.6726372241973877.
232
+ [ Wed Sep 14 20:53:21 2022 ] Top1: 43.70%
233
+ [ Wed Sep 14 20:53:21 2022 ] Top5: 76.80%
234
+ [ Wed Sep 14 20:53:21 2022 ] Training epoch: 32
235
+ [ Wed Sep 14 20:54:13 2022 ] Batch(66/243) done. Loss: 0.4521 lr:0.100000 network_time: 0.0319
236
+ [ Wed Sep 14 20:55:25 2022 ] Batch(166/243) done. Loss: 0.7311 lr:0.100000 network_time: 0.0317
237
+ [ Wed Sep 14 20:56:21 2022 ] Eval epoch: 32
238
+ [ Wed Sep 14 20:57:55 2022 ] Mean test loss of 796 batches: 2.6101021766662598.
239
+ [ Wed Sep 14 20:57:55 2022 ] Top1: 44.74%
240
+ [ Wed Sep 14 20:57:55 2022 ] Top5: 75.74%
241
+ [ Wed Sep 14 20:57:56 2022 ] Training epoch: 33
242
+ [ Wed Sep 14 20:58:16 2022 ] Batch(23/243) done. Loss: 0.4080 lr:0.100000 network_time: 0.0304
243
+ [ Wed Sep 14 20:59:29 2022 ] Batch(123/243) done. Loss: 0.5928 lr:0.100000 network_time: 0.0263
244
+ [ Wed Sep 14 21:00:41 2022 ] Batch(223/243) done. Loss: 0.5750 lr:0.100000 network_time: 0.0277
245
+ [ Wed Sep 14 21:00:55 2022 ] Eval epoch: 33
246
+ [ Wed Sep 14 21:02:29 2022 ] Mean test loss of 796 batches: 2.4076733589172363.
247
+ [ Wed Sep 14 21:02:29 2022 ] Top1: 46.05%
248
+ [ Wed Sep 14 21:02:30 2022 ] Top5: 77.02%
249
+ [ Wed Sep 14 21:02:30 2022 ] Training epoch: 34
250
+ [ Wed Sep 14 21:03:32 2022 ] Batch(80/243) done. Loss: 0.3287 lr:0.100000 network_time: 0.0325
251
+ [ Wed Sep 14 21:04:44 2022 ] Batch(180/243) done. Loss: 0.4008 lr:0.100000 network_time: 0.0434
252
+ [ Wed Sep 14 21:05:30 2022 ] Eval epoch: 34
253
+ [ Wed Sep 14 21:07:03 2022 ] Mean test loss of 796 batches: 2.933490037918091.
254
+ [ Wed Sep 14 21:07:03 2022 ] Top1: 42.85%
255
+ [ Wed Sep 14 21:07:04 2022 ] Top5: 74.90%
256
+ [ Wed Sep 14 21:07:04 2022 ] Training epoch: 35
257
+ [ Wed Sep 14 21:07:34 2022 ] Batch(37/243) done. Loss: 0.1931 lr:0.100000 network_time: 0.0271
258
+ [ Wed Sep 14 21:08:47 2022 ] Batch(137/243) done. Loss: 0.5033 lr:0.100000 network_time: 0.0258
259
+ [ Wed Sep 14 21:10:00 2022 ] Batch(237/243) done. Loss: 0.7209 lr:0.100000 network_time: 0.0233
260
+ [ Wed Sep 14 21:10:04 2022 ] Eval epoch: 35
261
+ [ Wed Sep 14 21:11:37 2022 ] Mean test loss of 796 batches: 2.4481589794158936.
262
+ [ Wed Sep 14 21:11:37 2022 ] Top1: 46.19%
263
+ [ Wed Sep 14 21:11:37 2022 ] Top5: 77.78%
264
+ [ Wed Sep 14 21:11:38 2022 ] Training epoch: 36
265
+ [ Wed Sep 14 21:12:49 2022 ] Batch(94/243) done. Loss: 0.3159 lr:0.100000 network_time: 0.0268
266
+ [ Wed Sep 14 21:14:02 2022 ] Batch(194/243) done. Loss: 0.3204 lr:0.100000 network_time: 0.0275
267
+ [ Wed Sep 14 21:14:37 2022 ] Eval epoch: 36
268
+ [ Wed Sep 14 21:16:10 2022 ] Mean test loss of 796 batches: 2.854403495788574.
269
+ [ Wed Sep 14 21:16:11 2022 ] Top1: 43.88%
270
+ [ Wed Sep 14 21:16:11 2022 ] Top5: 76.34%
271
+ [ Wed Sep 14 21:16:11 2022 ] Training epoch: 37
272
+ [ Wed Sep 14 21:16:52 2022 ] Batch(51/243) done. Loss: 0.3624 lr:0.100000 network_time: 0.0278
273
+ [ Wed Sep 14 21:18:05 2022 ] Batch(151/243) done. Loss: 0.3338 lr:0.100000 network_time: 0.0279
274
+ [ Wed Sep 14 21:19:11 2022 ] Eval epoch: 37
275
+ [ Wed Sep 14 21:20:45 2022 ] Mean test loss of 796 batches: 2.4435677528381348.
276
+ [ Wed Sep 14 21:20:45 2022 ] Top1: 46.90%
277
+ [ Wed Sep 14 21:20:46 2022 ] Top5: 79.11%
278
+ [ Wed Sep 14 21:20:46 2022 ] Training epoch: 38
279
+ [ Wed Sep 14 21:20:55 2022 ] Batch(8/243) done. Loss: 0.5167 lr:0.100000 network_time: 0.0284
280
+ [ Wed Sep 14 21:22:08 2022 ] Batch(108/243) done. Loss: 0.4417 lr:0.100000 network_time: 0.0313
281
+ [ Wed Sep 14 21:23:21 2022 ] Batch(208/243) done. Loss: 0.4753 lr:0.100000 network_time: 0.0275
282
+ [ Wed Sep 14 21:23:46 2022 ] Eval epoch: 38
283
+ [ Wed Sep 14 21:25:19 2022 ] Mean test loss of 796 batches: 2.551591396331787.
284
+ [ Wed Sep 14 21:25:20 2022 ] Top1: 46.89%
285
+ [ Wed Sep 14 21:25:20 2022 ] Top5: 77.84%
286
+ [ Wed Sep 14 21:25:20 2022 ] Training epoch: 39
287
+ [ Wed Sep 14 21:26:11 2022 ] Batch(65/243) done. Loss: 0.3768 lr:0.100000 network_time: 0.0279
288
+ [ Wed Sep 14 21:27:24 2022 ] Batch(165/243) done. Loss: 0.4046 lr:0.100000 network_time: 0.0275
289
+ [ Wed Sep 14 21:28:20 2022 ] Eval epoch: 39
290
+ [ Wed Sep 14 21:29:53 2022 ] Mean test loss of 796 batches: 2.5131995677948.
291
+ [ Wed Sep 14 21:29:54 2022 ] Top1: 45.77%
292
+ [ Wed Sep 14 21:29:54 2022 ] Top5: 78.91%
293
+ [ Wed Sep 14 21:29:54 2022 ] Training epoch: 40
294
+ [ Wed Sep 14 21:30:14 2022 ] Batch(22/243) done. Loss: 0.3872 lr:0.100000 network_time: 0.0280
295
+ [ Wed Sep 14 21:31:26 2022 ] Batch(122/243) done. Loss: 0.2603 lr:0.100000 network_time: 0.0273
296
+ [ Wed Sep 14 21:32:39 2022 ] Batch(222/243) done. Loss: 0.2829 lr:0.100000 network_time: 0.0272
297
+ [ Wed Sep 14 21:32:54 2022 ] Eval epoch: 40
298
+ [ Wed Sep 14 21:34:27 2022 ] Mean test loss of 796 batches: 2.946911573410034.
299
+ [ Wed Sep 14 21:34:28 2022 ] Top1: 42.64%
300
+ [ Wed Sep 14 21:34:28 2022 ] Top5: 74.96%
301
+ [ Wed Sep 14 21:34:28 2022 ] Training epoch: 41
302
+ [ Wed Sep 14 21:35:29 2022 ] Batch(79/243) done. Loss: 0.3274 lr:0.100000 network_time: 0.0298
303
+ [ Wed Sep 14 21:36:42 2022 ] Batch(179/243) done. Loss: 0.2956 lr:0.100000 network_time: 0.0259
304
+ [ Wed Sep 14 21:37:28 2022 ] Eval epoch: 41
305
+ [ Wed Sep 14 21:39:02 2022 ] Mean test loss of 796 batches: 2.6605191230773926.
306
+ [ Wed Sep 14 21:39:02 2022 ] Top1: 43.38%
307
+ [ Wed Sep 14 21:39:03 2022 ] Top5: 75.65%
308
+ [ Wed Sep 14 21:39:03 2022 ] Training epoch: 42
309
+ [ Wed Sep 14 21:39:33 2022 ] Batch(36/243) done. Loss: 0.2754 lr:0.100000 network_time: 0.0283
310
+ [ Wed Sep 14 21:40:45 2022 ] Batch(136/243) done. Loss: 0.3378 lr:0.100000 network_time: 0.0277
311
+ [ Wed Sep 14 21:41:58 2022 ] Batch(236/243) done. Loss: 0.3659 lr:0.100000 network_time: 0.0262
312
+ [ Wed Sep 14 21:42:03 2022 ] Eval epoch: 42
313
+ [ Wed Sep 14 21:43:36 2022 ] Mean test loss of 796 batches: 2.729982614517212.
314
+ [ Wed Sep 14 21:43:36 2022 ] Top1: 44.67%
315
+ [ Wed Sep 14 21:43:36 2022 ] Top5: 76.83%
316
+ [ Wed Sep 14 21:43:37 2022 ] Training epoch: 43
317
+ [ Wed Sep 14 21:44:48 2022 ] Batch(93/243) done. Loss: 0.2193 lr:0.100000 network_time: 0.0273
318
+ [ Wed Sep 14 21:46:00 2022 ] Batch(193/243) done. Loss: 0.4389 lr:0.100000 network_time: 0.0278
319
+ [ Wed Sep 14 21:46:36 2022 ] Eval epoch: 43
320
+ [ Wed Sep 14 21:48:10 2022 ] Mean test loss of 796 batches: 2.8778491020202637.
321
+ [ Wed Sep 14 21:48:10 2022 ] Top1: 41.51%
322
+ [ Wed Sep 14 21:48:11 2022 ] Top5: 73.55%
323
+ [ Wed Sep 14 21:48:11 2022 ] Training epoch: 44
324
+ [ Wed Sep 14 21:48:51 2022 ] Batch(50/243) done. Loss: 0.3618 lr:0.100000 network_time: 0.0273
325
+ [ Wed Sep 14 21:50:04 2022 ] Batch(150/243) done. Loss: 0.3614 lr:0.100000 network_time: 0.0284
326
+ [ Wed Sep 14 21:51:11 2022 ] Eval epoch: 44
327
+ [ Wed Sep 14 21:52:44 2022 ] Mean test loss of 796 batches: 2.6658835411071777.
328
+ [ Wed Sep 14 21:52:45 2022 ] Top1: 43.96%
329
+ [ Wed Sep 14 21:52:45 2022 ] Top5: 76.84%
330
+ [ Wed Sep 14 21:52:45 2022 ] Training epoch: 45
331
+ [ Wed Sep 14 21:52:54 2022 ] Batch(7/243) done. Loss: 0.2149 lr:0.100000 network_time: 0.0261
332
+ [ Wed Sep 14 21:54:07 2022 ] Batch(107/243) done. Loss: 0.4532 lr:0.100000 network_time: 0.0282
333
+ [ Wed Sep 14 21:55:19 2022 ] Batch(207/243) done. Loss: 0.2903 lr:0.100000 network_time: 0.0274
334
+ [ Wed Sep 14 21:55:45 2022 ] Eval epoch: 45
335
+ [ Wed Sep 14 21:57:18 2022 ] Mean test loss of 796 batches: 2.703622579574585.
336
+ [ Wed Sep 14 21:57:19 2022 ] Top1: 46.68%
337
+ [ Wed Sep 14 21:57:19 2022 ] Top5: 78.64%
338
+ [ Wed Sep 14 21:57:19 2022 ] Training epoch: 46
339
+ [ Wed Sep 14 21:58:09 2022 ] Batch(64/243) done. Loss: 0.1826 lr:0.100000 network_time: 0.0270
340
+ [ Wed Sep 14 21:59:22 2022 ] Batch(164/243) done. Loss: 0.3657 lr:0.100000 network_time: 0.0279
341
+ [ Wed Sep 14 22:00:19 2022 ] Eval epoch: 46
342
+ [ Wed Sep 14 22:01:52 2022 ] Mean test loss of 796 batches: 3.0341153144836426.
343
+ [ Wed Sep 14 22:01:52 2022 ] Top1: 42.14%
344
+ [ Wed Sep 14 22:01:53 2022 ] Top5: 72.97%
345
+ [ Wed Sep 14 22:01:53 2022 ] Training epoch: 47
346
+ [ Wed Sep 14 22:02:12 2022 ] Batch(21/243) done. Loss: 0.2316 lr:0.100000 network_time: 0.0262
347
+ [ Wed Sep 14 22:03:25 2022 ] Batch(121/243) done. Loss: 0.3387 lr:0.100000 network_time: 0.0313
348
+ [ Wed Sep 14 22:04:37 2022 ] Batch(221/243) done. Loss: 0.2902 lr:0.100000 network_time: 0.0281
349
+ [ Wed Sep 14 22:04:53 2022 ] Eval epoch: 47
350
+ [ Wed Sep 14 22:06:26 2022 ] Mean test loss of 796 batches: 2.495844602584839.
351
+ [ Wed Sep 14 22:06:26 2022 ] Top1: 46.27%
352
+ [ Wed Sep 14 22:06:27 2022 ] Top5: 78.13%
353
+ [ Wed Sep 14 22:06:27 2022 ] Training epoch: 48
354
+ [ Wed Sep 14 22:07:27 2022 ] Batch(78/243) done. Loss: 0.2657 lr:0.100000 network_time: 0.0297
355
+ [ Wed Sep 14 22:08:40 2022 ] Batch(178/243) done. Loss: 0.3470 lr:0.100000 network_time: 0.0312
356
+ [ Wed Sep 14 22:09:27 2022 ] Eval epoch: 48
357
+ [ Wed Sep 14 22:11:00 2022 ] Mean test loss of 796 batches: 2.7453110218048096.
358
+ [ Wed Sep 14 22:11:00 2022 ] Top1: 47.05%
359
+ [ Wed Sep 14 22:11:00 2022 ] Top5: 78.73%
360
+ [ Wed Sep 14 22:11:01 2022 ] Training epoch: 49
361
+ [ Wed Sep 14 22:11:29 2022 ] Batch(35/243) done. Loss: 0.3071 lr:0.100000 network_time: 0.0309
362
+ [ Wed Sep 14 22:12:42 2022 ] Batch(135/243) done. Loss: 0.3078 lr:0.100000 network_time: 0.0274
363
+ [ Wed Sep 14 22:13:55 2022 ] Batch(235/243) done. Loss: 0.4095 lr:0.100000 network_time: 0.0273
364
+ [ Wed Sep 14 22:14:00 2022 ] Eval epoch: 49
365
+ [ Wed Sep 14 22:15:33 2022 ] Mean test loss of 796 batches: 2.788438081741333.
366
+ [ Wed Sep 14 22:15:34 2022 ] Top1: 45.89%
367
+ [ Wed Sep 14 22:15:34 2022 ] Top5: 76.79%
368
+ [ Wed Sep 14 22:15:34 2022 ] Training epoch: 50
369
+ [ Wed Sep 14 22:16:45 2022 ] Batch(92/243) done. Loss: 0.3190 lr:0.100000 network_time: 0.0286
370
+ [ Wed Sep 14 22:17:57 2022 ] Batch(192/243) done. Loss: 0.3876 lr:0.100000 network_time: 0.0266
371
+ [ Wed Sep 14 22:18:34 2022 ] Eval epoch: 50
372
+ [ Wed Sep 14 22:20:07 2022 ] Mean test loss of 796 batches: 2.6796646118164062.
373
+ [ Wed Sep 14 22:20:07 2022 ] Top1: 46.75%
374
+ [ Wed Sep 14 22:20:08 2022 ] Top5: 77.72%
375
+ [ Wed Sep 14 22:20:08 2022 ] Training epoch: 51
376
+ [ Wed Sep 14 22:20:47 2022 ] Batch(49/243) done. Loss: 0.3189 lr:0.100000 network_time: 0.0269
377
+ [ Wed Sep 14 22:22:00 2022 ] Batch(149/243) done. Loss: 0.3324 lr:0.100000 network_time: 0.0317
378
+ [ Wed Sep 14 22:23:08 2022 ] Eval epoch: 51
379
+ [ Wed Sep 14 22:24:41 2022 ] Mean test loss of 796 batches: 2.946209669113159.
380
+ [ Wed Sep 14 22:24:42 2022 ] Top1: 44.48%
381
+ [ Wed Sep 14 22:24:42 2022 ] Top5: 75.47%
382
+ [ Wed Sep 14 22:24:42 2022 ] Training epoch: 52
383
+ [ Wed Sep 14 22:24:50 2022 ] Batch(6/243) done. Loss: 0.2887 lr:0.100000 network_time: 0.0283
384
+ [ Wed Sep 14 22:26:03 2022 ] Batch(106/243) done. Loss: 0.1677 lr:0.100000 network_time: 0.0299
385
+ [ Wed Sep 14 22:27:16 2022 ] Batch(206/243) done. Loss: 0.2791 lr:0.100000 network_time: 0.0266
386
+ [ Wed Sep 14 22:27:42 2022 ] Eval epoch: 52
387
+ [ Wed Sep 14 22:29:16 2022 ] Mean test loss of 796 batches: 2.9767680168151855.
388
+ [ Wed Sep 14 22:29:16 2022 ] Top1: 41.63%
389
+ [ Wed Sep 14 22:29:16 2022 ] Top5: 74.87%
390
+ [ Wed Sep 14 22:29:17 2022 ] Training epoch: 53
391
+ [ Wed Sep 14 22:30:06 2022 ] Batch(63/243) done. Loss: 0.2952 lr:0.100000 network_time: 0.0280
392
+ [ Wed Sep 14 22:31:19 2022 ] Batch(163/243) done. Loss: 0.4212 lr:0.100000 network_time: 0.0272
393
+ [ Wed Sep 14 22:32:16 2022 ] Eval epoch: 53
394
+ [ Wed Sep 14 22:33:50 2022 ] Mean test loss of 796 batches: 2.531792163848877.
395
+ [ Wed Sep 14 22:33:50 2022 ] Top1: 47.83%
396
+ [ Wed Sep 14 22:33:50 2022 ] Top5: 79.02%
397
+ [ Wed Sep 14 22:33:51 2022 ] Training epoch: 54
398
+ [ Wed Sep 14 22:34:09 2022 ] Batch(20/243) done. Loss: 0.1304 lr:0.100000 network_time: 0.0279
399
+ [ Wed Sep 14 22:35:21 2022 ] Batch(120/243) done. Loss: 0.2150 lr:0.100000 network_time: 0.0272
400
+ [ Wed Sep 14 22:36:34 2022 ] Batch(220/243) done. Loss: 0.3651 lr:0.100000 network_time: 0.0263
401
+ [ Wed Sep 14 22:36:50 2022 ] Eval epoch: 54
402
+ [ Wed Sep 14 22:38:23 2022 ] Mean test loss of 796 batches: 2.5210258960723877.
403
+ [ Wed Sep 14 22:38:23 2022 ] Top1: 47.76%
404
+ [ Wed Sep 14 22:38:24 2022 ] Top5: 78.98%
405
+ [ Wed Sep 14 22:38:24 2022 ] Training epoch: 55
406
+ [ Wed Sep 14 22:39:23 2022 ] Batch(77/243) done. Loss: 0.2751 lr:0.100000 network_time: 0.0326
407
+ [ Wed Sep 14 22:40:36 2022 ] Batch(177/243) done. Loss: 0.3404 lr:0.100000 network_time: 0.0312
408
+ [ Wed Sep 14 22:41:24 2022 ] Eval epoch: 55
409
+ [ Wed Sep 14 22:42:57 2022 ] Mean test loss of 796 batches: 2.994967460632324.
410
+ [ Wed Sep 14 22:42:57 2022 ] Top1: 43.31%
411
+ [ Wed Sep 14 22:42:58 2022 ] Top5: 75.28%
412
+ [ Wed Sep 14 22:42:58 2022 ] Training epoch: 56
413
+ [ Wed Sep 14 22:43:27 2022 ] Batch(34/243) done. Loss: 0.1377 lr:0.100000 network_time: 0.0277
414
+ [ Wed Sep 14 22:44:39 2022 ] Batch(134/243) done. Loss: 0.3377 lr:0.100000 network_time: 0.0270
415
+ [ Wed Sep 14 22:45:52 2022 ] Batch(234/243) done. Loss: 0.2269 lr:0.100000 network_time: 0.0270
416
+ [ Wed Sep 14 22:45:58 2022 ] Eval epoch: 56
417
+ [ Wed Sep 14 22:47:31 2022 ] Mean test loss of 796 batches: 2.9663078784942627.
418
+ [ Wed Sep 14 22:47:31 2022 ] Top1: 43.08%
419
+ [ Wed Sep 14 22:47:32 2022 ] Top5: 74.80%
420
+ [ Wed Sep 14 22:47:32 2022 ] Training epoch: 57
421
+ [ Wed Sep 14 22:48:42 2022 ] Batch(91/243) done. Loss: 0.4191 lr:0.100000 network_time: 0.0277
422
+ [ Wed Sep 14 22:49:55 2022 ] Batch(191/243) done. Loss: 0.2231 lr:0.100000 network_time: 0.0283
423
+ [ Wed Sep 14 22:50:32 2022 ] Eval epoch: 57
424
+ [ Wed Sep 14 22:52:05 2022 ] Mean test loss of 796 batches: 2.6926229000091553.
425
+ [ Wed Sep 14 22:52:05 2022 ] Top1: 47.01%
426
+ [ Wed Sep 14 22:52:06 2022 ] Top5: 77.91%
427
+ [ Wed Sep 14 22:52:06 2022 ] Training epoch: 58
428
+ [ Wed Sep 14 22:52:44 2022 ] Batch(48/243) done. Loss: 0.3491 lr:0.100000 network_time: 0.0275
429
+ [ Wed Sep 14 22:53:57 2022 ] Batch(148/243) done. Loss: 0.2899 lr:0.100000 network_time: 0.0272
430
+ [ Wed Sep 14 22:55:06 2022 ] Eval epoch: 58
431
+ [ Wed Sep 14 22:56:39 2022 ] Mean test loss of 796 batches: 2.6085290908813477.
432
+ [ Wed Sep 14 22:56:40 2022 ] Top1: 46.58%
433
+ [ Wed Sep 14 22:56:40 2022 ] Top5: 77.53%
434
+ [ Wed Sep 14 22:56:40 2022 ] Training epoch: 59
435
+ [ Wed Sep 14 22:56:47 2022 ] Batch(5/243) done. Loss: 0.3443 lr:0.100000 network_time: 0.0277
436
+ [ Wed Sep 14 22:58:00 2022 ] Batch(105/243) done. Loss: 0.2943 lr:0.100000 network_time: 0.0280
437
+ [ Wed Sep 14 22:59:13 2022 ] Batch(205/243) done. Loss: 0.3257 lr:0.100000 network_time: 0.0272
438
+ [ Wed Sep 14 22:59:40 2022 ] Eval epoch: 59
439
+ [ Wed Sep 14 23:01:13 2022 ] Mean test loss of 796 batches: 2.7719128131866455.
440
+ [ Wed Sep 14 23:01:14 2022 ] Top1: 42.90%
441
+ [ Wed Sep 14 23:01:14 2022 ] Top5: 76.71%
442
+ [ Wed Sep 14 23:01:15 2022 ] Training epoch: 60
443
+ [ Wed Sep 14 23:02:03 2022 ] Batch(62/243) done. Loss: 0.1431 lr:0.100000 network_time: 0.0277
444
+ [ Wed Sep 14 23:03:16 2022 ] Batch(162/243) done. Loss: 0.2946 lr:0.100000 network_time: 0.0275
445
+ [ Wed Sep 14 23:04:15 2022 ] Eval epoch: 60
446
+ [ Wed Sep 14 23:05:48 2022 ] Mean test loss of 796 batches: 2.693972587585449.
447
+ [ Wed Sep 14 23:05:48 2022 ] Top1: 46.52%
448
+ [ Wed Sep 14 23:05:49 2022 ] Top5: 79.53%
449
+ [ Wed Sep 14 23:05:49 2022 ] Training epoch: 61
450
+ [ Wed Sep 14 23:06:06 2022 ] Batch(19/243) done. Loss: 0.2575 lr:0.010000 network_time: 0.0274
451
+ [ Wed Sep 14 23:07:19 2022 ] Batch(119/243) done. Loss: 0.1340 lr:0.010000 network_time: 0.0308
452
+ [ Wed Sep 14 23:08:32 2022 ] Batch(219/243) done. Loss: 0.0472 lr:0.010000 network_time: 0.0275
453
+ [ Wed Sep 14 23:08:48 2022 ] Eval epoch: 61
454
+ [ Wed Sep 14 23:10:21 2022 ] Mean test loss of 796 batches: 2.258368492126465.
455
+ [ Wed Sep 14 23:10:22 2022 ] Top1: 52.89%
456
+ [ Wed Sep 14 23:10:22 2022 ] Top5: 82.95%
457
+ [ Wed Sep 14 23:10:23 2022 ] Training epoch: 62
458
+ [ Wed Sep 14 23:11:21 2022 ] Batch(76/243) done. Loss: 0.0563 lr:0.010000 network_time: 0.0273
459
+ [ Wed Sep 14 23:12:34 2022 ] Batch(176/243) done. Loss: 0.1186 lr:0.010000 network_time: 0.0261
460
+ [ Wed Sep 14 23:13:22 2022 ] Eval epoch: 62
461
+ [ Wed Sep 14 23:14:56 2022 ] Mean test loss of 796 batches: 2.279740810394287.
462
+ [ Wed Sep 14 23:14:56 2022 ] Top1: 53.11%
463
+ [ Wed Sep 14 23:14:56 2022 ] Top5: 82.97%
464
+ [ Wed Sep 14 23:14:57 2022 ] Training epoch: 63
465
+ [ Wed Sep 14 23:15:24 2022 ] Batch(33/243) done. Loss: 0.1131 lr:0.010000 network_time: 0.0274
466
+ [ Wed Sep 14 23:16:37 2022 ] Batch(133/243) done. Loss: 0.0521 lr:0.010000 network_time: 0.0284
467
+ [ Wed Sep 14 23:17:50 2022 ] Batch(233/243) done. Loss: 0.0699 lr:0.010000 network_time: 0.0274
468
+ [ Wed Sep 14 23:17:57 2022 ] Eval epoch: 63
469
+ [ Wed Sep 14 23:19:30 2022 ] Mean test loss of 796 batches: 2.2916676998138428.
470
+ [ Wed Sep 14 23:19:30 2022 ] Top1: 53.36%
471
+ [ Wed Sep 14 23:19:31 2022 ] Top5: 83.03%
472
+ [ Wed Sep 14 23:19:31 2022 ] Training epoch: 64
473
+ [ Wed Sep 14 23:20:40 2022 ] Batch(90/243) done. Loss: 0.0341 lr:0.010000 network_time: 0.0263
474
+ [ Wed Sep 14 23:21:53 2022 ] Batch(190/243) done. Loss: 0.0185 lr:0.010000 network_time: 0.0279
475
+ [ Wed Sep 14 23:22:31 2022 ] Eval epoch: 64
476
+ [ Wed Sep 14 23:24:04 2022 ] Mean test loss of 796 batches: 2.2869088649749756.
477
+ [ Wed Sep 14 23:24:04 2022 ] Top1: 53.79%
478
+ [ Wed Sep 14 23:24:05 2022 ] Top5: 83.33%
479
+ [ Wed Sep 14 23:24:05 2022 ] Training epoch: 65
480
+ [ Wed Sep 14 23:24:43 2022 ] Batch(47/243) done. Loss: 0.0168 lr:0.010000 network_time: 0.0316
481
+ [ Wed Sep 14 23:25:56 2022 ] Batch(147/243) done. Loss: 0.1102 lr:0.010000 network_time: 0.0275
482
+ [ Wed Sep 14 23:27:05 2022 ] Eval epoch: 65
483
+ [ Wed Sep 14 23:28:38 2022 ] Mean test loss of 796 batches: 2.2452545166015625.
484
+ [ Wed Sep 14 23:28:38 2022 ] Top1: 54.37%
485
+ [ Wed Sep 14 23:28:39 2022 ] Top5: 83.70%
486
+ [ Wed Sep 14 23:28:39 2022 ] Training epoch: 66
487
+ [ Wed Sep 14 23:28:45 2022 ] Batch(4/243) done. Loss: 0.0400 lr:0.010000 network_time: 0.0312
488
+ [ Wed Sep 14 23:29:58 2022 ] Batch(104/243) done. Loss: 0.0174 lr:0.010000 network_time: 0.0328
489
+ [ Wed Sep 14 23:31:11 2022 ] Batch(204/243) done. Loss: 0.0698 lr:0.010000 network_time: 0.0290
490
+ [ Wed Sep 14 23:31:39 2022 ] Eval epoch: 66
491
+ [ Wed Sep 14 23:33:11 2022 ] Mean test loss of 796 batches: 2.329601287841797.
492
+ [ Wed Sep 14 23:33:12 2022 ] Top1: 54.02%
493
+ [ Wed Sep 14 23:33:12 2022 ] Top5: 83.33%
494
+ [ Wed Sep 14 23:33:12 2022 ] Training epoch: 67
495
+ [ Wed Sep 14 23:34:01 2022 ] Batch(61/243) done. Loss: 0.0428 lr:0.010000 network_time: 0.0375
496
+ [ Wed Sep 14 23:35:13 2022 ] Batch(161/243) done. Loss: 0.0339 lr:0.010000 network_time: 0.0270
497
+ [ Wed Sep 14 23:36:13 2022 ] Eval epoch: 67
498
+ [ Wed Sep 14 23:37:46 2022 ] Mean test loss of 796 batches: 2.3496899604797363.
499
+ [ Wed Sep 14 23:37:46 2022 ] Top1: 53.81%
500
+ [ Wed Sep 14 23:37:46 2022 ] Top5: 83.23%
501
+ [ Wed Sep 14 23:37:47 2022 ] Training epoch: 68
502
+ [ Wed Sep 14 23:38:04 2022 ] Batch(18/243) done. Loss: 0.0441 lr:0.010000 network_time: 0.0321
503
+ [ Wed Sep 14 23:39:16 2022 ] Batch(118/243) done. Loss: 0.0339 lr:0.010000 network_time: 0.0282
504
+ [ Wed Sep 14 23:40:29 2022 ] Batch(218/243) done. Loss: 0.0333 lr:0.010000 network_time: 0.0314
505
+ [ Wed Sep 14 23:40:47 2022 ] Eval epoch: 68
506
+ [ Wed Sep 14 23:42:20 2022 ] Mean test loss of 796 batches: 2.2746517658233643.
507
+ [ Wed Sep 14 23:42:20 2022 ] Top1: 54.56%
508
+ [ Wed Sep 14 23:42:21 2022 ] Top5: 83.64%
509
+ [ Wed Sep 14 23:42:21 2022 ] Training epoch: 69
510
+ [ Wed Sep 14 23:43:19 2022 ] Batch(75/243) done. Loss: 0.0304 lr:0.010000 network_time: 0.0301
511
+ [ Wed Sep 14 23:44:32 2022 ] Batch(175/243) done. Loss: 0.0474 lr:0.010000 network_time: 0.0283
512
+ [ Wed Sep 14 23:45:21 2022 ] Eval epoch: 69
513
+ [ Wed Sep 14 23:46:54 2022 ] Mean test loss of 796 batches: 2.3214352130889893.
514
+ [ Wed Sep 14 23:46:55 2022 ] Top1: 54.18%
515
+ [ Wed Sep 14 23:46:55 2022 ] Top5: 83.58%
516
+ [ Wed Sep 14 23:46:56 2022 ] Training epoch: 70
517
+ [ Wed Sep 14 23:47:22 2022 ] Batch(32/243) done. Loss: 0.0169 lr:0.010000 network_time: 0.0282
518
+ [ Wed Sep 14 23:48:35 2022 ] Batch(132/243) done. Loss: 0.0310 lr:0.010000 network_time: 0.0441
519
+ [ Wed Sep 14 23:49:48 2022 ] Batch(232/243) done. Loss: 0.0086 lr:0.010000 network_time: 0.0269
520
+ [ Wed Sep 14 23:49:55 2022 ] Eval epoch: 70
521
+ [ Wed Sep 14 23:51:28 2022 ] Mean test loss of 796 batches: 2.321195125579834.
522
+ [ Wed Sep 14 23:51:29 2022 ] Top1: 54.32%
523
+ [ Wed Sep 14 23:51:29 2022 ] Top5: 83.51%
524
+ [ Wed Sep 14 23:51:29 2022 ] Training epoch: 71
525
+ [ Wed Sep 14 23:52:38 2022 ] Batch(89/243) done. Loss: 0.0126 lr:0.010000 network_time: 0.0265
526
+ [ Wed Sep 14 23:53:50 2022 ] Batch(189/243) done. Loss: 0.0313 lr:0.010000 network_time: 0.0274
527
+ [ Wed Sep 14 23:54:29 2022 ] Eval epoch: 71
528
+ [ Wed Sep 14 23:56:02 2022 ] Mean test loss of 796 batches: 2.401102304458618.
529
+ [ Wed Sep 14 23:56:02 2022 ] Top1: 53.23%
530
+ [ Wed Sep 14 23:56:03 2022 ] Top5: 82.83%
531
+ [ Wed Sep 14 23:56:03 2022 ] Training epoch: 72
532
+ [ Wed Sep 14 23:56:40 2022 ] Batch(46/243) done. Loss: 0.0246 lr:0.010000 network_time: 0.0278
533
+ [ Wed Sep 14 23:57:53 2022 ] Batch(146/243) done. Loss: 0.0394 lr:0.010000 network_time: 0.0282
534
+ [ Wed Sep 14 23:59:03 2022 ] Eval epoch: 72
535
+ [ Thu Sep 15 00:00:36 2022 ] Mean test loss of 796 batches: 2.392033576965332.
536
+ [ Thu Sep 15 00:00:37 2022 ] Top1: 54.03%
537
+ [ Thu Sep 15 00:00:37 2022 ] Top5: 83.16%
538
+ [ Thu Sep 15 00:00:37 2022 ] Training epoch: 73
539
+ [ Thu Sep 15 00:00:43 2022 ] Batch(3/243) done. Loss: 0.0221 lr:0.010000 network_time: 0.0276
540
+ [ Thu Sep 15 00:01:56 2022 ] Batch(103/243) done. Loss: 0.0159 lr:0.010000 network_time: 0.0272
541
+ [ Thu Sep 15 00:03:08 2022 ] Batch(203/243) done. Loss: 0.0126 lr:0.010000 network_time: 0.0346
542
+ [ Thu Sep 15 00:03:37 2022 ] Eval epoch: 73
543
+ [ Thu Sep 15 00:05:10 2022 ] Mean test loss of 796 batches: 2.3562662601470947.
544
+ [ Thu Sep 15 00:05:10 2022 ] Top1: 54.12%
545
+ [ Thu Sep 15 00:05:11 2022 ] Top5: 83.33%
546
+ [ Thu Sep 15 00:05:11 2022 ] Training epoch: 74
547
+ [ Thu Sep 15 00:05:58 2022 ] Batch(60/243) done. Loss: 0.0294 lr:0.010000 network_time: 0.0278
548
+ [ Thu Sep 15 00:07:11 2022 ] Batch(160/243) done. Loss: 0.0119 lr:0.010000 network_time: 0.0279
549
+ [ Thu Sep 15 00:08:11 2022 ] Eval epoch: 74
550
+ [ Thu Sep 15 00:09:43 2022 ] Mean test loss of 796 batches: 2.4100046157836914.
551
+ [ Thu Sep 15 00:09:44 2022 ] Top1: 53.70%
552
+ [ Thu Sep 15 00:09:44 2022 ] Top5: 83.22%
553
+ [ Thu Sep 15 00:09:44 2022 ] Training epoch: 75
554
+ [ Thu Sep 15 00:10:00 2022 ] Batch(17/243) done. Loss: 0.0336 lr:0.010000 network_time: 0.0269
555
+ [ Thu Sep 15 00:11:13 2022 ] Batch(117/243) done. Loss: 0.0248 lr:0.010000 network_time: 0.0274
556
+ [ Thu Sep 15 00:12:26 2022 ] Batch(217/243) done. Loss: 0.0063 lr:0.010000 network_time: 0.0320
557
+ [ Thu Sep 15 00:12:44 2022 ] Eval epoch: 75
558
+ [ Thu Sep 15 00:14:17 2022 ] Mean test loss of 796 batches: 2.353419065475464.
559
+ [ Thu Sep 15 00:14:17 2022 ] Top1: 54.40%
560
+ [ Thu Sep 15 00:14:18 2022 ] Top5: 83.38%
561
+ [ Thu Sep 15 00:14:18 2022 ] Training epoch: 76
562
+ [ Thu Sep 15 00:15:15 2022 ] Batch(74/243) done. Loss: 0.0206 lr:0.010000 network_time: 0.0268
563
+ [ Thu Sep 15 00:16:28 2022 ] Batch(174/243) done. Loss: 0.0125 lr:0.010000 network_time: 0.0327
564
+ [ Thu Sep 15 00:17:18 2022 ] Eval epoch: 76
565
+ [ Thu Sep 15 00:18:51 2022 ] Mean test loss of 796 batches: 2.3497185707092285.
566
+ [ Thu Sep 15 00:18:51 2022 ] Top1: 54.24%
567
+ [ Thu Sep 15 00:18:52 2022 ] Top5: 83.60%
568
+ [ Thu Sep 15 00:18:52 2022 ] Training epoch: 77
569
+ [ Thu Sep 15 00:19:18 2022 ] Batch(31/243) done. Loss: 0.0244 lr:0.010000 network_time: 0.0286
570
+ [ Thu Sep 15 00:20:31 2022 ] Batch(131/243) done. Loss: 0.0173 lr:0.010000 network_time: 0.0315
571
+ [ Thu Sep 15 00:21:44 2022 ] Batch(231/243) done. Loss: 0.0246 lr:0.010000 network_time: 0.0267
572
+ [ Thu Sep 15 00:21:52 2022 ] Eval epoch: 77
573
+ [ Thu Sep 15 00:23:25 2022 ] Mean test loss of 796 batches: 2.3904168605804443.
574
+ [ Thu Sep 15 00:23:25 2022 ] Top1: 54.23%
575
+ [ Thu Sep 15 00:23:26 2022 ] Top5: 83.21%
576
+ [ Thu Sep 15 00:23:26 2022 ] Training epoch: 78
577
+ [ Thu Sep 15 00:24:33 2022 ] Batch(88/243) done. Loss: 0.0248 lr:0.010000 network_time: 0.0272
578
+ [ Thu Sep 15 00:25:46 2022 ] Batch(188/243) done. Loss: 0.0154 lr:0.010000 network_time: 0.0273
579
+ [ Thu Sep 15 00:26:25 2022 ] Eval epoch: 78
580
+ [ Thu Sep 15 00:27:58 2022 ] Mean test loss of 796 batches: 2.406836986541748.
581
+ [ Thu Sep 15 00:27:59 2022 ] Top1: 54.40%
582
+ [ Thu Sep 15 00:27:59 2022 ] Top5: 83.45%
583
+ [ Thu Sep 15 00:27:59 2022 ] Training epoch: 79
584
+ [ Thu Sep 15 00:28:35 2022 ] Batch(45/243) done. Loss: 0.0187 lr:0.010000 network_time: 0.0270
585
+ [ Thu Sep 15 00:29:48 2022 ] Batch(145/243) done. Loss: 0.0228 lr:0.010000 network_time: 0.0348
586
+ [ Thu Sep 15 00:30:59 2022 ] Eval epoch: 79
587
+ [ Thu Sep 15 00:32:32 2022 ] Mean test loss of 796 batches: 2.3902182579040527.
588
+ [ Thu Sep 15 00:32:32 2022 ] Top1: 54.54%
589
+ [ Thu Sep 15 00:32:33 2022 ] Top5: 83.39%
590
+ [ Thu Sep 15 00:32:33 2022 ] Training epoch: 80
591
+ [ Thu Sep 15 00:32:38 2022 ] Batch(2/243) done. Loss: 0.0107 lr:0.010000 network_time: 0.0275
592
+ [ Thu Sep 15 00:33:51 2022 ] Batch(102/243) done. Loss: 0.0209 lr:0.010000 network_time: 0.0309
593
+ [ Thu Sep 15 00:35:04 2022 ] Batch(202/243) done. Loss: 0.0197 lr:0.010000 network_time: 0.0264
594
+ [ Thu Sep 15 00:35:33 2022 ] Eval epoch: 80
595
+ [ Thu Sep 15 00:37:05 2022 ] Mean test loss of 796 batches: 2.3967974185943604.
596
+ [ Thu Sep 15 00:37:06 2022 ] Top1: 54.35%
597
+ [ Thu Sep 15 00:37:06 2022 ] Top5: 83.47%
598
+ [ Thu Sep 15 00:37:06 2022 ] Training epoch: 81
599
+ [ Thu Sep 15 00:37:53 2022 ] Batch(59/243) done. Loss: 0.0184 lr:0.001000 network_time: 0.0276
600
+ [ Thu Sep 15 00:39:06 2022 ] Batch(159/243) done. Loss: 0.0099 lr:0.001000 network_time: 0.0268
601
+ [ Thu Sep 15 00:40:06 2022 ] Eval epoch: 81
602
+ [ Thu Sep 15 00:41:38 2022 ] Mean test loss of 796 batches: 2.3979151248931885.
603
+ [ Thu Sep 15 00:41:39 2022 ] Top1: 54.10%
604
+ [ Thu Sep 15 00:41:39 2022 ] Top5: 83.37%
605
+ [ Thu Sep 15 00:41:39 2022 ] Training epoch: 82
606
+ [ Thu Sep 15 00:41:55 2022 ] Batch(16/243) done. Loss: 0.0092 lr:0.001000 network_time: 0.0291
607
+ [ Thu Sep 15 00:43:08 2022 ] Batch(116/243) done. Loss: 0.0069 lr:0.001000 network_time: 0.0308
608
+ [ Thu Sep 15 00:44:20 2022 ] Batch(216/243) done. Loss: 0.0327 lr:0.001000 network_time: 0.0267
609
+ [ Thu Sep 15 00:44:40 2022 ] Eval epoch: 82
610
+ [ Thu Sep 15 00:46:12 2022 ] Mean test loss of 796 batches: 2.402644395828247.
611
+ [ Thu Sep 15 00:46:12 2022 ] Top1: 54.29%
612
+ [ Thu Sep 15 00:46:13 2022 ] Top5: 83.50%
613
+ [ Thu Sep 15 00:46:13 2022 ] Training epoch: 83
614
+ [ Thu Sep 15 00:47:09 2022 ] Batch(73/243) done. Loss: 0.0706 lr:0.001000 network_time: 0.0351
615
+ [ Thu Sep 15 00:48:22 2022 ] Batch(173/243) done. Loss: 0.0146 lr:0.001000 network_time: 0.0319
616
+ [ Thu Sep 15 00:49:13 2022 ] Eval epoch: 83
617
+ [ Thu Sep 15 00:50:45 2022 ] Mean test loss of 796 batches: 2.4071590900421143.
618
+ [ Thu Sep 15 00:50:46 2022 ] Top1: 54.04%
619
+ [ Thu Sep 15 00:50:46 2022 ] Top5: 83.16%
620
+ [ Thu Sep 15 00:50:46 2022 ] Training epoch: 84
621
+ [ Thu Sep 15 00:51:12 2022 ] Batch(30/243) done. Loss: 0.0100 lr:0.001000 network_time: 0.0293
622
+ [ Thu Sep 15 00:52:24 2022 ] Batch(130/243) done. Loss: 0.0056 lr:0.001000 network_time: 0.0277
623
+ [ Thu Sep 15 00:53:37 2022 ] Batch(230/243) done. Loss: 0.0132 lr:0.001000 network_time: 0.0276
624
+ [ Thu Sep 15 00:53:46 2022 ] Eval epoch: 84
625
+ [ Thu Sep 15 00:55:19 2022 ] Mean test loss of 796 batches: 2.37986159324646.
626
+ [ Thu Sep 15 00:55:19 2022 ] Top1: 54.50%
627
+ [ Thu Sep 15 00:55:20 2022 ] Top5: 83.56%
628
+ [ Thu Sep 15 00:55:20 2022 ] Training epoch: 85
629
+ [ Thu Sep 15 00:56:27 2022 ] Batch(87/243) done. Loss: 0.0309 lr:0.001000 network_time: 0.0269
630
+ [ Thu Sep 15 00:57:40 2022 ] Batch(187/243) done. Loss: 0.0386 lr:0.001000 network_time: 0.0278
631
+ [ Thu Sep 15 00:58:20 2022 ] Eval epoch: 85
632
+ [ Thu Sep 15 00:59:53 2022 ] Mean test loss of 796 batches: 2.370398759841919.
633
+ [ Thu Sep 15 00:59:53 2022 ] Top1: 54.49%
634
+ [ Thu Sep 15 00:59:54 2022 ] Top5: 83.59%
635
+ [ Thu Sep 15 00:59:54 2022 ] Training epoch: 86
636
+ [ Thu Sep 15 01:00:30 2022 ] Batch(44/243) done. Loss: 0.0253 lr:0.001000 network_time: 0.0271
637
+ [ Thu Sep 15 01:01:43 2022 ] Batch(144/243) done. Loss: 0.0103 lr:0.001000 network_time: 0.0267
638
+ [ Thu Sep 15 01:02:54 2022 ] Eval epoch: 86
639
+ [ Thu Sep 15 01:04:27 2022 ] Mean test loss of 796 batches: 2.41347599029541.
640
+ [ Thu Sep 15 01:04:27 2022 ] Top1: 54.08%
641
+ [ Thu Sep 15 01:04:28 2022 ] Top5: 83.20%
642
+ [ Thu Sep 15 01:04:28 2022 ] Training epoch: 87
643
+ [ Thu Sep 15 01:04:32 2022 ] Batch(1/243) done. Loss: 0.0237 lr:0.001000 network_time: 0.0335
644
+ [ Thu Sep 15 01:05:45 2022 ] Batch(101/243) done. Loss: 0.0139 lr:0.001000 network_time: 0.0310
645
+ [ Thu Sep 15 01:06:58 2022 ] Batch(201/243) done. Loss: 0.0094 lr:0.001000 network_time: 0.0274
646
+ [ Thu Sep 15 01:07:28 2022 ] Eval epoch: 87
647
+ [ Thu Sep 15 01:09:01 2022 ] Mean test loss of 796 batches: 2.3874456882476807.
648
+ [ Thu Sep 15 01:09:01 2022 ] Top1: 54.32%
649
+ [ Thu Sep 15 01:09:01 2022 ] Top5: 83.40%
650
+ [ Thu Sep 15 01:09:02 2022 ] Training epoch: 88
651
+ [ Thu Sep 15 01:09:47 2022 ] Batch(58/243) done. Loss: 0.0042 lr:0.001000 network_time: 0.0244
652
+ [ Thu Sep 15 01:11:00 2022 ] Batch(158/243) done. Loss: 0.0253 lr:0.001000 network_time: 0.0332
653
+ [ Thu Sep 15 01:12:01 2022 ] Eval epoch: 88
654
+ [ Thu Sep 15 01:13:34 2022 ] Mean test loss of 796 batches: 2.40490460395813.
655
+ [ Thu Sep 15 01:13:35 2022 ] Top1: 54.15%
656
+ [ Thu Sep 15 01:13:35 2022 ] Top5: 83.23%
657
+ [ Thu Sep 15 01:13:35 2022 ] Training epoch: 89
658
+ [ Thu Sep 15 01:13:50 2022 ] Batch(15/243) done. Loss: 0.0143 lr:0.001000 network_time: 0.0278
659
+ [ Thu Sep 15 01:15:02 2022 ] Batch(115/243) done. Loss: 0.0147 lr:0.001000 network_time: 0.0268
660
+ [ Thu Sep 15 01:16:15 2022 ] Batch(215/243) done. Loss: 0.0171 lr:0.001000 network_time: 0.0322
661
+ [ Thu Sep 15 01:16:35 2022 ] Eval epoch: 89
662
+ [ Thu Sep 15 01:18:08 2022 ] Mean test loss of 796 batches: 2.404038429260254.
663
+ [ Thu Sep 15 01:18:08 2022 ] Top1: 54.38%
664
+ [ Thu Sep 15 01:18:08 2022 ] Top5: 83.37%
665
+ [ Thu Sep 15 01:18:09 2022 ] Training epoch: 90
666
+ [ Thu Sep 15 01:19:04 2022 ] Batch(72/243) done. Loss: 0.0041 lr:0.001000 network_time: 0.0265
667
+ [ Thu Sep 15 01:20:17 2022 ] Batch(172/243) done. Loss: 0.0216 lr:0.001000 network_time: 0.0229
668
+ [ Thu Sep 15 01:21:08 2022 ] Eval epoch: 90
669
+ [ Thu Sep 15 01:22:42 2022 ] Mean test loss of 796 batches: 2.388312578201294.
670
+ [ Thu Sep 15 01:22:42 2022 ] Top1: 54.44%
671
+ [ Thu Sep 15 01:22:42 2022 ] Top5: 83.45%
672
+ [ Thu Sep 15 01:22:42 2022 ] Training epoch: 91
673
+ [ Thu Sep 15 01:23:07 2022 ] Batch(29/243) done. Loss: 0.0074 lr:0.001000 network_time: 0.0278
674
+ [ Thu Sep 15 01:24:20 2022 ] Batch(129/243) done. Loss: 0.0215 lr:0.001000 network_time: 0.0301
675
+ [ Thu Sep 15 01:25:33 2022 ] Batch(229/243) done. Loss: 0.0123 lr:0.001000 network_time: 0.0303
676
+ [ Thu Sep 15 01:25:42 2022 ] Eval epoch: 91
677
+ [ Thu Sep 15 01:27:15 2022 ] Mean test loss of 796 batches: 2.387159824371338.
678
+ [ Thu Sep 15 01:27:15 2022 ] Top1: 54.49%
679
+ [ Thu Sep 15 01:27:16 2022 ] Top5: 83.50%
680
+ [ Thu Sep 15 01:27:16 2022 ] Training epoch: 92
681
+ [ Thu Sep 15 01:28:22 2022 ] Batch(86/243) done. Loss: 0.0077 lr:0.001000 network_time: 0.0285
682
+ [ Thu Sep 15 01:29:35 2022 ] Batch(186/243) done. Loss: 0.0153 lr:0.001000 network_time: 0.0272
683
+ [ Thu Sep 15 01:30:16 2022 ] Eval epoch: 92
684
+ [ Thu Sep 15 01:31:49 2022 ] Mean test loss of 796 batches: 2.400331735610962.
685
+ [ Thu Sep 15 01:31:49 2022 ] Top1: 54.54%
686
+ [ Thu Sep 15 01:31:49 2022 ] Top5: 83.64%
687
+ [ Thu Sep 15 01:31:50 2022 ] Training epoch: 93
688
+ [ Thu Sep 15 01:32:24 2022 ] Batch(43/243) done. Loss: 0.0090 lr:0.001000 network_time: 0.0501
689
+ [ Thu Sep 15 01:33:37 2022 ] Batch(143/243) done. Loss: 0.0139 lr:0.001000 network_time: 0.0314
690
+ [ Thu Sep 15 01:34:49 2022 ] Eval epoch: 93
691
+ [ Thu Sep 15 01:36:22 2022 ] Mean test loss of 796 batches: 2.417159080505371.
692
+ [ Thu Sep 15 01:36:22 2022 ] Top1: 54.29%
693
+ [ Thu Sep 15 01:36:23 2022 ] Top5: 83.38%
694
+ [ Thu Sep 15 01:36:23 2022 ] Training epoch: 94
695
+ [ Thu Sep 15 01:36:27 2022 ] Batch(0/243) done. Loss: 0.0224 lr:0.001000 network_time: 0.0792
696
+ [ Thu Sep 15 01:37:40 2022 ] Batch(100/243) done. Loss: 0.0111 lr:0.001000 network_time: 0.0296
697
+ [ Thu Sep 15 01:38:52 2022 ] Batch(200/243) done. Loss: 0.0087 lr:0.001000 network_time: 0.0284
698
+ [ Thu Sep 15 01:39:23 2022 ] Eval epoch: 94
699
+ [ Thu Sep 15 01:40:56 2022 ] Mean test loss of 796 batches: 2.390749216079712.
700
+ [ Thu Sep 15 01:40:56 2022 ] Top1: 54.04%
701
+ [ Thu Sep 15 01:40:57 2022 ] Top5: 83.39%
702
+ [ Thu Sep 15 01:40:57 2022 ] Training epoch: 95
703
+ [ Thu Sep 15 01:41:42 2022 ] Batch(57/243) done. Loss: 0.0137 lr:0.001000 network_time: 0.0269
704
+ [ Thu Sep 15 01:42:55 2022 ] Batch(157/243) done. Loss: 0.0110 lr:0.001000 network_time: 0.0282
705
+ [ Thu Sep 15 01:43:57 2022 ] Eval epoch: 95
706
+ [ Thu Sep 15 01:45:29 2022 ] Mean test loss of 796 batches: 2.38088321685791.
707
+ [ Thu Sep 15 01:45:30 2022 ] Top1: 54.41%
708
+ [ Thu Sep 15 01:45:30 2022 ] Top5: 83.42%
709
+ [ Thu Sep 15 01:45:30 2022 ] Training epoch: 96
710
+ [ Thu Sep 15 01:45:44 2022 ] Batch(14/243) done. Loss: 0.0109 lr:0.001000 network_time: 0.0302
711
+ [ Thu Sep 15 01:46:57 2022 ] Batch(114/243) done. Loss: 0.0126 lr:0.001000 network_time: 0.0279
712
+ [ Thu Sep 15 01:48:09 2022 ] Batch(214/243) done. Loss: 0.0165 lr:0.001000 network_time: 0.0282
713
+ [ Thu Sep 15 01:48:30 2022 ] Eval epoch: 96
714
+ [ Thu Sep 15 01:50:03 2022 ] Mean test loss of 796 batches: 2.4235730171203613.
715
+ [ Thu Sep 15 01:50:03 2022 ] Top1: 54.09%
716
+ [ Thu Sep 15 01:50:03 2022 ] Top5: 83.24%
717
+ [ Thu Sep 15 01:50:04 2022 ] Training epoch: 97
718
+ [ Thu Sep 15 01:50:59 2022 ] Batch(71/243) done. Loss: 0.0114 lr:0.001000 network_time: 0.0276
719
+ [ Thu Sep 15 01:52:12 2022 ] Batch(171/243) done. Loss: 0.0131 lr:0.001000 network_time: 0.0273
720
+ [ Thu Sep 15 01:53:03 2022 ] Eval epoch: 97
721
+ [ Thu Sep 15 01:54:36 2022 ] Mean test loss of 796 batches: 2.4135332107543945.
722
+ [ Thu Sep 15 01:54:37 2022 ] Top1: 54.24%
723
+ [ Thu Sep 15 01:54:37 2022 ] Top5: 83.38%
724
+ [ Thu Sep 15 01:54:37 2022 ] Training epoch: 98
725
+ [ Thu Sep 15 01:55:01 2022 ] Batch(28/243) done. Loss: 0.0141 lr:0.001000 network_time: 0.0267
726
+ [ Thu Sep 15 01:56:14 2022 ] Batch(128/243) done. Loss: 0.0163 lr:0.001000 network_time: 0.0277
727
+ [ Thu Sep 15 01:57:27 2022 ] Batch(228/243) done. Loss: 0.0186 lr:0.001000 network_time: 0.0282
728
+ [ Thu Sep 15 01:57:37 2022 ] Eval epoch: 98
729
+ [ Thu Sep 15 01:59:10 2022 ] Mean test loss of 796 batches: 2.4122824668884277.
730
+ [ Thu Sep 15 01:59:10 2022 ] Top1: 54.46%
731
+ [ Thu Sep 15 01:59:10 2022 ] Top5: 83.42%
732
+ [ Thu Sep 15 01:59:11 2022 ] Training epoch: 99
733
+ [ Thu Sep 15 02:00:16 2022 ] Batch(85/243) done. Loss: 0.0143 lr:0.001000 network_time: 0.0274
734
+ [ Thu Sep 15 02:01:29 2022 ] Batch(185/243) done. Loss: 0.0190 lr:0.001000 network_time: 0.0283
735
+ [ Thu Sep 15 02:02:11 2022 ] Eval epoch: 99
736
+ [ Thu Sep 15 02:03:43 2022 ] Mean test loss of 796 batches: 2.417081117630005.
737
+ [ Thu Sep 15 02:03:44 2022 ] Top1: 54.22%
738
+ [ Thu Sep 15 02:03:44 2022 ] Top5: 83.36%
739
+ [ Thu Sep 15 02:03:44 2022 ] Training epoch: 100
740
+ [ Thu Sep 15 02:04:18 2022 ] Batch(42/243) done. Loss: 0.0297 lr:0.001000 network_time: 0.0278
741
+ [ Thu Sep 15 02:05:31 2022 ] Batch(142/243) done. Loss: 0.0040 lr:0.001000 network_time: 0.0267
742
+ [ Thu Sep 15 02:06:44 2022 ] Batch(242/243) done. Loss: 0.0077 lr:0.001000 network_time: 0.0278
743
+ [ Thu Sep 15 02:06:44 2022 ] Eval epoch: 100
744
+ [ Thu Sep 15 02:08:16 2022 ] Mean test loss of 796 batches: 2.3927650451660156.
745
+ [ Thu Sep 15 02:08:17 2022 ] Top1: 54.51%
746
+ [ Thu Sep 15 02:08:17 2022 ] Top5: 83.63%
747
+ [ Thu Sep 15 02:08:17 2022 ] Training epoch: 101
748
+ [ Thu Sep 15 02:09:33 2022 ] Batch(99/243) done. Loss: 0.0114 lr:0.000100 network_time: 0.0298
749
+ [ Thu Sep 15 02:10:46 2022 ] Batch(199/243) done. Loss: 0.0074 lr:0.000100 network_time: 0.0264
750
+ [ Thu Sep 15 02:11:17 2022 ] Eval epoch: 101
751
+ [ Thu Sep 15 02:12:50 2022 ] Mean test loss of 796 batches: 2.3835721015930176.
752
+ [ Thu Sep 15 02:12:50 2022 ] Top1: 54.72%
753
+ [ Thu Sep 15 02:12:50 2022 ] Top5: 83.62%
754
+ [ Thu Sep 15 02:12:51 2022 ] Training epoch: 102
755
+ [ Thu Sep 15 02:13:35 2022 ] Batch(56/243) done. Loss: 0.0081 lr:0.000100 network_time: 0.0329
756
+ [ Thu Sep 15 02:14:48 2022 ] Batch(156/243) done. Loss: 0.0097 lr:0.000100 network_time: 0.0268
757
+ [ Thu Sep 15 02:15:50 2022 ] Eval epoch: 102
758
+ [ Thu Sep 15 02:17:23 2022 ] Mean test loss of 796 batches: 2.402935028076172.
759
+ [ Thu Sep 15 02:17:23 2022 ] Top1: 54.45%
760
+ [ Thu Sep 15 02:17:24 2022 ] Top5: 83.50%
761
+ [ Thu Sep 15 02:17:24 2022 ] Training epoch: 103
762
+ [ Thu Sep 15 02:17:37 2022 ] Batch(13/243) done. Loss: 0.0062 lr:0.000100 network_time: 0.0318
763
+ [ Thu Sep 15 02:18:50 2022 ] Batch(113/243) done. Loss: 0.0048 lr:0.000100 network_time: 0.0277
764
+ [ Thu Sep 15 02:20:03 2022 ] Batch(213/243) done. Loss: 0.0063 lr:0.000100 network_time: 0.0273
765
+ [ Thu Sep 15 02:20:24 2022 ] Eval epoch: 103
766
+ [ Thu Sep 15 02:21:57 2022 ] Mean test loss of 796 batches: 2.4096858501434326.
767
+ [ Thu Sep 15 02:21:57 2022 ] Top1: 53.87%
768
+ [ Thu Sep 15 02:21:57 2022 ] Top5: 83.20%
769
+ [ Thu Sep 15 02:21:58 2022 ] Training epoch: 104
770
+ [ Thu Sep 15 02:22:52 2022 ] Batch(70/243) done. Loss: 0.0091 lr:0.000100 network_time: 0.0273
771
+ [ Thu Sep 15 02:24:05 2022 ] Batch(170/243) done. Loss: 0.0122 lr:0.000100 network_time: 0.0311
772
+ [ Thu Sep 15 02:24:57 2022 ] Eval epoch: 104
773
+ [ Thu Sep 15 02:26:30 2022 ] Mean test loss of 796 batches: 2.4239163398742676.
774
+ [ Thu Sep 15 02:26:30 2022 ] Top1: 54.40%
775
+ [ Thu Sep 15 02:26:31 2022 ] Top5: 83.51%
776
+ [ Thu Sep 15 02:26:31 2022 ] Training epoch: 105
777
+ [ Thu Sep 15 02:26:54 2022 ] Batch(27/243) done. Loss: 0.0324 lr:0.000100 network_time: 0.0311
778
+ [ Thu Sep 15 02:28:07 2022 ] Batch(127/243) done. Loss: 0.0078 lr:0.000100 network_time: 0.0313
779
+ [ Thu Sep 15 02:29:20 2022 ] Batch(227/243) done. Loss: 0.0403 lr:0.000100 network_time: 0.0328
780
+ [ Thu Sep 15 02:29:31 2022 ] Eval epoch: 105
781
+ [ Thu Sep 15 02:31:03 2022 ] Mean test loss of 796 batches: 2.446220874786377.
782
+ [ Thu Sep 15 02:31:04 2022 ] Top1: 53.80%
783
+ [ Thu Sep 15 02:31:04 2022 ] Top5: 83.13%
784
+ [ Thu Sep 15 02:31:04 2022 ] Training epoch: 106
785
+ [ Thu Sep 15 02:32:09 2022 ] Batch(84/243) done. Loss: 0.0101 lr:0.000100 network_time: 0.0266
786
+ [ Thu Sep 15 02:33:22 2022 ] Batch(184/243) done. Loss: 0.0108 lr:0.000100 network_time: 0.0369
787
+ [ Thu Sep 15 02:34:04 2022 ] Eval epoch: 106
788
+ [ Thu Sep 15 02:35:37 2022 ] Mean test loss of 796 batches: 2.384615659713745.
789
+ [ Thu Sep 15 02:35:38 2022 ] Top1: 54.44%
790
+ [ Thu Sep 15 02:35:38 2022 ] Top5: 83.58%
791
+ [ Thu Sep 15 02:35:38 2022 ] Training epoch: 107
792
+ [ Thu Sep 15 02:36:12 2022 ] Batch(41/243) done. Loss: 0.0059 lr:0.000100 network_time: 0.0267
793
+ [ Thu Sep 15 02:37:24 2022 ] Batch(141/243) done. Loss: 0.0183 lr:0.000100 network_time: 0.0275
794
+ [ Thu Sep 15 02:38:37 2022 ] Batch(241/243) done. Loss: 0.0087 lr:0.000100 network_time: 0.0357
795
+ [ Thu Sep 15 02:38:38 2022 ] Eval epoch: 107
796
+ [ Thu Sep 15 02:40:11 2022 ] Mean test loss of 796 batches: 2.3720390796661377.
797
+ [ Thu Sep 15 02:40:12 2022 ] Top1: 54.51%
798
+ [ Thu Sep 15 02:40:12 2022 ] Top5: 83.78%
799
+ [ Thu Sep 15 02:40:12 2022 ] Training epoch: 108
800
+ [ Thu Sep 15 02:41:27 2022 ] Batch(98/243) done. Loss: 0.0098 lr:0.000100 network_time: 0.0312
801
+ [ Thu Sep 15 02:42:40 2022 ] Batch(198/243) done. Loss: 0.0157 lr:0.000100 network_time: 0.0335
802
+ [ Thu Sep 15 02:43:12 2022 ] Eval epoch: 108
803
+ [ Thu Sep 15 02:44:45 2022 ] Mean test loss of 796 batches: 2.4088966846466064.
804
+ [ Thu Sep 15 02:44:45 2022 ] Top1: 54.23%
805
+ [ Thu Sep 15 02:44:46 2022 ] Top5: 83.49%
806
+ [ Thu Sep 15 02:44:46 2022 ] Training epoch: 109
807
+ [ Thu Sep 15 02:45:29 2022 ] Batch(55/243) done. Loss: 0.0081 lr:0.000100 network_time: 0.0278
808
+ [ Thu Sep 15 02:46:42 2022 ] Batch(155/243) done. Loss: 0.0096 lr:0.000100 network_time: 0.0290
809
+ [ Thu Sep 15 02:47:45 2022 ] Eval epoch: 109
810
+ [ Thu Sep 15 02:49:18 2022 ] Mean test loss of 796 batches: 2.4085729122161865.
811
+ [ Thu Sep 15 02:49:19 2022 ] Top1: 54.15%
812
+ [ Thu Sep 15 02:49:19 2022 ] Top5: 83.47%
813
+ [ Thu Sep 15 02:49:19 2022 ] Training epoch: 110
814
+ [ Thu Sep 15 02:49:32 2022 ] Batch(12/243) done. Loss: 0.0069 lr:0.000100 network_time: 0.0312
815
+ [ Thu Sep 15 02:50:44 2022 ] Batch(112/243) done. Loss: 0.0133 lr:0.000100 network_time: 0.0290
816
+ [ Thu Sep 15 02:51:57 2022 ] Batch(212/243) done. Loss: 0.0086 lr:0.000100 network_time: 0.0279
817
+ [ Thu Sep 15 02:52:19 2022 ] Eval epoch: 110
818
+ [ Thu Sep 15 02:53:52 2022 ] Mean test loss of 796 batches: 2.4391942024230957.
819
+ [ Thu Sep 15 02:53:52 2022 ] Top1: 54.12%
820
+ [ Thu Sep 15 02:53:53 2022 ] Top5: 83.40%
821
+ [ Thu Sep 15 02:53:53 2022 ] Training epoch: 111
822
+ [ Thu Sep 15 02:54:47 2022 ] Batch(69/243) done. Loss: 0.0309 lr:0.000100 network_time: 0.0285
823
+ [ Thu Sep 15 02:55:59 2022 ] Batch(169/243) done. Loss: 0.0130 lr:0.000100 network_time: 0.0274
824
+ [ Thu Sep 15 02:56:53 2022 ] Eval epoch: 111
825
+ [ Thu Sep 15 02:58:25 2022 ] Mean test loss of 796 batches: 2.412464141845703.
826
+ [ Thu Sep 15 02:58:25 2022 ] Top1: 54.33%
827
+ [ Thu Sep 15 02:58:26 2022 ] Top5: 83.24%
828
+ [ Thu Sep 15 02:58:26 2022 ] Training epoch: 112
829
+ [ Thu Sep 15 02:58:49 2022 ] Batch(26/243) done. Loss: 0.0087 lr:0.000100 network_time: 0.0325
830
+ [ Thu Sep 15 03:00:01 2022 ] Batch(126/243) done. Loss: 0.0126 lr:0.000100 network_time: 0.0319
831
+ [ Thu Sep 15 03:01:14 2022 ] Batch(226/243) done. Loss: 0.0311 lr:0.000100 network_time: 0.0269
832
+ [ Thu Sep 15 03:01:26 2022 ] Eval epoch: 112
833
+ [ Thu Sep 15 03:02:59 2022 ] Mean test loss of 796 batches: 2.444413900375366.
834
+ [ Thu Sep 15 03:02:59 2022 ] Top1: 53.70%
835
+ [ Thu Sep 15 03:02:59 2022 ] Top5: 82.84%
836
+ [ Thu Sep 15 03:03:00 2022 ] Training epoch: 113
837
+ [ Thu Sep 15 03:04:04 2022 ] Batch(83/243) done. Loss: 0.0062 lr:0.000100 network_time: 0.0278
838
+ [ Thu Sep 15 03:05:16 2022 ] Batch(183/243) done. Loss: 0.0063 lr:0.000100 network_time: 0.0313
839
+ [ Thu Sep 15 03:06:00 2022 ] Eval epoch: 113
840
+ [ Thu Sep 15 03:07:32 2022 ] Mean test loss of 796 batches: 2.4518001079559326.
841
+ [ Thu Sep 15 03:07:32 2022 ] Top1: 54.17%
842
+ [ Thu Sep 15 03:07:33 2022 ] Top5: 83.25%
843
+ [ Thu Sep 15 03:07:33 2022 ] Training epoch: 114
844
+ [ Thu Sep 15 03:08:05 2022 ] Batch(40/243) done. Loss: 0.0074 lr:0.000100 network_time: 0.0309
845
+ [ Thu Sep 15 03:09:18 2022 ] Batch(140/243) done. Loss: 0.0063 lr:0.000100 network_time: 0.0328
846
+ [ Thu Sep 15 03:10:31 2022 ] Batch(240/243) done. Loss: 0.0074 lr:0.000100 network_time: 0.0273
847
+ [ Thu Sep 15 03:10:33 2022 ] Eval epoch: 114
848
+ [ Thu Sep 15 03:12:04 2022 ] Mean test loss of 796 batches: 2.400949478149414.
849
+ [ Thu Sep 15 03:12:05 2022 ] Top1: 54.15%
850
+ [ Thu Sep 15 03:12:05 2022 ] Top5: 83.42%
851
+ [ Thu Sep 15 03:12:05 2022 ] Training epoch: 115
852
+ [ Thu Sep 15 03:13:19 2022 ] Batch(97/243) done. Loss: 0.0092 lr:0.000100 network_time: 0.0287
853
+ [ Thu Sep 15 03:14:32 2022 ] Batch(197/243) done. Loss: 0.0152 lr:0.000100 network_time: 0.0226
854
+ [ Thu Sep 15 03:15:05 2022 ] Eval epoch: 115
855
+ [ Thu Sep 15 03:16:37 2022 ] Mean test loss of 796 batches: 2.374425172805786.
856
+ [ Thu Sep 15 03:16:37 2022 ] Top1: 54.36%
857
+ [ Thu Sep 15 03:16:37 2022 ] Top5: 83.42%
858
+ [ Thu Sep 15 03:16:38 2022 ] Training epoch: 116
859
+ [ Thu Sep 15 03:17:20 2022 ] Batch(54/243) done. Loss: 0.0063 lr:0.000100 network_time: 0.0280
860
+ [ Thu Sep 15 03:18:33 2022 ] Batch(154/243) done. Loss: 0.0058 lr:0.000100 network_time: 0.0323
861
+ [ Thu Sep 15 03:19:37 2022 ] Eval epoch: 116
862
+ [ Thu Sep 15 03:21:10 2022 ] Mean test loss of 796 batches: 2.3895974159240723.
863
+ [ Thu Sep 15 03:21:11 2022 ] Top1: 54.53%
864
+ [ Thu Sep 15 03:21:11 2022 ] Top5: 83.50%
865
+ [ Thu Sep 15 03:21:11 2022 ] Training epoch: 117
866
+ [ Thu Sep 15 03:21:23 2022 ] Batch(11/243) done. Loss: 0.0143 lr:0.000100 network_time: 0.0276
867
+ [ Thu Sep 15 03:22:35 2022 ] Batch(111/243) done. Loss: 0.0089 lr:0.000100 network_time: 0.0298
868
+ [ Thu Sep 15 03:23:48 2022 ] Batch(211/243) done. Loss: 0.0094 lr:0.000100 network_time: 0.0317
869
+ [ Thu Sep 15 03:24:11 2022 ] Eval epoch: 117
870
+ [ Thu Sep 15 03:25:43 2022 ] Mean test loss of 796 batches: 2.401299476623535.
871
+ [ Thu Sep 15 03:25:44 2022 ] Top1: 54.57%
872
+ [ Thu Sep 15 03:25:44 2022 ] Top5: 83.51%
873
+ [ Thu Sep 15 03:25:44 2022 ] Training epoch: 118
874
+ [ Thu Sep 15 03:26:37 2022 ] Batch(68/243) done. Loss: 0.0067 lr:0.000100 network_time: 0.0279
875
+ [ Thu Sep 15 03:27:50 2022 ] Batch(168/243) done. Loss: 0.0088 lr:0.000100 network_time: 0.0352
876
+ [ Thu Sep 15 03:28:44 2022 ] Eval epoch: 118
877
+ [ Thu Sep 15 03:30:17 2022 ] Mean test loss of 796 batches: 2.439582586288452.
878
+ [ Thu Sep 15 03:30:17 2022 ] Top1: 53.99%
879
+ [ Thu Sep 15 03:30:17 2022 ] Top5: 83.18%
880
+ [ Thu Sep 15 03:30:18 2022 ] Training epoch: 119
881
+ [ Thu Sep 15 03:30:39 2022 ] Batch(25/243) done. Loss: 0.0191 lr:0.000100 network_time: 0.0256
882
+ [ Thu Sep 15 03:31:52 2022 ] Batch(125/243) done. Loss: 0.0259 lr:0.000100 network_time: 0.0326
883
+ [ Thu Sep 15 03:33:05 2022 ] Batch(225/243) done. Loss: 0.0171 lr:0.000100 network_time: 0.0325
884
+ [ Thu Sep 15 03:33:17 2022 ] Eval epoch: 119
885
+ [ Thu Sep 15 03:34:50 2022 ] Mean test loss of 796 batches: 2.4420394897460938.
886
+ [ Thu Sep 15 03:34:51 2022 ] Top1: 53.83%
887
+ [ Thu Sep 15 03:34:51 2022 ] Top5: 83.19%
888
+ [ Thu Sep 15 03:34:51 2022 ] Training epoch: 120
889
+ [ Thu Sep 15 03:35:54 2022 ] Batch(82/243) done. Loss: 0.0074 lr:0.000100 network_time: 0.0271
890
+ [ Thu Sep 15 03:37:07 2022 ] Batch(182/243) done. Loss: 0.0215 lr:0.000100 network_time: 0.0474
891
+ [ Thu Sep 15 03:37:51 2022 ] Eval epoch: 120
892
+ [ Thu Sep 15 03:39:23 2022 ] Mean test loss of 796 batches: 2.4470932483673096.
893
+ [ Thu Sep 15 03:39:24 2022 ] Top1: 54.29%
894
+ [ Thu Sep 15 03:39:24 2022 ] Top5: 83.36%
895
+ [ Thu Sep 15 03:39:24 2022 ] Training epoch: 121
896
+ [ Thu Sep 15 03:39:56 2022 ] Batch(39/243) done. Loss: 0.0121 lr:0.000100 network_time: 0.0261
897
+ [ Thu Sep 15 03:41:09 2022 ] Batch(139/243) done. Loss: 0.0200 lr:0.000100 network_time: 0.0289
898
+ [ Thu Sep 15 03:42:22 2022 ] Batch(239/243) done. Loss: 0.0115 lr:0.000100 network_time: 0.0284
899
+ [ Thu Sep 15 03:42:24 2022 ] Eval epoch: 121
900
+ [ Thu Sep 15 03:43:57 2022 ] Mean test loss of 796 batches: 2.3847100734710693.
901
+ [ Thu Sep 15 03:43:57 2022 ] Top1: 54.56%
902
+ [ Thu Sep 15 03:43:57 2022 ] Top5: 83.73%
903
+ [ Thu Sep 15 03:43:58 2022 ] Training epoch: 122
904
+ [ Thu Sep 15 03:45:11 2022 ] Batch(96/243) done. Loss: 0.0116 lr:0.000100 network_time: 0.0288
905
+ [ Thu Sep 15 03:46:24 2022 ] Batch(196/243) done. Loss: 0.0115 lr:0.000100 network_time: 0.0275
906
+ [ Thu Sep 15 03:46:57 2022 ] Eval epoch: 122
907
+ [ Thu Sep 15 03:48:30 2022 ] Mean test loss of 796 batches: 2.420030355453491.
908
+ [ Thu Sep 15 03:48:31 2022 ] Top1: 54.13%
909
+ [ Thu Sep 15 03:48:31 2022 ] Top5: 83.32%
910
+ [ Thu Sep 15 03:48:31 2022 ] Training epoch: 123
911
+ [ Thu Sep 15 03:49:13 2022 ] Batch(53/243) done. Loss: 0.0080 lr:0.000100 network_time: 0.0324
912
+ [ Thu Sep 15 03:50:26 2022 ] Batch(153/243) done. Loss: 0.0132 lr:0.000100 network_time: 0.0288
913
+ [ Thu Sep 15 03:51:31 2022 ] Eval epoch: 123
914
+ [ Thu Sep 15 03:53:03 2022 ] Mean test loss of 796 batches: 2.4071152210235596.
915
+ [ Thu Sep 15 03:53:04 2022 ] Top1: 54.40%
916
+ [ Thu Sep 15 03:53:04 2022 ] Top5: 83.43%
917
+ [ Thu Sep 15 03:53:04 2022 ] Training epoch: 124
918
+ [ Thu Sep 15 03:53:15 2022 ] Batch(10/243) done. Loss: 0.0096 lr:0.000100 network_time: 0.0536
919
+ [ Thu Sep 15 03:54:28 2022 ] Batch(110/243) done. Loss: 0.0061 lr:0.000100 network_time: 0.0281
920
+ [ Thu Sep 15 03:55:41 2022 ] Batch(210/243) done. Loss: 0.0115 lr:0.000100 network_time: 0.0273
921
+ [ Thu Sep 15 03:56:05 2022 ] Eval epoch: 124
922
+ [ Thu Sep 15 03:57:38 2022 ] Mean test loss of 796 batches: 2.3790695667266846.
923
+ [ Thu Sep 15 03:57:38 2022 ] Top1: 54.58%
924
+ [ Thu Sep 15 03:57:38 2022 ] Top5: 83.58%
925
+ [ Thu Sep 15 03:57:38 2022 ] Training epoch: 125
926
+ [ Thu Sep 15 03:58:31 2022 ] Batch(67/243) done. Loss: 0.0104 lr:0.000100 network_time: 0.0276
927
+ [ Thu Sep 15 03:59:44 2022 ] Batch(167/243) done. Loss: 0.0475 lr:0.000100 network_time: 0.0301
928
+ [ Thu Sep 15 04:00:38 2022 ] Eval epoch: 125
929
+ [ Thu Sep 15 04:02:11 2022 ] Mean test loss of 796 batches: 2.4057438373565674.
930
+ [ Thu Sep 15 04:02:11 2022 ] Top1: 54.09%
931
+ [ Thu Sep 15 04:02:11 2022 ] Top5: 83.32%
932
+ [ Thu Sep 15 04:02:12 2022 ] Training epoch: 126
933
+ [ Thu Sep 15 04:02:33 2022 ] Batch(24/243) done. Loss: 0.0141 lr:0.000100 network_time: 0.0558
934
+ [ Thu Sep 15 04:03:45 2022 ] Batch(124/243) done. Loss: 0.0090 lr:0.000100 network_time: 0.0287
935
+ [ Thu Sep 15 04:04:58 2022 ] Batch(224/243) done. Loss: 0.0155 lr:0.000100 network_time: 0.0302
936
+ [ Thu Sep 15 04:05:11 2022 ] Eval epoch: 126
937
+ [ Thu Sep 15 04:06:43 2022 ] Mean test loss of 796 batches: 2.4487736225128174.
938
+ [ Thu Sep 15 04:06:44 2022 ] Top1: 54.18%
939
+ [ Thu Sep 15 04:06:44 2022 ] Top5: 83.42%
940
+ [ Thu Sep 15 04:06:44 2022 ] Training epoch: 127
941
+ [ Thu Sep 15 04:07:47 2022 ] Batch(81/243) done. Loss: 0.0202 lr:0.000100 network_time: 0.0272
942
+ [ Thu Sep 15 04:09:00 2022 ] Batch(181/243) done. Loss: 0.0151 lr:0.000100 network_time: 0.0267
943
+ [ Thu Sep 15 04:09:44 2022 ] Eval epoch: 127
944
+ [ Thu Sep 15 04:11:16 2022 ] Mean test loss of 796 batches: 2.4265387058258057.
945
+ [ Thu Sep 15 04:11:16 2022 ] Top1: 54.10%
946
+ [ Thu Sep 15 04:11:17 2022 ] Top5: 83.26%
947
+ [ Thu Sep 15 04:11:17 2022 ] Training epoch: 128
948
+ [ Thu Sep 15 04:11:49 2022 ] Batch(38/243) done. Loss: 0.0314 lr:0.000100 network_time: 0.0346
949
+ [ Thu Sep 15 04:13:01 2022 ] Batch(138/243) done. Loss: 0.0061 lr:0.000100 network_time: 0.0271
950
+ [ Thu Sep 15 04:14:14 2022 ] Batch(238/243) done. Loss: 0.0216 lr:0.000100 network_time: 0.0269
951
+ [ Thu Sep 15 04:14:17 2022 ] Eval epoch: 128
952
+ [ Thu Sep 15 04:15:50 2022 ] Mean test loss of 796 batches: 2.398974895477295.
953
+ [ Thu Sep 15 04:15:50 2022 ] Top1: 54.33%
954
+ [ Thu Sep 15 04:15:51 2022 ] Top5: 83.47%
955
+ [ Thu Sep 15 04:15:51 2022 ] Training epoch: 129
956
+ [ Thu Sep 15 04:17:04 2022 ] Batch(95/243) done. Loss: 0.0208 lr:0.000100 network_time: 0.0319
957
+ [ Thu Sep 15 04:18:17 2022 ] Batch(195/243) done. Loss: 0.0156 lr:0.000100 network_time: 0.0264
958
+ [ Thu Sep 15 04:18:51 2022 ] Eval epoch: 129
959
+ [ Thu Sep 15 04:20:23 2022 ] Mean test loss of 796 batches: 2.408602714538574.
960
+ [ Thu Sep 15 04:20:24 2022 ] Top1: 54.14%
961
+ [ Thu Sep 15 04:20:24 2022 ] Top5: 83.28%
962
+ [ Thu Sep 15 04:20:24 2022 ] Training epoch: 130
963
+ [ Thu Sep 15 04:21:06 2022 ] Batch(52/243) done. Loss: 0.0093 lr:0.000100 network_time: 0.0272
964
+ [ Thu Sep 15 04:22:18 2022 ] Batch(152/243) done. Loss: 0.0065 lr:0.000100 network_time: 0.0276
965
+ [ Thu Sep 15 04:23:24 2022 ] Eval epoch: 130
966
+ [ Thu Sep 15 04:24:57 2022 ] Mean test loss of 796 batches: 2.363591194152832.
967
+ [ Thu Sep 15 04:24:58 2022 ] Top1: 54.93%
968
+ [ Thu Sep 15 04:24:58 2022 ] Top5: 83.73%
969
+ [ Thu Sep 15 04:24:58 2022 ] Training epoch: 131
970
+ [ Thu Sep 15 04:25:08 2022 ] Batch(9/243) done. Loss: 0.0161 lr:0.000100 network_time: 0.0317
971
+ [ Thu Sep 15 04:26:21 2022 ] Batch(109/243) done. Loss: 0.0075 lr:0.000100 network_time: 0.0283
972
+ [ Thu Sep 15 04:27:34 2022 ] Batch(209/243) done. Loss: 0.0127 lr:0.000100 network_time: 0.0266
973
+ [ Thu Sep 15 04:27:58 2022 ] Eval epoch: 131
974
+ [ Thu Sep 15 04:29:31 2022 ] Mean test loss of 796 batches: 2.4064884185791016.
975
+ [ Thu Sep 15 04:29:31 2022 ] Top1: 54.13%
976
+ [ Thu Sep 15 04:29:32 2022 ] Top5: 83.39%
977
+ [ Thu Sep 15 04:29:32 2022 ] Training epoch: 132
978
+ [ Thu Sep 15 04:30:23 2022 ] Batch(66/243) done. Loss: 0.0080 lr:0.000100 network_time: 0.0281
979
+ [ Thu Sep 15 04:31:36 2022 ] Batch(166/243) done. Loss: 0.0110 lr:0.000100 network_time: 0.0267
980
+ [ Thu Sep 15 04:32:32 2022 ] Eval epoch: 132
981
+ [ Thu Sep 15 04:34:05 2022 ] Mean test loss of 796 batches: 2.361076593399048.
982
+ [ Thu Sep 15 04:34:06 2022 ] Top1: 54.92%
983
+ [ Thu Sep 15 04:34:06 2022 ] Top5: 83.69%
984
+ [ Thu Sep 15 04:34:06 2022 ] Training epoch: 133
985
+ [ Thu Sep 15 04:34:27 2022 ] Batch(23/243) done. Loss: 0.0093 lr:0.000100 network_time: 0.0285
986
+ [ Thu Sep 15 04:35:40 2022 ] Batch(123/243) done. Loss: 0.0074 lr:0.000100 network_time: 0.0266
987
+ [ Thu Sep 15 04:36:52 2022 ] Batch(223/243) done. Loss: 0.0105 lr:0.000100 network_time: 0.0310
988
+ [ Thu Sep 15 04:37:06 2022 ] Eval epoch: 133
989
+ [ Thu Sep 15 04:38:40 2022 ] Mean test loss of 796 batches: 2.408871650695801.
990
+ [ Thu Sep 15 04:38:40 2022 ] Top1: 54.18%
991
+ [ Thu Sep 15 04:38:40 2022 ] Top5: 83.23%
992
+ [ Thu Sep 15 04:38:41 2022 ] Training epoch: 134
993
+ [ Thu Sep 15 04:39:42 2022 ] Batch(80/243) done. Loss: 0.0139 lr:0.000100 network_time: 0.0264
994
+ [ Thu Sep 15 04:40:55 2022 ] Batch(180/243) done. Loss: 0.0066 lr:0.000100 network_time: 0.0264
995
+ [ Thu Sep 15 04:41:40 2022 ] Eval epoch: 134
996
+ [ Thu Sep 15 04:43:13 2022 ] Mean test loss of 796 batches: 2.3809244632720947.
997
+ [ Thu Sep 15 04:43:13 2022 ] Top1: 54.55%
998
+ [ Thu Sep 15 04:43:13 2022 ] Top5: 83.58%
999
+ [ Thu Sep 15 04:43:13 2022 ] Training epoch: 135
1000
+ [ Thu Sep 15 04:43:44 2022 ] Batch(37/243) done. Loss: 0.0068 lr:0.000100 network_time: 0.0288
1001
+ [ Thu Sep 15 04:44:57 2022 ] Batch(137/243) done. Loss: 0.0115 lr:0.000100 network_time: 0.0331
1002
+ [ Thu Sep 15 04:46:10 2022 ] Batch(237/243) done. Loss: 0.0098 lr:0.000100 network_time: 0.0309
1003
+ [ Thu Sep 15 04:46:14 2022 ] Eval epoch: 135
1004
+ [ Thu Sep 15 04:47:46 2022 ] Mean test loss of 796 batches: 2.4107375144958496.
1005
+ [ Thu Sep 15 04:47:46 2022 ] Top1: 54.19%
1006
+ [ Thu Sep 15 04:47:47 2022 ] Top5: 83.28%
1007
+ [ Thu Sep 15 04:47:47 2022 ] Training epoch: 136
1008
+ [ Thu Sep 15 04:48:58 2022 ] Batch(94/243) done. Loss: 0.0069 lr:0.000100 network_time: 0.0280
1009
+ [ Thu Sep 15 04:50:11 2022 ] Batch(194/243) done. Loss: 0.0253 lr:0.000100 network_time: 0.0259
1010
+ [ Thu Sep 15 04:50:46 2022 ] Eval epoch: 136
1011
+ [ Thu Sep 15 04:52:19 2022 ] Mean test loss of 796 batches: 2.412553310394287.
1012
+ [ Thu Sep 15 04:52:19 2022 ] Top1: 54.30%
1013
+ [ Thu Sep 15 04:52:19 2022 ] Top5: 83.34%
1014
+ [ Thu Sep 15 04:52:20 2022 ] Training epoch: 137
1015
+ [ Thu Sep 15 04:53:00 2022 ] Batch(51/243) done. Loss: 0.0092 lr:0.000100 network_time: 0.0282
1016
+ [ Thu Sep 15 04:54:13 2022 ] Batch(151/243) done. Loss: 0.0118 lr:0.000100 network_time: 0.0291
1017
+ [ Thu Sep 15 04:55:19 2022 ] Eval epoch: 137
1018
+ [ Thu Sep 15 04:56:52 2022 ] Mean test loss of 796 batches: 2.393608331680298.
1019
+ [ Thu Sep 15 04:56:52 2022 ] Top1: 54.69%
1020
+ [ Thu Sep 15 04:56:53 2022 ] Top5: 83.61%
1021
+ [ Thu Sep 15 04:56:53 2022 ] Training epoch: 138
1022
+ [ Thu Sep 15 04:57:02 2022 ] Batch(8/243) done. Loss: 0.0195 lr:0.000100 network_time: 0.0266
1023
+ [ Thu Sep 15 04:58:15 2022 ] Batch(108/243) done. Loss: 0.0269 lr:0.000100 network_time: 0.0290
1024
+ [ Thu Sep 15 04:59:28 2022 ] Batch(208/243) done. Loss: 0.0116 lr:0.000100 network_time: 0.0282
1025
+ [ Thu Sep 15 04:59:53 2022 ] Eval epoch: 138
1026
+ [ Thu Sep 15 05:01:26 2022 ] Mean test loss of 796 batches: 2.412797212600708.
1027
+ [ Thu Sep 15 05:01:26 2022 ] Top1: 53.92%
1028
+ [ Thu Sep 15 05:01:26 2022 ] Top5: 83.29%
1029
+ [ Thu Sep 15 05:01:27 2022 ] Training epoch: 139
1030
+ [ Thu Sep 15 05:02:18 2022 ] Batch(65/243) done. Loss: 0.0478 lr:0.000100 network_time: 0.0294
1031
+ [ Thu Sep 15 05:03:30 2022 ] Batch(165/243) done. Loss: 0.0116 lr:0.000100 network_time: 0.0262
1032
+ [ Thu Sep 15 05:04:27 2022 ] Eval epoch: 139
1033
+ [ Thu Sep 15 05:05:59 2022 ] Mean test loss of 796 batches: 2.435910701751709.
1034
+ [ Thu Sep 15 05:06:00 2022 ] Top1: 54.09%
1035
+ [ Thu Sep 15 05:06:00 2022 ] Top5: 83.17%
1036
+ [ Thu Sep 15 05:06:00 2022 ] Training epoch: 140
1037
+ [ Thu Sep 15 05:06:20 2022 ] Batch(22/243) done. Loss: 0.0075 lr:0.000100 network_time: 0.0277
1038
+ [ Thu Sep 15 05:07:33 2022 ] Batch(122/243) done. Loss: 0.0352 lr:0.000100 network_time: 0.0317
1039
+ [ Thu Sep 15 05:08:45 2022 ] Batch(222/243) done. Loss: 0.0075 lr:0.000100 network_time: 0.0277
1040
+ [ Thu Sep 15 05:09:00 2022 ] Eval epoch: 140
1041
+ [ Thu Sep 15 05:10:33 2022 ] Mean test loss of 796 batches: 2.3889408111572266.
1042
+ [ Thu Sep 15 05:10:33 2022 ] Top1: 54.65%
1043
+ [ Thu Sep 15 05:10:34 2022 ] Top5: 83.62%
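For context, the Top1/Top5 figures reported throughout the log above are plain top-k accuracies over the test scores. A minimal sketch of that metric, assuming hypothetical `scores` (num_samples x num_classes) and integer `labels` arrays rather than the repository's own evaluation code:

    import numpy as np

    def topk_accuracy(scores, labels, k):
        # indices of the k highest-scoring classes for each sample
        topk = np.argsort(scores, axis=1)[:, -k:]
        return (topk == labels[:, None]).any(axis=1).mean()

    # illustrative random data only; real use would pass the model's class scores
    rng = np.random.default_rng(0)
    scores = rng.normal(size=(1000, 120))      # NTU RGB+D 120 has 120 classes
    labels = rng.integers(0, 120, size=1000)
    print("Top1: %.2f%%" % (100 * topk_accuracy(scores, labels, 1)))
    print("Top5: %.2f%%" % (100 * topk_accuracy(scores, labels, 5)))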
ckpt/Others/Shift-GCN/ntu120_xsub/ntu120_joint_xsub/shift_gcn.py ADDED
@@ -0,0 +1,216 @@
1
+ import torch
2
+ import torch.nn as nn
3
+ import torch.nn.functional as F
4
+ from torch.autograd import Variable
5
+ import numpy as np
6
+ import math
7
+
8
+ import sys
9
+ sys.path.append("./model/Temporal_shift/")
10
+
11
+ from cuda.shift import Shift
12
+
13
+
14
+ def import_class(name):
15
+ components = name.split('.')
16
+ mod = __import__(components[0])
17
+ for comp in components[1:]:
18
+ mod = getattr(mod, comp)
19
+ return mod
20
+
21
+ def conv_init(conv):
22
+ nn.init.kaiming_normal(conv.weight, mode='fan_out')
23
+ nn.init.constant(conv.bias, 0)
24
+
25
+
26
+ def bn_init(bn, scale):
27
+ nn.init.constant(bn.weight, scale)
28
+ nn.init.constant(bn.bias, 0)
29
+
30
+
31
+ class tcn(nn.Module):
32
+ def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
33
+ super(tcn, self).__init__()
34
+ pad = int((kernel_size - 1) / 2)
35
+ self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
36
+ stride=(stride, 1))
37
+
38
+ self.bn = nn.BatchNorm2d(out_channels)
39
+ self.relu = nn.ReLU()
40
+ conv_init(self.conv)
41
+ bn_init(self.bn, 1)
42
+
43
+ def forward(self, x):
44
+ x = self.bn(self.conv(x))
45
+ return x
46
+
47
+
48
+ class Shift_tcn(nn.Module):
49
+ def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
50
+ super(Shift_tcn, self).__init__()
51
+
52
+ self.in_channels = in_channels
53
+ self.out_channels = out_channels
54
+
55
+ self.bn = nn.BatchNorm2d(in_channels)
56
+ self.bn2 = nn.BatchNorm2d(in_channels)
57
+ bn_init(self.bn2, 1)
58
+ self.relu = nn.ReLU(inplace=True)
59
+ self.shift_in = Shift(channel=in_channels, stride=1, init_scale=1)
60
+ self.shift_out = Shift(channel=out_channels, stride=stride, init_scale=1)
61
+
62
+ self.temporal_linear = nn.Conv2d(in_channels, out_channels, 1)
63
+ nn.init.kaiming_normal(self.temporal_linear.weight, mode='fan_out')
64
+
65
+ def forward(self, x):
66
+ x = self.bn(x)
67
+ # shift1
68
+ x = self.shift_in(x)
69
+ x = self.temporal_linear(x)
70
+ x = self.relu(x)
71
+ # shift2
72
+ x = self.shift_out(x)
73
+ x = self.bn2(x)
74
+ return x
75
+
76
+
77
+ class Shift_gcn(nn.Module):
78
+ def __init__(self, in_channels, out_channels, A, coff_embedding=4, num_subset=3):
79
+ super(Shift_gcn, self).__init__()
80
+ self.in_channels = in_channels
81
+ self.out_channels = out_channels
82
+ if in_channels != out_channels:
83
+ self.down = nn.Sequential(
84
+ nn.Conv2d(in_channels, out_channels, 1),
85
+ nn.BatchNorm2d(out_channels)
86
+ )
87
+ else:
88
+ self.down = lambda x: x
89
+
90
+ self.Linear_weight = nn.Parameter(torch.zeros(in_channels, out_channels, requires_grad=True, device='cuda'), requires_grad=True)
91
+ nn.init.normal_(self.Linear_weight, 0,math.sqrt(1.0/out_channels))
92
+
93
+ self.Linear_bias = nn.Parameter(torch.zeros(1,1,out_channels,requires_grad=True,device='cuda'),requires_grad=True)
94
+ nn.init.constant(self.Linear_bias, 0)
95
+
96
+ self.Feature_Mask = nn.Parameter(torch.ones(1,25,in_channels, requires_grad=True,device='cuda'),requires_grad=True)
97
+ nn.init.constant(self.Feature_Mask, 0)
98
+
99
+ self.bn = nn.BatchNorm1d(25*out_channels)
100
+ self.relu = nn.ReLU()
101
+
102
+ for m in self.modules():
103
+ if isinstance(m, nn.Conv2d):
104
+ conv_init(m)
105
+ elif isinstance(m, nn.BatchNorm2d):
106
+ bn_init(m, 1)
107
+
108
+ index_array = np.empty(25*in_channels).astype(np.int64)  # np.int was removed in NumPy >= 1.24; int64 matches torch.index_select's index dtype
109
+ for i in range(25):
110
+ for j in range(in_channels):
111
+ index_array[i*in_channels + j] = (i*in_channels + j + j*in_channels)%(in_channels*25)
112
+ self.shift_in = nn.Parameter(torch.from_numpy(index_array),requires_grad=False)
113
+
114
+ index_array = np.empty(25*out_channels).astype(np.int64)  # as above, np.int64 instead of the removed np.int
115
+ for i in range(25):
116
+ for j in range(out_channels):
117
+ index_array[i*out_channels + j] = (i*out_channels + j - j*out_channels)%(out_channels*25)
118
+ self.shift_out = nn.Parameter(torch.from_numpy(index_array),requires_grad=False)
119
+
120
+
121
+ def forward(self, x0):
122
+ n, c, t, v = x0.size()
123
+ x = x0.permute(0,2,3,1).contiguous()
124
+
125
+ # shift1
126
+ x = x.view(n*t,v*c)
127
+ x = torch.index_select(x, 1, self.shift_in)
128
+ x = x.view(n*t,v,c)
129
+ x = x * (torch.tanh(self.Feature_Mask)+1)
130
+
131
+ x = torch.einsum('nwc,cd->nwd', (x, self.Linear_weight)).contiguous() # nt,v,c
132
+ x = x + self.Linear_bias
133
+
134
+ # shift2
135
+ x = x.view(n*t,-1)
136
+ x = torch.index_select(x, 1, self.shift_out)
137
+ x = self.bn(x)
138
+ x = x.view(n,t,v,self.out_channels).permute(0,3,1,2) # n,c,t,v
139
+
140
+ x = x + self.down(x0)
141
+ x = self.relu(x)
142
+ return x
143
+
144
+
145
+ class TCN_GCN_unit(nn.Module):
146
+ def __init__(self, in_channels, out_channels, A, stride=1, residual=True):
147
+ super(TCN_GCN_unit, self).__init__()
148
+ self.gcn1 = Shift_gcn(in_channels, out_channels, A)
149
+ self.tcn1 = Shift_tcn(out_channels, out_channels, stride=stride)
150
+ self.relu = nn.ReLU()
151
+
152
+ if not residual:
153
+ self.residual = lambda x: 0
154
+
155
+ elif (in_channels == out_channels) and (stride == 1):
156
+ self.residual = lambda x: x
157
+ else:
158
+ self.residual = tcn(in_channels, out_channels, kernel_size=1, stride=stride)
159
+
160
+ def forward(self, x):
161
+ x = self.tcn1(self.gcn1(x)) + self.residual(x)
162
+ return self.relu(x)
163
+
164
+
165
+ class Model(nn.Module):
166
+ def __init__(self, num_class=60, num_point=25, num_person=2, graph=None, graph_args=dict(), in_channels=3):
167
+ super(Model, self).__init__()
168
+
169
+ if graph is None:
170
+ raise ValueError()
171
+ else:
172
+ Graph = import_class(graph)
173
+ self.graph = Graph(**graph_args)
174
+
175
+ A = self.graph.A
176
+ self.data_bn = nn.BatchNorm1d(num_person * in_channels * num_point)
177
+
178
+ self.l1 = TCN_GCN_unit(3, 64, A, residual=False)
179
+ self.l2 = TCN_GCN_unit(64, 64, A)
180
+ self.l3 = TCN_GCN_unit(64, 64, A)
181
+ self.l4 = TCN_GCN_unit(64, 64, A)
182
+ self.l5 = TCN_GCN_unit(64, 128, A, stride=2)
183
+ self.l6 = TCN_GCN_unit(128, 128, A)
184
+ self.l7 = TCN_GCN_unit(128, 128, A)
185
+ self.l8 = TCN_GCN_unit(128, 256, A, stride=2)
186
+ self.l9 = TCN_GCN_unit(256, 256, A)
187
+ self.l10 = TCN_GCN_unit(256, 256, A)
188
+
189
+ self.fc = nn.Linear(256, num_class)
190
+ nn.init.normal(self.fc.weight, 0, math.sqrt(2. / num_class))
191
+ bn_init(self.data_bn, 1)
192
+
193
+ def forward(self, x):
194
+ N, C, T, V, M = x.size()
195
+
196
+ x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
197
+ x = self.data_bn(x)
198
+ x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
199
+
200
+ x = self.l1(x)
201
+ x = self.l2(x)
202
+ x = self.l3(x)
203
+ x = self.l4(x)
204
+ x = self.l5(x)
205
+ x = self.l6(x)
206
+ x = self.l7(x)
207
+ x = self.l8(x)
208
+ x = self.l9(x)
209
+ x = self.l10(x)
210
+
211
+ # N*M,C,T,V
212
+ c_new = x.size(1)
213
+ x = x.view(N, M, c_new, -1)
214
+ x = x.mean(3).mean(1)
215
+
216
+ return self.fc(x)
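For context, the fixed spatial shift in Shift_gcn above is just a pair of precomputed permutation indices applied with torch.index_select. A minimal NumPy sketch of the shift_in construction, using a deliberately small channel count (an illustrative choice, not the model's):

    import numpy as np

    V, C = 25, 4  # 25 joints, small channel count for illustration
    idx = np.empty(V * C, dtype=np.int64)
    for i in range(V):
        for j in range(C):
            # same formula as shift_in: output (joint i, channel j) reads
            # input (joint (i + j) % V, channel j)
            idx[i * C + j] = (i * C + j + j * C) % (C * V)

    flat = np.arange(V * C)            # a flattened (V, C) feature map, value = v*C + c
    shifted = flat[idx].reshape(V, C)  # what torch.index_select(x, 1, shift_in) does per frame
    print(shifted[0])                  # joint 0 now gathers channels from joints 0, 1, 2, 3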
ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_bone_motion_xsub/config.yaml ADDED
@@ -0,0 +1,58 @@
1
+ Experiment_name: ntu_ShiftGCN_bone_motion_xsub
2
+ base_lr: 0.1
3
+ batch_size: 64
4
+ config: ./config/nturgbd-cross-subject/train_bone_motion.yaml
5
+ device:
6
+ - 4
7
+ - 5
8
+ - 6
9
+ - 7
10
+ eval_interval: 5
11
+ feeder: feeders.feeder.Feeder
12
+ ignore_weights: []
13
+ log_interval: 100
14
+ model: model.shift_gcn.Model
15
+ model_args:
16
+ graph: graph.ntu_rgb_d.Graph
17
+ graph_args:
18
+ labeling_mode: spatial
19
+ num_class: 60
20
+ num_person: 2
21
+ num_point: 25
22
+ model_saved_name: ./save_models/ntu_ShiftGCN_bone_motion_xsub
23
+ nesterov: true
24
+ num_epoch: 140
25
+ num_worker: 32
26
+ only_train_epoch: 1
27
+ only_train_part: true
28
+ optimizer: SGD
29
+ phase: train
30
+ print_log: true
31
+ save_interval: 2
32
+ save_score: false
33
+ seed: 1
34
+ show_topk:
35
+ - 1
36
+ - 5
37
+ start_epoch: 0
38
+ step:
39
+ - 60
40
+ - 80
41
+ - 100
42
+ test_batch_size: 64
43
+ test_feeder_args:
44
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_bone_motion.npy
45
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl
46
+ train_feeder_args:
47
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_bone_motion.npy
48
+ debug: false
49
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl
50
+ normalization: false
51
+ random_choose: false
52
+ random_move: false
53
+ random_shift: false
54
+ window_size: -1
55
+ warm_up_epoch: 0
56
+ weight_decay: 0.0001
57
+ weights: null
58
+ work_dir: ./work_dir/ntu_ShiftGCN_bone_motion_xsub
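For context, the config above is plain YAML and can be inspected with PyYAML; its base_lr/step pair matches the stepwise learning-rate decay visible in the logs (0.1 at the start, then divided by 10 at each milestone). A minimal sketch, assuming PyYAML is installed and that the relative path below is where the file sits (an assumption):

    import yaml  # PyYAML

    with open("ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_bone_motion_xsub/config.yaml") as f:
        cfg = yaml.safe_load(f)

    def lr_at(epoch, base_lr, steps, gamma=0.1):
        # step decay: multiply by gamma for every milestone already passed
        return base_lr * gamma ** sum(epoch > s for s in steps)

    for e in (1, 61, 81, 101, 140):
        print(e, round(lr_at(e, cfg["base_lr"], cfg["step"]), 6))  # 0.1, 0.01, 0.001, 0.0001, 0.0001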
ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_bone_motion_xsub/eval_results/best_acc.pkl ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d8d96c42be1b3bb086a94b5fc3ff17cda1a170098b51a4909889e7109b7b5f8c
3
+ size 4979902
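The three lines above are a Git LFS pointer, so the repository stores only the object id and size; a downloaded copy of best_acc.pkl can be checked against them. A small sketch, where the local filename is an assumption:

    import hashlib, os

    path = "best_acc.pkl"  # hypothetical local copy obtained via `git lfs pull`
    expected_oid = "d8d96c42be1b3bb086a94b5fc3ff17cda1a170098b51a4909889e7109b7b5f8c"
    expected_size = 4979902

    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(1 << 20), b""):
            h.update(chunk)
    print("size ok  :", os.path.getsize(path) == expected_size)
    print("sha256 ok:", h.hexdigest() == expected_oid)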
ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_bone_motion_xsub/log.txt ADDED
@@ -0,0 +1,875 @@
1
+ [ Thu Sep 15 17:47:56 2022 ] Parameters:
2
+ {'work_dir': './work_dir/ntu_ShiftGCN_bone_motion_xsub', 'model_saved_name': './save_models/ntu_ShiftGCN_bone_motion_xsub', 'Experiment_name': 'ntu_ShiftGCN_bone_motion_xsub', 'config': './config/nturgbd-cross-subject/train_bone_motion.yaml', 'phase': 'train', 'save_score': False, 'seed': 1, 'log_interval': 100, 'save_interval': 2, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_bone_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_bone_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl'}, 'model': 'model.shift_gcn.Model', 'model_args': {'num_class': 60, 'num_point': 25, 'num_person': 2, 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.1, 'step': [60, 80, 100], 'device': [4, 5, 6, 7], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 64, 'test_batch_size': 64, 'start_epoch': 0, 'num_epoch': 140, 'weight_decay': 0.0001, 'only_train_part': True, 'only_train_epoch': 1, 'warm_up_epoch': 0}
3
+
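For context, the train/test paths in the parameter dump above point at precomputed *_bone_motion.npy tensors. In this family of pipelines the motion stream is usually the frame-to-frame difference of the bone data; the sketch below shows that convention, which is an assumption about how these files were generated (the shape and zero-padded last frame included):

    import numpy as np

    # hypothetical bone data with shape (N, 3, T, 25, 2): samples, xyz, frames, joints, persons
    bone = np.random.randn(4, 3, 300, 25, 2).astype(np.float32)

    bone_motion = np.zeros_like(bone)
    bone_motion[:, :, :-1] = bone[:, :, 1:] - bone[:, :, :-1]  # difference to the next frame
    # last frame stays zero, keeping the same (N, 3, T, 25, 2) layout as the input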
4
+ [ Thu Sep 15 17:47:56 2022 ] Training epoch: 1
5
+ [ Thu Sep 15 17:48:44 2022 ] Batch(99/123) done. Loss: 2.3221 lr:0.100000 network_time: 0.0504
6
+ [ Thu Sep 15 17:48:52 2022 ] Eval epoch: 1
7
+ [ Thu Sep 15 17:49:15 2022 ] Mean test loss of 258 batches: 5.581438064575195.
8
+ [ Thu Sep 15 17:49:15 2022 ] Top1: 11.46%
9
+ [ Thu Sep 15 17:49:15 2022 ] Top5: 35.39%
10
+ [ Thu Sep 15 17:49:15 2022 ] Training epoch: 2
11
+ [ Thu Sep 15 17:49:47 2022 ] Batch(76/123) done. Loss: 1.9718 lr:0.100000 network_time: 0.0507
12
+ [ Thu Sep 15 17:50:05 2022 ] Eval epoch: 2
13
+ [ Thu Sep 15 17:50:27 2022 ] Mean test loss of 258 batches: 4.6655802726745605.
14
+ [ Thu Sep 15 17:50:28 2022 ] Top1: 22.79%
15
+ [ Thu Sep 15 17:50:28 2022 ] Top5: 53.54%
16
+ [ Thu Sep 15 17:50:28 2022 ] Training epoch: 3
17
+ [ Thu Sep 15 17:50:51 2022 ] Batch(53/123) done. Loss: 1.8279 lr:0.100000 network_time: 0.0490
18
+ [ Thu Sep 15 17:51:17 2022 ] Eval epoch: 3
19
+ [ Thu Sep 15 17:51:40 2022 ] Mean test loss of 258 batches: 4.853720188140869.
20
+ [ Thu Sep 15 17:51:40 2022 ] Top1: 23.39%
21
+ [ Thu Sep 15 17:51:40 2022 ] Top5: 53.48%
22
+ [ Thu Sep 15 17:51:41 2022 ] Training epoch: 4
23
+ [ Thu Sep 15 17:51:56 2022 ] Batch(30/123) done. Loss: 1.7001 lr:0.100000 network_time: 0.0515
24
+ [ Thu Sep 15 17:52:30 2022 ] Eval epoch: 4
25
+ [ Thu Sep 15 17:52:53 2022 ] Mean test loss of 258 batches: 4.209709644317627.
26
+ [ Thu Sep 15 17:52:53 2022 ] Top1: 28.01%
27
+ [ Thu Sep 15 17:52:53 2022 ] Top5: 59.43%
28
+ [ Thu Sep 15 17:52:53 2022 ] Training epoch: 5
29
+ [ Thu Sep 15 17:53:00 2022 ] Batch(7/123) done. Loss: 1.2025 lr:0.100000 network_time: 0.0503
30
+ [ Thu Sep 15 17:53:37 2022 ] Batch(107/123) done. Loss: 1.0203 lr:0.100000 network_time: 0.0518
31
+ [ Thu Sep 15 17:53:43 2022 ] Eval epoch: 5
32
+ [ Thu Sep 15 17:54:05 2022 ] Mean test loss of 258 batches: 4.861428260803223.
33
+ [ Thu Sep 15 17:54:05 2022 ] Top1: 19.81%
34
+ [ Thu Sep 15 17:54:05 2022 ] Top5: 47.30%
35
+ [ Thu Sep 15 17:54:05 2022 ] Training epoch: 6
36
+ [ Thu Sep 15 17:54:40 2022 ] Batch(84/123) done. Loss: 0.9977 lr:0.100000 network_time: 0.0506
37
+ [ Thu Sep 15 17:54:55 2022 ] Eval epoch: 6
38
+ [ Thu Sep 15 17:55:17 2022 ] Mean test loss of 258 batches: 3.8953726291656494.
39
+ [ Thu Sep 15 17:55:17 2022 ] Top1: 30.00%
40
+ [ Thu Sep 15 17:55:17 2022 ] Top5: 62.61%
41
+ [ Thu Sep 15 17:55:18 2022 ] Training epoch: 7
42
+ [ Thu Sep 15 17:55:44 2022 ] Batch(61/123) done. Loss: 0.9246 lr:0.100000 network_time: 0.0473
43
+ [ Thu Sep 15 17:56:06 2022 ] Eval epoch: 7
44
+ [ Thu Sep 15 17:56:29 2022 ] Mean test loss of 258 batches: 4.290779113769531.
45
+ [ Thu Sep 15 17:56:29 2022 ] Top1: 23.05%
46
+ [ Thu Sep 15 17:56:29 2022 ] Top5: 54.76%
47
+ [ Thu Sep 15 17:56:29 2022 ] Training epoch: 8
48
+ [ Thu Sep 15 17:56:48 2022 ] Batch(38/123) done. Loss: 0.7861 lr:0.100000 network_time: 0.0654
49
+ [ Thu Sep 15 17:57:19 2022 ] Eval epoch: 8
50
+ [ Thu Sep 15 17:57:41 2022 ] Mean test loss of 258 batches: 4.356398582458496.
51
+ [ Thu Sep 15 17:57:42 2022 ] Top1: 30.18%
52
+ [ Thu Sep 15 17:57:42 2022 ] Top5: 60.22%
53
+ [ Thu Sep 15 17:57:42 2022 ] Training epoch: 9
54
+ [ Thu Sep 15 17:57:52 2022 ] Batch(15/123) done. Loss: 0.7096 lr:0.100000 network_time: 0.0512
55
+ [ Thu Sep 15 17:58:28 2022 ] Batch(115/123) done. Loss: 0.8358 lr:0.100000 network_time: 0.0508
56
+ [ Thu Sep 15 17:58:31 2022 ] Eval epoch: 9
57
+ [ Thu Sep 15 17:58:54 2022 ] Mean test loss of 258 batches: 3.7923290729522705.
58
+ [ Thu Sep 15 17:58:55 2022 ] Top1: 36.03%
59
+ [ Thu Sep 15 17:58:55 2022 ] Top5: 67.22%
60
+ [ Thu Sep 15 17:58:55 2022 ] Training epoch: 10
61
+ [ Thu Sep 15 17:59:33 2022 ] Batch(92/123) done. Loss: 0.6539 lr:0.100000 network_time: 0.0534
62
+ [ Thu Sep 15 17:59:44 2022 ] Eval epoch: 10
63
+ [ Thu Sep 15 18:00:07 2022 ] Mean test loss of 258 batches: 2.477128744125366.
64
+ [ Thu Sep 15 18:00:07 2022 ] Top1: 43.65%
65
+ [ Thu Sep 15 18:00:07 2022 ] Top5: 77.15%
66
+ [ Thu Sep 15 18:00:07 2022 ] Training epoch: 11
67
+ [ Thu Sep 15 18:00:37 2022 ] Batch(69/123) done. Loss: 0.6608 lr:0.100000 network_time: 0.0509
68
+ [ Thu Sep 15 18:00:56 2022 ] Eval epoch: 11
69
+ [ Thu Sep 15 18:01:19 2022 ] Mean test loss of 258 batches: 2.566040515899658.
70
+ [ Thu Sep 15 18:01:19 2022 ] Top1: 45.59%
71
+ [ Thu Sep 15 18:01:19 2022 ] Top5: 80.48%
72
+ [ Thu Sep 15 18:01:19 2022 ] Training epoch: 12
73
+ [ Thu Sep 15 18:01:41 2022 ] Batch(46/123) done. Loss: 0.8870 lr:0.100000 network_time: 0.0499
74
+ [ Thu Sep 15 18:02:09 2022 ] Eval epoch: 12
75
+ [ Thu Sep 15 18:02:32 2022 ] Mean test loss of 258 batches: 2.7983829975128174.
76
+ [ Thu Sep 15 18:02:32 2022 ] Top1: 46.18%
77
+ [ Thu Sep 15 18:02:32 2022 ] Top5: 81.09%
78
+ [ Thu Sep 15 18:02:32 2022 ] Training epoch: 13
79
+ [ Thu Sep 15 18:02:45 2022 ] Batch(23/123) done. Loss: 0.5512 lr:0.100000 network_time: 0.0504
80
+ [ Thu Sep 15 18:03:21 2022 ] Eval epoch: 13
81
+ [ Thu Sep 15 18:03:44 2022 ] Mean test loss of 258 batches: 3.2849953174591064.
82
+ [ Thu Sep 15 18:03:44 2022 ] Top1: 40.53%
83
+ [ Thu Sep 15 18:03:44 2022 ] Top5: 75.00%
84
+ [ Thu Sep 15 18:03:44 2022 ] Training epoch: 14
85
+ [ Thu Sep 15 18:03:48 2022 ] Batch(0/123) done. Loss: 0.5177 lr:0.100000 network_time: 0.0880
86
+ [ Thu Sep 15 18:04:25 2022 ] Batch(100/123) done. Loss: 0.6363 lr:0.100000 network_time: 0.0537
87
+ [ Thu Sep 15 18:04:33 2022 ] Eval epoch: 14
88
+ [ Thu Sep 15 18:04:56 2022 ] Mean test loss of 258 batches: 2.9385926723480225.
89
+ [ Thu Sep 15 18:04:56 2022 ] Top1: 43.08%
90
+ [ Thu Sep 15 18:04:56 2022 ] Top5: 77.56%
91
+ [ Thu Sep 15 18:04:56 2022 ] Training epoch: 15
92
+ [ Thu Sep 15 18:05:29 2022 ] Batch(77/123) done. Loss: 0.6975 lr:0.100000 network_time: 0.0499
93
+ [ Thu Sep 15 18:05:46 2022 ] Eval epoch: 15
94
+ [ Thu Sep 15 18:06:09 2022 ] Mean test loss of 258 batches: 3.6805732250213623.
95
+ [ Thu Sep 15 18:06:09 2022 ] Top1: 37.67%
96
+ [ Thu Sep 15 18:06:09 2022 ] Top5: 71.69%
97
+ [ Thu Sep 15 18:06:09 2022 ] Training epoch: 16
98
+ [ Thu Sep 15 18:06:33 2022 ] Batch(54/123) done. Loss: 0.5238 lr:0.100000 network_time: 0.0507
99
+ [ Thu Sep 15 18:06:58 2022 ] Eval epoch: 16
100
+ [ Thu Sep 15 18:07:21 2022 ] Mean test loss of 258 batches: 3.097450017929077.
101
+ [ Thu Sep 15 18:07:21 2022 ] Top1: 37.42%
102
+ [ Thu Sep 15 18:07:21 2022 ] Top5: 76.38%
103
+ [ Thu Sep 15 18:07:21 2022 ] Training epoch: 17
104
+ [ Thu Sep 15 18:07:37 2022 ] Batch(31/123) done. Loss: 0.6358 lr:0.100000 network_time: 0.0481
105
+ [ Thu Sep 15 18:08:11 2022 ] Eval epoch: 17
106
+ [ Thu Sep 15 18:08:33 2022 ] Mean test loss of 258 batches: 3.424067497253418.
107
+ [ Thu Sep 15 18:08:33 2022 ] Top1: 42.69%
108
+ [ Thu Sep 15 18:08:34 2022 ] Top5: 77.68%
109
+ [ Thu Sep 15 18:08:34 2022 ] Training epoch: 18
110
+ [ Thu Sep 15 18:08:41 2022 ] Batch(8/123) done. Loss: 0.3318 lr:0.100000 network_time: 0.0513
111
+ [ Thu Sep 15 18:09:18 2022 ] Batch(108/123) done. Loss: 0.4070 lr:0.100000 network_time: 0.0516
112
+ [ Thu Sep 15 18:09:23 2022 ] Eval epoch: 18
113
+ [ Thu Sep 15 18:09:46 2022 ] Mean test loss of 258 batches: 2.758646011352539.
114
+ [ Thu Sep 15 18:09:46 2022 ] Top1: 45.64%
115
+ [ Thu Sep 15 18:09:46 2022 ] Top5: 77.97%
116
+ [ Thu Sep 15 18:09:46 2022 ] Training epoch: 19
117
+ [ Thu Sep 15 18:10:22 2022 ] Batch(85/123) done. Loss: 0.5093 lr:0.100000 network_time: 0.0535
118
+ [ Thu Sep 15 18:10:36 2022 ] Eval epoch: 19
119
+ [ Thu Sep 15 18:10:59 2022 ] Mean test loss of 258 batches: 2.7793309688568115.
120
+ [ Thu Sep 15 18:10:59 2022 ] Top1: 46.62%
121
+ [ Thu Sep 15 18:10:59 2022 ] Top5: 79.77%
122
+ [ Thu Sep 15 18:10:59 2022 ] Training epoch: 20
123
+ [ Thu Sep 15 18:11:26 2022 ] Batch(62/123) done. Loss: 0.3249 lr:0.100000 network_time: 0.0485
124
+ [ Thu Sep 15 18:11:48 2022 ] Eval epoch: 20
125
+ [ Thu Sep 15 18:12:11 2022 ] Mean test loss of 258 batches: 3.609997034072876.
126
+ [ Thu Sep 15 18:12:11 2022 ] Top1: 39.23%
127
+ [ Thu Sep 15 18:12:11 2022 ] Top5: 69.32%
128
+ [ Thu Sep 15 18:12:11 2022 ] Training epoch: 21
129
+ [ Thu Sep 15 18:12:30 2022 ] Batch(39/123) done. Loss: 0.4764 lr:0.100000 network_time: 0.0517
130
+ [ Thu Sep 15 18:13:01 2022 ] Eval epoch: 21
131
+ [ Thu Sep 15 18:13:24 2022 ] Mean test loss of 258 batches: 2.8876986503601074.
132
+ [ Thu Sep 15 18:13:24 2022 ] Top1: 46.73%
133
+ [ Thu Sep 15 18:13:24 2022 ] Top5: 80.28%
134
+ [ Thu Sep 15 18:13:24 2022 ] Training epoch: 22
135
+ [ Thu Sep 15 18:13:34 2022 ] Batch(16/123) done. Loss: 0.3911 lr:0.100000 network_time: 0.0489
136
+ [ Thu Sep 15 18:14:11 2022 ] Batch(116/123) done. Loss: 0.3523 lr:0.100000 network_time: 0.0512
137
+ [ Thu Sep 15 18:14:13 2022 ] Eval epoch: 22
138
+ [ Thu Sep 15 18:14:36 2022 ] Mean test loss of 258 batches: 3.206068992614746.
139
+ [ Thu Sep 15 18:14:36 2022 ] Top1: 40.12%
140
+ [ Thu Sep 15 18:14:36 2022 ] Top5: 75.93%
141
+ [ Thu Sep 15 18:14:36 2022 ] Training epoch: 23
142
+ [ Thu Sep 15 18:15:15 2022 ] Batch(93/123) done. Loss: 0.6690 lr:0.100000 network_time: 0.0495
143
+ [ Thu Sep 15 18:15:26 2022 ] Eval epoch: 23
144
+ [ Thu Sep 15 18:15:49 2022 ] Mean test loss of 258 batches: 2.487531900405884.
145
+ [ Thu Sep 15 18:15:49 2022 ] Top1: 50.01%
146
+ [ Thu Sep 15 18:15:49 2022 ] Top5: 81.78%
147
+ [ Thu Sep 15 18:15:49 2022 ] Training epoch: 24
148
+ [ Thu Sep 15 18:16:19 2022 ] Batch(70/123) done. Loss: 0.3341 lr:0.100000 network_time: 0.0536
149
+ [ Thu Sep 15 18:16:38 2022 ] Eval epoch: 24
150
+ [ Thu Sep 15 18:17:00 2022 ] Mean test loss of 258 batches: 3.2020022869110107.
151
+ [ Thu Sep 15 18:17:00 2022 ] Top1: 46.64%
152
+ [ Thu Sep 15 18:17:00 2022 ] Top5: 81.62%
153
+ [ Thu Sep 15 18:17:00 2022 ] Training epoch: 25
154
+ [ Thu Sep 15 18:17:22 2022 ] Batch(47/123) done. Loss: 0.2572 lr:0.100000 network_time: 0.0522
155
+ [ Thu Sep 15 18:17:50 2022 ] Eval epoch: 25
156
+ [ Thu Sep 15 18:18:13 2022 ] Mean test loss of 258 batches: 2.5256783962249756.
157
+ [ Thu Sep 15 18:18:13 2022 ] Top1: 53.64%
158
+ [ Thu Sep 15 18:18:13 2022 ] Top5: 85.21%
159
+ [ Thu Sep 15 18:18:13 2022 ] Training epoch: 26
160
+ [ Thu Sep 15 18:18:27 2022 ] Batch(24/123) done. Loss: 0.4852 lr:0.100000 network_time: 0.0493
161
+ [ Thu Sep 15 18:19:03 2022 ] Eval epoch: 26
162
+ [ Thu Sep 15 18:19:26 2022 ] Mean test loss of 258 batches: 2.4896974563598633.
163
+ [ Thu Sep 15 18:19:26 2022 ] Top1: 49.52%
164
+ [ Thu Sep 15 18:19:26 2022 ] Top5: 81.65%
165
+ [ Thu Sep 15 18:19:26 2022 ] Training epoch: 27
166
+ [ Thu Sep 15 18:19:30 2022 ] Batch(1/123) done. Loss: 0.1621 lr:0.100000 network_time: 0.0512
167
+ [ Thu Sep 15 18:20:07 2022 ] Batch(101/123) done. Loss: 0.4268 lr:0.100000 network_time: 0.0532
168
+ [ Thu Sep 15 18:20:15 2022 ] Eval epoch: 27
169
+ [ Thu Sep 15 18:20:38 2022 ] Mean test loss of 258 batches: 2.5876340866088867.
170
+ [ Thu Sep 15 18:20:38 2022 ] Top1: 50.75%
171
+ [ Thu Sep 15 18:20:38 2022 ] Top5: 83.54%
172
+ [ Thu Sep 15 18:20:38 2022 ] Training epoch: 28
173
+ [ Thu Sep 15 18:21:11 2022 ] Batch(78/123) done. Loss: 0.6055 lr:0.100000 network_time: 0.0523
174
+ [ Thu Sep 15 18:21:27 2022 ] Eval epoch: 28
175
+ [ Thu Sep 15 18:21:50 2022 ] Mean test loss of 258 batches: 2.8039703369140625.
176
+ [ Thu Sep 15 18:21:50 2022 ] Top1: 49.68%
177
+ [ Thu Sep 15 18:21:50 2022 ] Top5: 83.48%
178
+ [ Thu Sep 15 18:21:50 2022 ] Training epoch: 29
179
+ [ Thu Sep 15 18:22:15 2022 ] Batch(55/123) done. Loss: 0.2838 lr:0.100000 network_time: 0.0543
180
+ [ Thu Sep 15 18:22:40 2022 ] Eval epoch: 29
181
+ [ Thu Sep 15 18:23:03 2022 ] Mean test loss of 258 batches: 2.4521985054016113.
182
+ [ Thu Sep 15 18:23:03 2022 ] Top1: 51.04%
183
+ [ Thu Sep 15 18:23:03 2022 ] Top5: 84.35%
184
+ [ Thu Sep 15 18:23:03 2022 ] Training epoch: 30
185
+ [ Thu Sep 15 18:23:19 2022 ] Batch(32/123) done. Loss: 0.3162 lr:0.100000 network_time: 0.0495
186
+ [ Thu Sep 15 18:23:52 2022 ] Eval epoch: 30
187
+ [ Thu Sep 15 18:24:15 2022 ] Mean test loss of 258 batches: 2.2596545219421387.
188
+ [ Thu Sep 15 18:24:15 2022 ] Top1: 54.98%
189
+ [ Thu Sep 15 18:24:15 2022 ] Top5: 84.12%
190
+ [ Thu Sep 15 18:24:15 2022 ] Training epoch: 31
191
+ [ Thu Sep 15 18:24:23 2022 ] Batch(9/123) done. Loss: 0.2001 lr:0.100000 network_time: 0.0504
192
+ [ Thu Sep 15 18:24:59 2022 ] Batch(109/123) done. Loss: 0.2150 lr:0.100000 network_time: 0.0578
193
+ [ Thu Sep 15 18:25:05 2022 ] Eval epoch: 31
194
+ [ Thu Sep 15 18:25:27 2022 ] Mean test loss of 258 batches: 2.3336315155029297.
195
+ [ Thu Sep 15 18:25:27 2022 ] Top1: 53.68%
196
+ [ Thu Sep 15 18:25:27 2022 ] Top5: 83.42%
197
+ [ Thu Sep 15 18:25:27 2022 ] Training epoch: 32
198
+ [ Thu Sep 15 18:26:03 2022 ] Batch(86/123) done. Loss: 0.2091 lr:0.100000 network_time: 0.0493
199
+ [ Thu Sep 15 18:26:17 2022 ] Eval epoch: 32
200
+ [ Thu Sep 15 18:26:39 2022 ] Mean test loss of 258 batches: 3.5852153301239014.
201
+ [ Thu Sep 15 18:26:39 2022 ] Top1: 39.52%
202
+ [ Thu Sep 15 18:26:39 2022 ] Top5: 75.45%
203
+ [ Thu Sep 15 18:26:39 2022 ] Training epoch: 33
204
+ [ Thu Sep 15 18:27:07 2022 ] Batch(63/123) done. Loss: 0.1526 lr:0.100000 network_time: 0.0513
205
+ [ Thu Sep 15 18:27:29 2022 ] Eval epoch: 33
206
+ [ Thu Sep 15 18:27:52 2022 ] Mean test loss of 258 batches: 2.2551028728485107.
207
+ [ Thu Sep 15 18:27:52 2022 ] Top1: 55.06%
208
+ [ Thu Sep 15 18:27:52 2022 ] Top5: 85.82%
209
+ [ Thu Sep 15 18:27:52 2022 ] Training epoch: 34
210
+ [ Thu Sep 15 18:28:11 2022 ] Batch(40/123) done. Loss: 0.3211 lr:0.100000 network_time: 0.0549
211
+ [ Thu Sep 15 18:28:42 2022 ] Eval epoch: 34
212
+ [ Thu Sep 15 18:29:05 2022 ] Mean test loss of 258 batches: 2.2728934288024902.
213
+ [ Thu Sep 15 18:29:05 2022 ] Top1: 52.71%
214
+ [ Thu Sep 15 18:29:05 2022 ] Top5: 85.59%
215
+ [ Thu Sep 15 18:29:05 2022 ] Training epoch: 35
216
+ [ Thu Sep 15 18:29:16 2022 ] Batch(17/123) done. Loss: 0.1437 lr:0.100000 network_time: 0.0499
217
+ [ Thu Sep 15 18:29:52 2022 ] Batch(117/123) done. Loss: 0.2225 lr:0.100000 network_time: 0.0519
218
+ [ Thu Sep 15 18:29:54 2022 ] Eval epoch: 35
219
+ [ Thu Sep 15 18:30:17 2022 ] Mean test loss of 258 batches: 2.5030672550201416.
220
+ [ Thu Sep 15 18:30:17 2022 ] Top1: 49.17%
221
+ [ Thu Sep 15 18:30:17 2022 ] Top5: 82.40%
222
+ [ Thu Sep 15 18:30:17 2022 ] Training epoch: 36
223
+ [ Thu Sep 15 18:30:56 2022 ] Batch(94/123) done. Loss: 0.1613 lr:0.100000 network_time: 0.0522
224
+ [ Thu Sep 15 18:31:06 2022 ] Eval epoch: 36
225
+ [ Thu Sep 15 18:31:29 2022 ] Mean test loss of 258 batches: 3.4875588417053223.
226
+ [ Thu Sep 15 18:31:29 2022 ] Top1: 38.61%
227
+ [ Thu Sep 15 18:31:29 2022 ] Top5: 70.11%
228
+ [ Thu Sep 15 18:31:29 2022 ] Training epoch: 37
229
+ [ Thu Sep 15 18:31:59 2022 ] Batch(71/123) done. Loss: 0.3926 lr:0.100000 network_time: 0.0515
230
+ [ Thu Sep 15 18:32:18 2022 ] Eval epoch: 37
231
+ [ Thu Sep 15 18:32:41 2022 ] Mean test loss of 258 batches: 2.094217300415039.
232
+ [ Thu Sep 15 18:32:41 2022 ] Top1: 56.10%
233
+ [ Thu Sep 15 18:32:41 2022 ] Top5: 86.88%
234
+ [ Thu Sep 15 18:32:41 2022 ] Training epoch: 38
235
+ [ Thu Sep 15 18:33:03 2022 ] Batch(48/123) done. Loss: 0.1567 lr:0.100000 network_time: 0.0518
236
+ [ Thu Sep 15 18:33:31 2022 ] Eval epoch: 38
237
+ [ Thu Sep 15 18:33:53 2022 ] Mean test loss of 258 batches: 3.110715866088867.
238
+ [ Thu Sep 15 18:33:53 2022 ] Top1: 43.77%
239
+ [ Thu Sep 15 18:33:53 2022 ] Top5: 78.10%
240
+ [ Thu Sep 15 18:33:53 2022 ] Training epoch: 39
241
+ [ Thu Sep 15 18:34:07 2022 ] Batch(25/123) done. Loss: 0.3969 lr:0.100000 network_time: 0.0491
242
+ [ Thu Sep 15 18:34:43 2022 ] Eval epoch: 39
243
+ [ Thu Sep 15 18:35:06 2022 ] Mean test loss of 258 batches: 2.423691987991333.
244
+ [ Thu Sep 15 18:35:06 2022 ] Top1: 50.49%
245
+ [ Thu Sep 15 18:35:06 2022 ] Top5: 85.05%
246
+ [ Thu Sep 15 18:35:06 2022 ] Training epoch: 40
247
+ [ Thu Sep 15 18:35:11 2022 ] Batch(2/123) done. Loss: 0.3346 lr:0.100000 network_time: 0.0495
248
+ [ Thu Sep 15 18:35:47 2022 ] Batch(102/123) done. Loss: 0.2849 lr:0.100000 network_time: 0.0472
249
+ [ Thu Sep 15 18:35:55 2022 ] Eval epoch: 40
250
+ [ Thu Sep 15 18:36:18 2022 ] Mean test loss of 258 batches: 1.968823790550232.
251
+ [ Thu Sep 15 18:36:18 2022 ] Top1: 56.49%
252
+ [ Thu Sep 15 18:36:18 2022 ] Top5: 86.26%
253
+ [ Thu Sep 15 18:36:18 2022 ] Training epoch: 41
254
+ [ Thu Sep 15 18:36:52 2022 ] Batch(79/123) done. Loss: 0.2500 lr:0.100000 network_time: 0.0508
255
+ [ Thu Sep 15 18:37:08 2022 ] Eval epoch: 41
256
+ [ Thu Sep 15 18:37:30 2022 ] Mean test loss of 258 batches: 3.1059272289276123.
257
+ [ Thu Sep 15 18:37:30 2022 ] Top1: 46.44%
258
+ [ Thu Sep 15 18:37:30 2022 ] Top5: 79.26%
259
+ [ Thu Sep 15 18:37:30 2022 ] Training epoch: 42
260
+ [ Thu Sep 15 18:37:56 2022 ] Batch(56/123) done. Loss: 0.2638 lr:0.100000 network_time: 0.0531
261
+ [ Thu Sep 15 18:38:20 2022 ] Eval epoch: 42
262
+ [ Thu Sep 15 18:38:43 2022 ] Mean test loss of 258 batches: 3.350965976715088.
263
+ [ Thu Sep 15 18:38:43 2022 ] Top1: 46.13%
264
+ [ Thu Sep 15 18:38:43 2022 ] Top5: 79.26%
265
+ [ Thu Sep 15 18:38:43 2022 ] Training epoch: 43
266
+ [ Thu Sep 15 18:39:00 2022 ] Batch(33/123) done. Loss: 0.1191 lr:0.100000 network_time: 0.0537
267
+ [ Thu Sep 15 18:39:33 2022 ] Eval epoch: 43
268
+ [ Thu Sep 15 18:39:56 2022 ] Mean test loss of 258 batches: 2.9140000343322754.
269
+ [ Thu Sep 15 18:39:56 2022 ] Top1: 49.75%
270
+ [ Thu Sep 15 18:39:56 2022 ] Top5: 83.55%
271
+ [ Thu Sep 15 18:39:56 2022 ] Training epoch: 44
272
+ [ Thu Sep 15 18:40:04 2022 ] Batch(10/123) done. Loss: 0.1089 lr:0.100000 network_time: 0.0501
273
+ [ Thu Sep 15 18:40:41 2022 ] Batch(110/123) done. Loss: 0.1441 lr:0.100000 network_time: 0.0509
274
+ [ Thu Sep 15 18:40:45 2022 ] Eval epoch: 44
275
+ [ Thu Sep 15 18:41:08 2022 ] Mean test loss of 258 batches: 2.5323615074157715.
276
+ [ Thu Sep 15 18:41:08 2022 ] Top1: 49.69%
277
+ [ Thu Sep 15 18:41:08 2022 ] Top5: 81.96%
278
+ [ Thu Sep 15 18:41:08 2022 ] Training epoch: 45
279
+ [ Thu Sep 15 18:41:44 2022 ] Batch(87/123) done. Loss: 0.0759 lr:0.100000 network_time: 0.0525
280
+ [ Thu Sep 15 18:41:57 2022 ] Eval epoch: 45
281
+ [ Thu Sep 15 18:42:20 2022 ] Mean test loss of 258 batches: 2.6945436000823975.
282
+ [ Thu Sep 15 18:42:20 2022 ] Top1: 48.29%
283
+ [ Thu Sep 15 18:42:20 2022 ] Top5: 80.38%
284
+ [ Thu Sep 15 18:42:20 2022 ] Training epoch: 46
285
+ [ Thu Sep 15 18:42:48 2022 ] Batch(64/123) done. Loss: 0.1732 lr:0.100000 network_time: 0.0540
286
+ [ Thu Sep 15 18:43:10 2022 ] Eval epoch: 46
287
+ [ Thu Sep 15 18:43:33 2022 ] Mean test loss of 258 batches: 3.0958104133605957.
288
+ [ Thu Sep 15 18:43:33 2022 ] Top1: 48.04%
289
+ [ Thu Sep 15 18:43:33 2022 ] Top5: 81.45%
290
+ [ Thu Sep 15 18:43:33 2022 ] Training epoch: 47
291
+ [ Thu Sep 15 18:43:53 2022 ] Batch(41/123) done. Loss: 0.3669 lr:0.100000 network_time: 0.0520
292
+ [ Thu Sep 15 18:44:23 2022 ] Eval epoch: 47
293
+ [ Thu Sep 15 18:44:46 2022 ] Mean test loss of 258 batches: 3.1610655784606934.
294
+ [ Thu Sep 15 18:44:46 2022 ] Top1: 48.95%
295
+ [ Thu Sep 15 18:44:46 2022 ] Top5: 82.23%
296
+ [ Thu Sep 15 18:44:46 2022 ] Training epoch: 48
297
+ [ Thu Sep 15 18:44:57 2022 ] Batch(18/123) done. Loss: 0.2774 lr:0.100000 network_time: 0.0512
298
+ [ Thu Sep 15 18:45:33 2022 ] Batch(118/123) done. Loss: 0.1066 lr:0.100000 network_time: 0.0502
299
+ [ Thu Sep 15 18:45:35 2022 ] Eval epoch: 48
300
+ [ Thu Sep 15 18:45:58 2022 ] Mean test loss of 258 batches: 2.4199440479278564.
301
+ [ Thu Sep 15 18:45:58 2022 ] Top1: 53.91%
302
+ [ Thu Sep 15 18:45:58 2022 ] Top5: 84.36%
303
+ [ Thu Sep 15 18:45:58 2022 ] Training epoch: 49
304
+ [ Thu Sep 15 18:46:37 2022 ] Batch(95/123) done. Loss: 0.1835 lr:0.100000 network_time: 0.0520
305
+ [ Thu Sep 15 18:46:47 2022 ] Eval epoch: 49
306
+ [ Thu Sep 15 18:47:10 2022 ] Mean test loss of 258 batches: 2.5721118450164795.
307
+ [ Thu Sep 15 18:47:10 2022 ] Top1: 53.58%
308
+ [ Thu Sep 15 18:47:10 2022 ] Top5: 84.75%
309
+ [ Thu Sep 15 18:47:10 2022 ] Training epoch: 50
310
+ [ Thu Sep 15 18:47:41 2022 ] Batch(72/123) done. Loss: 0.1424 lr:0.100000 network_time: 0.0493
311
+ [ Thu Sep 15 18:47:59 2022 ] Eval epoch: 50
312
+ [ Thu Sep 15 18:48:22 2022 ] Mean test loss of 258 batches: 2.8164403438568115.
313
+ [ Thu Sep 15 18:48:22 2022 ] Top1: 49.89%
314
+ [ Thu Sep 15 18:48:22 2022 ] Top5: 83.20%
315
+ [ Thu Sep 15 18:48:22 2022 ] Training epoch: 51
316
+ [ Thu Sep 15 18:48:45 2022 ] Batch(49/123) done. Loss: 0.2530 lr:0.100000 network_time: 0.0525
317
+ [ Thu Sep 15 18:49:11 2022 ] Eval epoch: 51
318
+ [ Thu Sep 15 18:49:34 2022 ] Mean test loss of 258 batches: 3.0064849853515625.
319
+ [ Thu Sep 15 18:49:34 2022 ] Top1: 48.81%
320
+ [ Thu Sep 15 18:49:34 2022 ] Top5: 81.22%
321
+ [ Thu Sep 15 18:49:34 2022 ] Training epoch: 52
322
+ [ Thu Sep 15 18:49:48 2022 ] Batch(26/123) done. Loss: 0.2124 lr:0.100000 network_time: 0.0564
323
+ [ Thu Sep 15 18:50:23 2022 ] Eval epoch: 52
324
+ [ Thu Sep 15 18:50:46 2022 ] Mean test loss of 258 batches: 2.5954577922821045.
325
+ [ Thu Sep 15 18:50:47 2022 ] Top1: 51.74%
326
+ [ Thu Sep 15 18:50:47 2022 ] Top5: 84.36%
327
+ [ Thu Sep 15 18:50:47 2022 ] Training epoch: 53
328
+ [ Thu Sep 15 18:50:52 2022 ] Batch(3/123) done. Loss: 0.1510 lr:0.100000 network_time: 0.0513
329
+ [ Thu Sep 15 18:51:29 2022 ] Batch(103/123) done. Loss: 0.1510 lr:0.100000 network_time: 0.0507
330
+ [ Thu Sep 15 18:51:36 2022 ] Eval epoch: 53
331
+ [ Thu Sep 15 18:51:59 2022 ] Mean test loss of 258 batches: 2.450814723968506.
332
+ [ Thu Sep 15 18:51:59 2022 ] Top1: 52.99%
333
+ [ Thu Sep 15 18:51:59 2022 ] Top5: 84.93%
334
+ [ Thu Sep 15 18:51:59 2022 ] Training epoch: 54
335
+ [ Thu Sep 15 18:52:33 2022 ] Batch(80/123) done. Loss: 0.4379 lr:0.100000 network_time: 0.0502
336
+ [ Thu Sep 15 18:52:49 2022 ] Eval epoch: 54
337
+ [ Thu Sep 15 18:53:11 2022 ] Mean test loss of 258 batches: 2.9808332920074463.
338
+ [ Thu Sep 15 18:53:12 2022 ] Top1: 49.61%
339
+ [ Thu Sep 15 18:53:12 2022 ] Top5: 82.16%
340
+ [ Thu Sep 15 18:53:12 2022 ] Training epoch: 55
341
+ [ Thu Sep 15 18:53:37 2022 ] Batch(57/123) done. Loss: 0.1202 lr:0.100000 network_time: 0.0512
342
+ [ Thu Sep 15 18:54:01 2022 ] Eval epoch: 55
343
+ [ Thu Sep 15 18:54:24 2022 ] Mean test loss of 258 batches: 2.49838924407959.
344
+ [ Thu Sep 15 18:54:24 2022 ] Top1: 51.74%
345
+ [ Thu Sep 15 18:54:24 2022 ] Top5: 84.11%
346
+ [ Thu Sep 15 18:54:24 2022 ] Training epoch: 56
347
+ [ Thu Sep 15 18:54:41 2022 ] Batch(34/123) done. Loss: 0.1587 lr:0.100000 network_time: 0.0521
348
+ [ Thu Sep 15 18:55:13 2022 ] Eval epoch: 56
349
+ [ Thu Sep 15 18:55:36 2022 ] Mean test loss of 258 batches: 2.4359495639801025.
350
+ [ Thu Sep 15 18:55:36 2022 ] Top1: 53.91%
351
+ [ Thu Sep 15 18:55:36 2022 ] Top5: 84.18%
352
+ [ Thu Sep 15 18:55:36 2022 ] Training epoch: 57
353
+ [ Thu Sep 15 18:55:45 2022 ] Batch(11/123) done. Loss: 0.1714 lr:0.100000 network_time: 0.0544
354
+ [ Thu Sep 15 18:56:21 2022 ] Batch(111/123) done. Loss: 0.1316 lr:0.100000 network_time: 0.0676
355
+ [ Thu Sep 15 18:56:26 2022 ] Eval epoch: 57
356
+ [ Thu Sep 15 18:56:49 2022 ] Mean test loss of 258 batches: 2.2723708152770996.
357
+ [ Thu Sep 15 18:56:49 2022 ] Top1: 55.72%
358
+ [ Thu Sep 15 18:56:49 2022 ] Top5: 86.43%
359
+ [ Thu Sep 15 18:56:49 2022 ] Training epoch: 58
360
+ [ Thu Sep 15 18:57:26 2022 ] Batch(88/123) done. Loss: 0.0955 lr:0.100000 network_time: 0.0541
361
+ [ Thu Sep 15 18:57:38 2022 ] Eval epoch: 58
362
+ [ Thu Sep 15 18:58:01 2022 ] Mean test loss of 258 batches: 3.134904623031616.
363
+ [ Thu Sep 15 18:58:01 2022 ] Top1: 47.00%
364
+ [ Thu Sep 15 18:58:01 2022 ] Top5: 79.91%
365
+ [ Thu Sep 15 18:58:01 2022 ] Training epoch: 59
366
+ [ Thu Sep 15 18:58:30 2022 ] Batch(65/123) done. Loss: 0.1218 lr:0.100000 network_time: 0.0520
367
+ [ Thu Sep 15 18:58:51 2022 ] Eval epoch: 59
368
+ [ Thu Sep 15 18:59:14 2022 ] Mean test loss of 258 batches: 2.147733449935913.
369
+ [ Thu Sep 15 18:59:14 2022 ] Top1: 57.50%
370
+ [ Thu Sep 15 18:59:14 2022 ] Top5: 88.17%
371
+ [ Thu Sep 15 18:59:14 2022 ] Training epoch: 60
372
+ [ Thu Sep 15 18:59:34 2022 ] Batch(42/123) done. Loss: 0.0591 lr:0.100000 network_time: 0.0594
373
+ [ Thu Sep 15 19:00:03 2022 ] Eval epoch: 60
374
+ [ Thu Sep 15 19:00:26 2022 ] Mean test loss of 258 batches: 2.7868356704711914.
375
+ [ Thu Sep 15 19:00:26 2022 ] Top1: 52.62%
376
+ [ Thu Sep 15 19:00:26 2022 ] Top5: 84.46%
377
+ [ Thu Sep 15 19:00:26 2022 ] Training epoch: 61
378
+ [ Thu Sep 15 19:00:38 2022 ] Batch(19/123) done. Loss: 0.1423 lr:0.010000 network_time: 0.0548
379
+ [ Thu Sep 15 19:01:14 2022 ] Batch(119/123) done. Loss: 0.0520 lr:0.010000 network_time: 0.0510
380
+ [ Thu Sep 15 19:01:16 2022 ] Eval epoch: 61
381
+ [ Thu Sep 15 19:01:39 2022 ] Mean test loss of 258 batches: 1.9882783889770508.
382
+ [ Thu Sep 15 19:01:39 2022 ] Top1: 61.74%
383
+ [ Thu Sep 15 19:01:39 2022 ] Top5: 90.26%
384
+ [ Thu Sep 15 19:01:39 2022 ] Training epoch: 62
385
+ [ Thu Sep 15 19:02:18 2022 ] Batch(96/123) done. Loss: 0.0392 lr:0.010000 network_time: 0.0556
386
+ [ Thu Sep 15 19:02:28 2022 ] Eval epoch: 62
387
+ [ Thu Sep 15 19:02:51 2022 ] Mean test loss of 258 batches: 1.8952713012695312.
388
+ [ Thu Sep 15 19:02:51 2022 ] Top1: 62.84%
389
+ [ Thu Sep 15 19:02:51 2022 ] Top5: 90.74%
390
+ [ Thu Sep 15 19:02:51 2022 ] Training epoch: 63
391
+ [ Thu Sep 15 19:03:23 2022 ] Batch(73/123) done. Loss: 0.0216 lr:0.010000 network_time: 0.0502
392
+ [ Thu Sep 15 19:03:41 2022 ] Eval epoch: 63
393
+ [ Thu Sep 15 19:04:04 2022 ] Mean test loss of 258 batches: 2.003861665725708.
394
+ [ Thu Sep 15 19:04:04 2022 ] Top1: 62.39%
395
+ [ Thu Sep 15 19:04:04 2022 ] Top5: 90.72%
396
+ [ Thu Sep 15 19:04:04 2022 ] Training epoch: 64
397
+ [ Thu Sep 15 19:04:27 2022 ] Batch(50/123) done. Loss: 0.0158 lr:0.010000 network_time: 0.0504
398
+ [ Thu Sep 15 19:04:53 2022 ] Eval epoch: 64
399
+ [ Thu Sep 15 19:05:17 2022 ] Mean test loss of 258 batches: 1.8763132095336914.
400
+ [ Thu Sep 15 19:05:17 2022 ] Top1: 63.09%
401
+ [ Thu Sep 15 19:05:17 2022 ] Top5: 90.86%
402
+ [ Thu Sep 15 19:05:17 2022 ] Training epoch: 65
403
+ [ Thu Sep 15 19:05:31 2022 ] Batch(27/123) done. Loss: 0.0109 lr:0.010000 network_time: 0.0502
404
+ [ Thu Sep 15 19:06:06 2022 ] Eval epoch: 65
405
+ [ Thu Sep 15 19:06:29 2022 ] Mean test loss of 258 batches: 1.8442339897155762.
406
+ [ Thu Sep 15 19:06:29 2022 ] Top1: 63.47%
407
+ [ Thu Sep 15 19:06:29 2022 ] Top5: 91.11%
408
+ [ Thu Sep 15 19:06:29 2022 ] Training epoch: 66
409
+ [ Thu Sep 15 19:06:35 2022 ] Batch(4/123) done. Loss: 0.0147 lr:0.010000 network_time: 0.0531
410
+ [ Thu Sep 15 19:07:12 2022 ] Batch(104/123) done. Loss: 0.0279 lr:0.010000 network_time: 0.0553
411
+ [ Thu Sep 15 19:07:18 2022 ] Eval epoch: 66
412
+ [ Thu Sep 15 19:07:41 2022 ] Mean test loss of 258 batches: 1.8917242288589478.
413
+ [ Thu Sep 15 19:07:41 2022 ] Top1: 63.57%
414
+ [ Thu Sep 15 19:07:41 2022 ] Top5: 90.88%
415
+ [ Thu Sep 15 19:07:41 2022 ] Training epoch: 67
416
+ [ Thu Sep 15 19:08:16 2022 ] Batch(81/123) done. Loss: 0.0362 lr:0.010000 network_time: 0.0500
417
+ [ Thu Sep 15 19:08:31 2022 ] Eval epoch: 67
418
+ [ Thu Sep 15 19:08:54 2022 ] Mean test loss of 258 batches: 2.1126983165740967.
419
+ [ Thu Sep 15 19:08:54 2022 ] Top1: 59.51%
420
+ [ Thu Sep 15 19:08:54 2022 ] Top5: 89.03%
421
+ [ Thu Sep 15 19:08:54 2022 ] Training epoch: 68
422
+ [ Thu Sep 15 19:09:21 2022 ] Batch(58/123) done. Loss: 0.0134 lr:0.010000 network_time: 0.0545
423
+ [ Thu Sep 15 19:09:44 2022 ] Eval epoch: 68
424
+ [ Thu Sep 15 19:10:07 2022 ] Mean test loss of 258 batches: 1.7624281644821167.
425
+ [ Thu Sep 15 19:10:07 2022 ] Top1: 63.74%
426
+ [ Thu Sep 15 19:10:07 2022 ] Top5: 91.11%
427
+ [ Thu Sep 15 19:10:07 2022 ] Training epoch: 69
428
+ [ Thu Sep 15 19:10:25 2022 ] Batch(35/123) done. Loss: 0.0205 lr:0.010000 network_time: 0.0537
429
+ [ Thu Sep 15 19:10:57 2022 ] Eval epoch: 69
430
+ [ Thu Sep 15 19:11:20 2022 ] Mean test loss of 258 batches: 1.856307864189148.
431
+ [ Thu Sep 15 19:11:20 2022 ] Top1: 63.09%
432
+ [ Thu Sep 15 19:11:20 2022 ] Top5: 90.88%
433
+ [ Thu Sep 15 19:11:20 2022 ] Training epoch: 70
434
+ [ Thu Sep 15 19:11:29 2022 ] Batch(12/123) done. Loss: 0.0041 lr:0.010000 network_time: 0.0510
435
+ [ Thu Sep 15 19:12:05 2022 ] Batch(112/123) done. Loss: 0.0076 lr:0.010000 network_time: 0.0499
436
+ [ Thu Sep 15 19:12:09 2022 ] Eval epoch: 70
437
+ [ Thu Sep 15 19:12:32 2022 ] Mean test loss of 258 batches: 1.915732502937317.
438
+ [ Thu Sep 15 19:12:32 2022 ] Top1: 63.19%
439
+ [ Thu Sep 15 19:12:32 2022 ] Top5: 90.90%
440
+ [ Thu Sep 15 19:12:32 2022 ] Training epoch: 71
441
+ [ Thu Sep 15 19:13:09 2022 ] Batch(89/123) done. Loss: 0.0098 lr:0.010000 network_time: 0.0484
442
+ [ Thu Sep 15 19:13:22 2022 ] Eval epoch: 71
443
+ [ Thu Sep 15 19:13:45 2022 ] Mean test loss of 258 batches: 1.8895395994186401.
444
+ [ Thu Sep 15 19:13:45 2022 ] Top1: 63.75%
445
+ [ Thu Sep 15 19:13:45 2022 ] Top5: 91.01%
446
+ [ Thu Sep 15 19:13:45 2022 ] Training epoch: 72
447
+ [ Thu Sep 15 19:14:13 2022 ] Batch(66/123) done. Loss: 0.0059 lr:0.010000 network_time: 0.0547
448
+ [ Thu Sep 15 19:14:34 2022 ] Eval epoch: 72
449
+ [ Thu Sep 15 19:14:57 2022 ] Mean test loss of 258 batches: 1.7950252294540405.
450
+ [ Thu Sep 15 19:14:58 2022 ] Top1: 64.19%
451
+ [ Thu Sep 15 19:14:58 2022 ] Top5: 91.25%
452
+ [ Thu Sep 15 19:14:58 2022 ] Training epoch: 73
453
+ [ Thu Sep 15 19:15:17 2022 ] Batch(43/123) done. Loss: 0.0060 lr:0.010000 network_time: 0.0522
454
+ [ Thu Sep 15 19:15:47 2022 ] Eval epoch: 73
455
+ [ Thu Sep 15 19:16:09 2022 ] Mean test loss of 258 batches: 1.905718445777893.
456
+ [ Thu Sep 15 19:16:09 2022 ] Top1: 62.58%
457
+ [ Thu Sep 15 19:16:09 2022 ] Top5: 90.58%
458
+ [ Thu Sep 15 19:16:10 2022 ] Training epoch: 74
459
+ [ Thu Sep 15 19:16:21 2022 ] Batch(20/123) done. Loss: 0.0056 lr:0.010000 network_time: 0.0519
460
+ [ Thu Sep 15 19:16:58 2022 ] Batch(120/123) done. Loss: 0.0050 lr:0.010000 network_time: 0.0489
461
+ [ Thu Sep 15 19:16:59 2022 ] Eval epoch: 74
462
+ [ Thu Sep 15 19:17:21 2022 ] Mean test loss of 258 batches: 2.057525873184204.
463
+ [ Thu Sep 15 19:17:21 2022 ] Top1: 62.66%
464
+ [ Thu Sep 15 19:17:21 2022 ] Top5: 90.36%
465
+ [ Thu Sep 15 19:17:21 2022 ] Training epoch: 75
466
+ [ Thu Sep 15 19:18:01 2022 ] Batch(97/123) done. Loss: 0.0047 lr:0.010000 network_time: 0.0507
467
+ [ Thu Sep 15 19:18:11 2022 ] Eval epoch: 75
468
+ [ Thu Sep 15 19:18:34 2022 ] Mean test loss of 258 batches: 1.8387532234191895.
469
+ [ Thu Sep 15 19:18:34 2022 ] Top1: 63.74%
470
+ [ Thu Sep 15 19:18:34 2022 ] Top5: 91.15%
471
+ [ Thu Sep 15 19:18:34 2022 ] Training epoch: 76
472
+ [ Thu Sep 15 19:19:05 2022 ] Batch(74/123) done. Loss: 0.0057 lr:0.010000 network_time: 0.0508
473
+ [ Thu Sep 15 19:19:23 2022 ] Eval epoch: 76
474
+ [ Thu Sep 15 19:19:46 2022 ] Mean test loss of 258 batches: 1.8769611120224.
475
+ [ Thu Sep 15 19:19:46 2022 ] Top1: 62.75%
476
+ [ Thu Sep 15 19:19:46 2022 ] Top5: 90.65%
477
+ [ Thu Sep 15 19:19:46 2022 ] Training epoch: 77
478
+ [ Thu Sep 15 19:20:09 2022 ] Batch(51/123) done. Loss: 0.0035 lr:0.010000 network_time: 0.0497
479
+ [ Thu Sep 15 19:20:35 2022 ] Eval epoch: 77
480
+ [ Thu Sep 15 19:20:58 2022 ] Mean test loss of 258 batches: 2.2678627967834473.
481
+ [ Thu Sep 15 19:20:58 2022 ] Top1: 57.30%
482
+ [ Thu Sep 15 19:20:58 2022 ] Top5: 87.72%
483
+ [ Thu Sep 15 19:20:59 2022 ] Training epoch: 78
484
+ [ Thu Sep 15 19:21:13 2022 ] Batch(28/123) done. Loss: 0.0167 lr:0.010000 network_time: 0.0475
485
+ [ Thu Sep 15 19:21:47 2022 ] Eval epoch: 78
486
+ [ Thu Sep 15 19:22:10 2022 ] Mean test loss of 258 batches: 1.913560152053833.
487
+ [ Thu Sep 15 19:22:10 2022 ] Top1: 63.26%
488
+ [ Thu Sep 15 19:22:10 2022 ] Top5: 90.94%
489
+ [ Thu Sep 15 19:22:11 2022 ] Training epoch: 79
490
+ [ Thu Sep 15 19:22:17 2022 ] Batch(5/123) done. Loss: 0.0013 lr:0.010000 network_time: 0.0480
491
+ [ Thu Sep 15 19:22:53 2022 ] Batch(105/123) done. Loss: 0.0101 lr:0.010000 network_time: 0.0585
492
+ [ Thu Sep 15 19:23:00 2022 ] Eval epoch: 79
493
+ [ Thu Sep 15 19:23:23 2022 ] Mean test loss of 258 batches: 2.011691093444824.
494
+ [ Thu Sep 15 19:23:23 2022 ] Top1: 60.81%
495
+ [ Thu Sep 15 19:23:23 2022 ] Top5: 89.62%
496
+ [ Thu Sep 15 19:23:23 2022 ] Training epoch: 80
497
+ [ Thu Sep 15 19:23:57 2022 ] Batch(82/123) done. Loss: 0.0062 lr:0.010000 network_time: 0.0535
498
+ [ Thu Sep 15 19:24:12 2022 ] Eval epoch: 80
499
+ [ Thu Sep 15 19:24:36 2022 ] Mean test loss of 258 batches: 1.7865822315216064.
500
+ [ Thu Sep 15 19:24:36 2022 ] Top1: 64.81%
501
+ [ Thu Sep 15 19:24:36 2022 ] Top5: 91.50%
502
+ [ Thu Sep 15 19:24:36 2022 ] Training epoch: 81
503
+ [ Thu Sep 15 19:25:02 2022 ] Batch(59/123) done. Loss: 0.0120 lr:0.001000 network_time: 0.0486
504
+ [ Thu Sep 15 19:25:25 2022 ] Eval epoch: 81
505
+ [ Thu Sep 15 19:25:47 2022 ] Mean test loss of 258 batches: 1.9796819686889648.
506
+ [ Thu Sep 15 19:25:47 2022 ] Top1: 63.44%
507
+ [ Thu Sep 15 19:25:48 2022 ] Top5: 90.97%
508
+ [ Thu Sep 15 19:25:48 2022 ] Training epoch: 82
509
+ [ Thu Sep 15 19:26:05 2022 ] Batch(36/123) done. Loss: 0.0259 lr:0.001000 network_time: 0.0502
510
+ [ Thu Sep 15 19:26:37 2022 ] Eval epoch: 82
511
+ [ Thu Sep 15 19:27:00 2022 ] Mean test loss of 258 batches: 1.9062645435333252.
512
+ [ Thu Sep 15 19:27:00 2022 ] Top1: 63.77%
513
+ [ Thu Sep 15 19:27:00 2022 ] Top5: 90.79%
514
+ [ Thu Sep 15 19:27:00 2022 ] Training epoch: 83
515
+ [ Thu Sep 15 19:27:10 2022 ] Batch(13/123) done. Loss: 0.0035 lr:0.001000 network_time: 0.0514
516
+ [ Thu Sep 15 19:27:46 2022 ] Batch(113/123) done. Loss: 0.0037 lr:0.001000 network_time: 0.0515
517
+ [ Thu Sep 15 19:27:50 2022 ] Eval epoch: 83
518
+ [ Thu Sep 15 19:28:13 2022 ] Mean test loss of 258 batches: 1.767254114151001.
519
+ [ Thu Sep 15 19:28:13 2022 ] Top1: 64.40%
520
+ [ Thu Sep 15 19:28:13 2022 ] Top5: 91.42%
521
+ [ Thu Sep 15 19:28:13 2022 ] Training epoch: 84
522
+ [ Thu Sep 15 19:28:50 2022 ] Batch(90/123) done. Loss: 0.0036 lr:0.001000 network_time: 0.0538
523
+ [ Thu Sep 15 19:29:02 2022 ] Eval epoch: 84
524
+ [ Thu Sep 15 19:29:24 2022 ] Mean test loss of 258 batches: 1.8931705951690674.
525
+ [ Thu Sep 15 19:29:25 2022 ] Top1: 63.83%
526
+ [ Thu Sep 15 19:29:25 2022 ] Top5: 91.12%
527
+ [ Thu Sep 15 19:29:25 2022 ] Training epoch: 85
528
+ [ Thu Sep 15 19:29:53 2022 ] Batch(67/123) done. Loss: 0.0086 lr:0.001000 network_time: 0.0503
529
+ [ Thu Sep 15 19:30:14 2022 ] Eval epoch: 85
530
+ [ Thu Sep 15 19:30:37 2022 ] Mean test loss of 258 batches: 1.8183932304382324.
531
+ [ Thu Sep 15 19:30:37 2022 ] Top1: 64.40%
532
+ [ Thu Sep 15 19:30:37 2022 ] Top5: 91.35%
533
+ [ Thu Sep 15 19:30:37 2022 ] Training epoch: 86
534
+ [ Thu Sep 15 19:30:57 2022 ] Batch(44/123) done. Loss: 0.0045 lr:0.001000 network_time: 0.0499
535
+ [ Thu Sep 15 19:31:26 2022 ] Eval epoch: 86
536
+ [ Thu Sep 15 19:31:49 2022 ] Mean test loss of 258 batches: 1.9683622121810913.
537
+ [ Thu Sep 15 19:31:49 2022 ] Top1: 62.00%
538
+ [ Thu Sep 15 19:31:49 2022 ] Top5: 89.91%
539
+ [ Thu Sep 15 19:31:49 2022 ] Training epoch: 87
540
+ [ Thu Sep 15 19:32:01 2022 ] Batch(21/123) done. Loss: 0.0036 lr:0.001000 network_time: 0.0562
541
+ [ Thu Sep 15 19:32:38 2022 ] Batch(121/123) done. Loss: 0.0027 lr:0.001000 network_time: 0.0530
542
+ [ Thu Sep 15 19:32:38 2022 ] Eval epoch: 87
543
+ [ Thu Sep 15 19:33:01 2022 ] Mean test loss of 258 batches: 1.8199169635772705.
544
+ [ Thu Sep 15 19:33:01 2022 ] Top1: 64.49%
545
+ [ Thu Sep 15 19:33:01 2022 ] Top5: 91.22%
546
+ [ Thu Sep 15 19:33:01 2022 ] Training epoch: 88
547
+ [ Thu Sep 15 19:33:42 2022 ] Batch(98/123) done. Loss: 0.0041 lr:0.001000 network_time: 0.0566
548
+ [ Thu Sep 15 19:33:51 2022 ] Eval epoch: 88
549
+ [ Thu Sep 15 19:34:14 2022 ] Mean test loss of 258 batches: 1.8484878540039062.
550
+ [ Thu Sep 15 19:34:14 2022 ] Top1: 63.99%
551
+ [ Thu Sep 15 19:34:14 2022 ] Top5: 91.19%
552
+ [ Thu Sep 15 19:34:14 2022 ] Training epoch: 89
553
+ [ Thu Sep 15 19:34:45 2022 ] Batch(75/123) done. Loss: 0.0067 lr:0.001000 network_time: 0.0500
554
+ [ Thu Sep 15 19:35:03 2022 ] Eval epoch: 89
555
+ [ Thu Sep 15 19:35:26 2022 ] Mean test loss of 258 batches: 1.8942618370056152.
556
+ [ Thu Sep 15 19:35:26 2022 ] Top1: 62.35%
557
+ [ Thu Sep 15 19:35:26 2022 ] Top5: 90.16%
558
+ [ Thu Sep 15 19:35:26 2022 ] Training epoch: 90
559
+ [ Thu Sep 15 19:35:50 2022 ] Batch(52/123) done. Loss: 0.0045 lr:0.001000 network_time: 0.0503
560
+ [ Thu Sep 15 19:36:16 2022 ] Eval epoch: 90
561
+ [ Thu Sep 15 19:36:38 2022 ] Mean test loss of 258 batches: 1.8252038955688477.
562
+ [ Thu Sep 15 19:36:38 2022 ] Top1: 64.66%
563
+ [ Thu Sep 15 19:36:39 2022 ] Top5: 91.42%
564
+ [ Thu Sep 15 19:36:39 2022 ] Training epoch: 91
565
+ [ Thu Sep 15 19:36:54 2022 ] Batch(29/123) done. Loss: 0.0031 lr:0.001000 network_time: 0.0505
566
+ [ Thu Sep 15 19:37:28 2022 ] Eval epoch: 91
567
+ [ Thu Sep 15 19:37:51 2022 ] Mean test loss of 258 batches: 1.8925552368164062.
568
+ [ Thu Sep 15 19:37:51 2022 ] Top1: 63.90%
569
+ [ Thu Sep 15 19:37:52 2022 ] Top5: 90.99%
570
+ [ Thu Sep 15 19:37:52 2022 ] Training epoch: 92
571
+ [ Thu Sep 15 19:37:58 2022 ] Batch(6/123) done. Loss: 0.0046 lr:0.001000 network_time: 0.0621
572
+ [ Thu Sep 15 19:38:35 2022 ] Batch(106/123) done. Loss: 0.0084 lr:0.001000 network_time: 0.0526
573
+ [ Thu Sep 15 19:38:41 2022 ] Eval epoch: 92
574
+ [ Thu Sep 15 19:39:04 2022 ] Mean test loss of 258 batches: 1.8438127040863037.
575
+ [ Thu Sep 15 19:39:04 2022 ] Top1: 64.34%
576
+ [ Thu Sep 15 19:39:04 2022 ] Top5: 91.21%
577
+ [ Thu Sep 15 19:39:04 2022 ] Training epoch: 93
578
+ [ Thu Sep 15 19:39:39 2022 ] Batch(83/123) done. Loss: 0.0102 lr:0.001000 network_time: 0.0537
579
+ [ Thu Sep 15 19:39:54 2022 ] Eval epoch: 93
580
+ [ Thu Sep 15 19:40:16 2022 ] Mean test loss of 258 batches: 1.8425955772399902.
581
+ [ Thu Sep 15 19:40:16 2022 ] Top1: 64.31%
582
+ [ Thu Sep 15 19:40:16 2022 ] Top5: 91.17%
583
+ [ Thu Sep 15 19:40:17 2022 ] Training epoch: 94
584
+ [ Thu Sep 15 19:40:43 2022 ] Batch(60/123) done. Loss: 0.0067 lr:0.001000 network_time: 0.0549
585
+ [ Thu Sep 15 19:41:06 2022 ] Eval epoch: 94
586
+ [ Thu Sep 15 19:41:29 2022 ] Mean test loss of 258 batches: 1.8022571802139282.
587
+ [ Thu Sep 15 19:41:29 2022 ] Top1: 64.40%
588
+ [ Thu Sep 15 19:41:29 2022 ] Top5: 91.19%
589
+ [ Thu Sep 15 19:41:29 2022 ] Training epoch: 95
590
+ [ Thu Sep 15 19:41:47 2022 ] Batch(37/123) done. Loss: 0.0038 lr:0.001000 network_time: 0.0516
591
+ [ Thu Sep 15 19:42:18 2022 ] Eval epoch: 95
592
+ [ Thu Sep 15 19:42:41 2022 ] Mean test loss of 258 batches: 1.8983581066131592.
593
+ [ Thu Sep 15 19:42:41 2022 ] Top1: 63.29%
594
+ [ Thu Sep 15 19:42:41 2022 ] Top5: 90.73%
595
+ [ Thu Sep 15 19:42:41 2022 ] Training epoch: 96
596
+ [ Thu Sep 15 19:42:51 2022 ] Batch(14/123) done. Loss: 0.0238 lr:0.001000 network_time: 0.0616
597
+ [ Thu Sep 15 19:43:27 2022 ] Batch(114/123) done. Loss: 0.0055 lr:0.001000 network_time: 0.0515
598
+ [ Thu Sep 15 19:43:30 2022 ] Eval epoch: 96
599
+ [ Thu Sep 15 19:43:53 2022 ] Mean test loss of 258 batches: 1.8857086896896362.
600
+ [ Thu Sep 15 19:43:53 2022 ] Top1: 63.26%
601
+ [ Thu Sep 15 19:43:53 2022 ] Top5: 90.91%
602
+ [ Thu Sep 15 19:43:53 2022 ] Training epoch: 97
603
+ [ Thu Sep 15 19:44:31 2022 ] Batch(91/123) done. Loss: 0.0085 lr:0.001000 network_time: 0.0533
604
+ [ Thu Sep 15 19:44:43 2022 ] Eval epoch: 97
605
+ [ Thu Sep 15 19:45:06 2022 ] Mean test loss of 258 batches: 1.876707911491394.
606
+ [ Thu Sep 15 19:45:06 2022 ] Top1: 64.09%
607
+ [ Thu Sep 15 19:45:06 2022 ] Top5: 91.19%
608
+ [ Thu Sep 15 19:45:06 2022 ] Training epoch: 98
609
+ [ Thu Sep 15 19:45:35 2022 ] Batch(68/123) done. Loss: 0.0062 lr:0.001000 network_time: 0.0534
610
+ [ Thu Sep 15 19:45:55 2022 ] Eval epoch: 98
611
+ [ Thu Sep 15 19:46:18 2022 ] Mean test loss of 258 batches: 1.9330939054489136.
612
+ [ Thu Sep 15 19:46:18 2022 ] Top1: 63.33%
613
+ [ Thu Sep 15 19:46:18 2022 ] Top5: 90.91%
614
+ [ Thu Sep 15 19:46:18 2022 ] Training epoch: 99
615
+ [ Thu Sep 15 19:46:39 2022 ] Batch(45/123) done. Loss: 0.0026 lr:0.001000 network_time: 0.0587
616
+ [ Thu Sep 15 19:47:08 2022 ] Eval epoch: 99
617
+ [ Thu Sep 15 19:47:30 2022 ] Mean test loss of 258 batches: 1.8569103479385376.
618
+ [ Thu Sep 15 19:47:30 2022 ] Top1: 64.52%
619
+ [ Thu Sep 15 19:47:30 2022 ] Top5: 91.11%
620
+ [ Thu Sep 15 19:47:30 2022 ] Training epoch: 100
621
+ [ Thu Sep 15 19:47:43 2022 ] Batch(22/123) done. Loss: 0.0122 lr:0.001000 network_time: 0.0563
622
+ [ Thu Sep 15 19:48:19 2022 ] Batch(122/123) done. Loss: 0.0033 lr:0.001000 network_time: 0.0531
623
+ [ Thu Sep 15 19:48:20 2022 ] Eval epoch: 100
624
+ [ Thu Sep 15 19:48:43 2022 ] Mean test loss of 258 batches: 1.8815741539001465.
625
+ [ Thu Sep 15 19:48:43 2022 ] Top1: 63.68%
626
+ [ Thu Sep 15 19:48:43 2022 ] Top5: 90.88%
627
+ [ Thu Sep 15 19:48:43 2022 ] Training epoch: 101
628
+ [ Thu Sep 15 19:49:24 2022 ] Batch(99/123) done. Loss: 0.0063 lr:0.000100 network_time: 0.0554
629
+ [ Thu Sep 15 19:49:32 2022 ] Eval epoch: 101
630
+ [ Thu Sep 15 19:49:55 2022 ] Mean test loss of 258 batches: 1.9553409814834595.
631
+ [ Thu Sep 15 19:49:55 2022 ] Top1: 63.68%
632
+ [ Thu Sep 15 19:49:55 2022 ] Top5: 90.88%
633
+ [ Thu Sep 15 19:49:56 2022 ] Training epoch: 102
634
+ [ Thu Sep 15 19:50:28 2022 ] Batch(76/123) done. Loss: 0.0070 lr:0.000100 network_time: 0.0523
635
+ [ Thu Sep 15 19:50:45 2022 ] Eval epoch: 102
636
+ [ Thu Sep 15 19:51:08 2022 ] Mean test loss of 258 batches: 2.0229227542877197.
637
+ [ Thu Sep 15 19:51:08 2022 ] Top1: 61.29%
638
+ [ Thu Sep 15 19:51:08 2022 ] Top5: 89.88%
639
+ [ Thu Sep 15 19:51:08 2022 ] Training epoch: 103
640
+ [ Thu Sep 15 19:51:31 2022 ] Batch(53/123) done. Loss: 0.0044 lr:0.000100 network_time: 0.0533
641
+ [ Thu Sep 15 19:51:57 2022 ] Eval epoch: 103
642
+ [ Thu Sep 15 19:52:20 2022 ] Mean test loss of 258 batches: 1.82876455783844.
643
+ [ Thu Sep 15 19:52:20 2022 ] Top1: 64.74%
644
+ [ Thu Sep 15 19:52:20 2022 ] Top5: 91.20%
645
+ [ Thu Sep 15 19:52:20 2022 ] Training epoch: 104
646
+ [ Thu Sep 15 19:52:36 2022 ] Batch(30/123) done. Loss: 0.0089 lr:0.000100 network_time: 0.0563
647
+ [ Thu Sep 15 19:53:10 2022 ] Eval epoch: 104
648
+ [ Thu Sep 15 19:53:32 2022 ] Mean test loss of 258 batches: 1.8227325677871704.
649
+ [ Thu Sep 15 19:53:32 2022 ] Top1: 64.23%
650
+ [ Thu Sep 15 19:53:32 2022 ] Top5: 91.13%
651
+ [ Thu Sep 15 19:53:32 2022 ] Training epoch: 105
652
+ [ Thu Sep 15 19:53:39 2022 ] Batch(7/123) done. Loss: 0.0048 lr:0.000100 network_time: 0.0492
653
+ [ Thu Sep 15 19:54:16 2022 ] Batch(107/123) done. Loss: 0.0035 lr:0.000100 network_time: 0.0532
654
+ [ Thu Sep 15 19:54:22 2022 ] Eval epoch: 105
655
+ [ Thu Sep 15 19:54:44 2022 ] Mean test loss of 258 batches: 1.8803633451461792.
656
+ [ Thu Sep 15 19:54:44 2022 ] Top1: 63.32%
657
+ [ Thu Sep 15 19:54:44 2022 ] Top5: 90.86%
658
+ [ Thu Sep 15 19:54:44 2022 ] Training epoch: 106
659
+ [ Thu Sep 15 19:55:19 2022 ] Batch(84/123) done. Loss: 0.0057 lr:0.000100 network_time: 0.0537
660
+ [ Thu Sep 15 19:55:34 2022 ] Eval epoch: 106
661
+ [ Thu Sep 15 19:55:57 2022 ] Mean test loss of 258 batches: 2.0569188594818115.
662
+ [ Thu Sep 15 19:55:57 2022 ] Top1: 61.10%
663
+ [ Thu Sep 15 19:55:57 2022 ] Top5: 89.51%
664
+ [ Thu Sep 15 19:55:57 2022 ] Training epoch: 107
665
+ [ Thu Sep 15 19:56:24 2022 ] Batch(61/123) done. Loss: 0.0156 lr:0.000100 network_time: 0.0561
666
+ [ Thu Sep 15 19:56:46 2022 ] Eval epoch: 107
667
+ [ Thu Sep 15 19:57:09 2022 ] Mean test loss of 258 batches: 1.8778842687606812.
668
+ [ Thu Sep 15 19:57:09 2022 ] Top1: 63.09%
669
+ [ Thu Sep 15 19:57:09 2022 ] Top5: 90.79%
670
+ [ Thu Sep 15 19:57:09 2022 ] Training epoch: 108
671
+ [ Thu Sep 15 19:57:28 2022 ] Batch(38/123) done. Loss: 0.0017 lr:0.000100 network_time: 0.0534
672
+ [ Thu Sep 15 19:57:59 2022 ] Eval epoch: 108
673
+ [ Thu Sep 15 19:58:22 2022 ] Mean test loss of 258 batches: 1.8048086166381836.
674
+ [ Thu Sep 15 19:58:22 2022 ] Top1: 64.37%
675
+ [ Thu Sep 15 19:58:22 2022 ] Top5: 91.36%
676
+ [ Thu Sep 15 19:58:22 2022 ] Training epoch: 109
677
+ [ Thu Sep 15 19:58:32 2022 ] Batch(15/123) done. Loss: 0.0029 lr:0.000100 network_time: 0.0545
678
+ [ Thu Sep 15 19:59:08 2022 ] Batch(115/123) done. Loss: 0.0072 lr:0.000100 network_time: 0.0506
679
+ [ Thu Sep 15 19:59:11 2022 ] Eval epoch: 109
680
+ [ Thu Sep 15 19:59:34 2022 ] Mean test loss of 258 batches: 1.9837099313735962.
681
+ [ Thu Sep 15 19:59:34 2022 ] Top1: 62.79%
682
+ [ Thu Sep 15 19:59:34 2022 ] Top5: 90.65%
683
+ [ Thu Sep 15 19:59:34 2022 ] Training epoch: 110
684
+ [ Thu Sep 15 20:00:12 2022 ] Batch(92/123) done. Loss: 0.0081 lr:0.000100 network_time: 0.0531
685
+ [ Thu Sep 15 20:00:23 2022 ] Eval epoch: 110
686
+ [ Thu Sep 15 20:00:46 2022 ] Mean test loss of 258 batches: 1.920461893081665.
687
+ [ Thu Sep 15 20:00:46 2022 ] Top1: 63.84%
688
+ [ Thu Sep 15 20:00:46 2022 ] Top5: 90.85%
689
+ [ Thu Sep 15 20:00:46 2022 ] Training epoch: 111
690
+ [ Thu Sep 15 20:01:16 2022 ] Batch(69/123) done. Loss: 0.0067 lr:0.000100 network_time: 0.0588
691
+ [ Thu Sep 15 20:01:36 2022 ] Eval epoch: 111
692
+ [ Thu Sep 15 20:01:58 2022 ] Mean test loss of 258 batches: 1.7909228801727295.
693
+ [ Thu Sep 15 20:01:58 2022 ] Top1: 64.79%
694
+ [ Thu Sep 15 20:01:58 2022 ] Top5: 91.53%
695
+ [ Thu Sep 15 20:01:58 2022 ] Training epoch: 112
696
+ [ Thu Sep 15 20:02:19 2022 ] Batch(46/123) done. Loss: 0.0023 lr:0.000100 network_time: 0.0538
697
+ [ Thu Sep 15 20:02:48 2022 ] Eval epoch: 112
698
+ [ Thu Sep 15 20:03:11 2022 ] Mean test loss of 258 batches: 1.8119471073150635.
699
+ [ Thu Sep 15 20:03:11 2022 ] Top1: 64.38%
700
+ [ Thu Sep 15 20:03:11 2022 ] Top5: 91.19%
701
+ [ Thu Sep 15 20:03:11 2022 ] Training epoch: 113
702
+ [ Thu Sep 15 20:03:24 2022 ] Batch(23/123) done. Loss: 0.0052 lr:0.000100 network_time: 0.0573
703
+ [ Thu Sep 15 20:04:00 2022 ] Eval epoch: 113
704
+ [ Thu Sep 15 20:04:23 2022 ] Mean test loss of 258 batches: 1.8072237968444824.
705
+ [ Thu Sep 15 20:04:23 2022 ] Top1: 64.49%
706
+ [ Thu Sep 15 20:04:23 2022 ] Top5: 91.28%
707
+ [ Thu Sep 15 20:04:23 2022 ] Training epoch: 114
708
+ [ Thu Sep 15 20:04:28 2022 ] Batch(0/123) done. Loss: 0.0142 lr:0.000100 network_time: 0.0936
709
+ [ Thu Sep 15 20:05:04 2022 ] Batch(100/123) done. Loss: 0.0016 lr:0.000100 network_time: 0.0529
710
+ [ Thu Sep 15 20:05:13 2022 ] Eval epoch: 114
711
+ [ Thu Sep 15 20:05:35 2022 ] Mean test loss of 258 batches: 2.0189363956451416.
712
+ [ Thu Sep 15 20:05:36 2022 ] Top1: 61.37%
713
+ [ Thu Sep 15 20:05:36 2022 ] Top5: 89.63%
714
+ [ Thu Sep 15 20:05:36 2022 ] Training epoch: 115
715
+ [ Thu Sep 15 20:06:08 2022 ] Batch(77/123) done. Loss: 0.0031 lr:0.000100 network_time: 0.0503
716
+ [ Thu Sep 15 20:06:25 2022 ] Eval epoch: 115
717
+ [ Thu Sep 15 20:06:48 2022 ] Mean test loss of 258 batches: 1.9490886926651.
718
+ [ Thu Sep 15 20:06:48 2022 ] Top1: 62.44%
719
+ [ Thu Sep 15 20:06:48 2022 ] Top5: 90.64%
720
+ [ Thu Sep 15 20:06:48 2022 ] Training epoch: 116
721
+ [ Thu Sep 15 20:07:13 2022 ] Batch(54/123) done. Loss: 0.0228 lr:0.000100 network_time: 0.0544
722
+ [ Thu Sep 15 20:07:38 2022 ] Eval epoch: 116
723
+ [ Thu Sep 15 20:08:01 2022 ] Mean test loss of 258 batches: 1.801637887954712.
724
+ [ Thu Sep 15 20:08:01 2022 ] Top1: 64.53%
725
+ [ Thu Sep 15 20:08:01 2022 ] Top5: 91.27%
726
+ [ Thu Sep 15 20:08:01 2022 ] Training epoch: 117
727
+ [ Thu Sep 15 20:08:17 2022 ] Batch(31/123) done. Loss: 0.0068 lr:0.000100 network_time: 0.0506
728
+ [ Thu Sep 15 20:08:50 2022 ] Eval epoch: 117
729
+ [ Thu Sep 15 20:09:13 2022 ] Mean test loss of 258 batches: 1.8541256189346313.
730
+ [ Thu Sep 15 20:09:13 2022 ] Top1: 63.92%
731
+ [ Thu Sep 15 20:09:13 2022 ] Top5: 91.17%
732
+ [ Thu Sep 15 20:09:13 2022 ] Training epoch: 118
733
+ [ Thu Sep 15 20:09:21 2022 ] Batch(8/123) done. Loss: 0.0072 lr:0.000100 network_time: 0.0568
734
+ [ Thu Sep 15 20:09:58 2022 ] Batch(108/123) done. Loss: 0.0101 lr:0.000100 network_time: 0.0597
735
+ [ Thu Sep 15 20:10:03 2022 ] Eval epoch: 118
736
+ [ Thu Sep 15 20:10:26 2022 ] Mean test loss of 258 batches: 2.009375810623169.
737
+ [ Thu Sep 15 20:10:26 2022 ] Top1: 62.33%
738
+ [ Thu Sep 15 20:10:26 2022 ] Top5: 90.37%
739
+ [ Thu Sep 15 20:10:26 2022 ] Training epoch: 119
740
+ [ Thu Sep 15 20:11:02 2022 ] Batch(85/123) done. Loss: 0.0018 lr:0.000100 network_time: 0.0489
741
+ [ Thu Sep 15 20:11:15 2022 ] Eval epoch: 119
742
+ [ Thu Sep 15 20:11:38 2022 ] Mean test loss of 258 batches: 1.7464431524276733.
743
+ [ Thu Sep 15 20:11:38 2022 ] Top1: 64.95%
744
+ [ Thu Sep 15 20:11:38 2022 ] Top5: 91.34%
745
+ [ Thu Sep 15 20:11:38 2022 ] Training epoch: 120
746
+ [ Thu Sep 15 20:12:05 2022 ] Batch(62/123) done. Loss: 0.0046 lr:0.000100 network_time: 0.0513
747
+ [ Thu Sep 15 20:12:28 2022 ] Eval epoch: 120
748
+ [ Thu Sep 15 20:12:51 2022 ] Mean test loss of 258 batches: 1.892992377281189.
749
+ [ Thu Sep 15 20:12:51 2022 ] Top1: 63.81%
750
+ [ Thu Sep 15 20:12:51 2022 ] Top5: 90.92%
751
+ [ Thu Sep 15 20:12:51 2022 ] Training epoch: 121
752
+ [ Thu Sep 15 20:13:09 2022 ] Batch(39/123) done. Loss: 0.0030 lr:0.000100 network_time: 0.0722
753
+ [ Thu Sep 15 20:13:40 2022 ] Eval epoch: 121
754
+ [ Thu Sep 15 20:14:03 2022 ] Mean test loss of 258 batches: 1.9506547451019287.
755
+ [ Thu Sep 15 20:14:03 2022 ] Top1: 63.22%
756
+ [ Thu Sep 15 20:14:03 2022 ] Top5: 90.80%
757
+ [ Thu Sep 15 20:14:03 2022 ] Training epoch: 122
758
+ [ Thu Sep 15 20:14:14 2022 ] Batch(16/123) done. Loss: 0.0073 lr:0.000100 network_time: 0.0579
759
+ [ Thu Sep 15 20:14:50 2022 ] Batch(116/123) done. Loss: 0.0041 lr:0.000100 network_time: 0.0492
760
+ [ Thu Sep 15 20:14:53 2022 ] Eval epoch: 122
761
+ [ Thu Sep 15 20:15:15 2022 ] Mean test loss of 258 batches: 1.9415020942687988.
762
+ [ Thu Sep 15 20:15:16 2022 ] Top1: 62.73%
763
+ [ Thu Sep 15 20:15:16 2022 ] Top5: 90.48%
764
+ [ Thu Sep 15 20:15:16 2022 ] Training epoch: 123
765
+ [ Thu Sep 15 20:15:54 2022 ] Batch(93/123) done. Loss: 0.0050 lr:0.000100 network_time: 0.0531
766
+ [ Thu Sep 15 20:16:05 2022 ] Eval epoch: 123
767
+ [ Thu Sep 15 20:16:28 2022 ] Mean test loss of 258 batches: 1.7792094945907593.
768
+ [ Thu Sep 15 20:16:28 2022 ] Top1: 64.38%
769
+ [ Thu Sep 15 20:16:28 2022 ] Top5: 91.42%
770
+ [ Thu Sep 15 20:16:28 2022 ] Training epoch: 124
771
+ [ Thu Sep 15 20:16:58 2022 ] Batch(70/123) done. Loss: 0.0022 lr:0.000100 network_time: 0.0531
772
+ [ Thu Sep 15 20:17:18 2022 ] Eval epoch: 124
773
+ [ Thu Sep 15 20:17:41 2022 ] Mean test loss of 258 batches: 1.924527645111084.
774
+ [ Thu Sep 15 20:17:41 2022 ] Top1: 63.71%
775
+ [ Thu Sep 15 20:17:41 2022 ] Top5: 90.88%
776
+ [ Thu Sep 15 20:17:41 2022 ] Training epoch: 125
777
+ [ Thu Sep 15 20:18:03 2022 ] Batch(47/123) done. Loss: 0.0032 lr:0.000100 network_time: 0.0517
778
+ [ Thu Sep 15 20:18:30 2022 ] Eval epoch: 125
779
+ [ Thu Sep 15 20:18:53 2022 ] Mean test loss of 258 batches: 1.797371745109558.
780
+ [ Thu Sep 15 20:18:53 2022 ] Top1: 64.73%
781
+ [ Thu Sep 15 20:18:53 2022 ] Top5: 91.55%
782
+ [ Thu Sep 15 20:18:53 2022 ] Training epoch: 126
783
+ [ Thu Sep 15 20:19:07 2022 ] Batch(24/123) done. Loss: 0.0054 lr:0.000100 network_time: 0.0543
784
+ [ Thu Sep 15 20:19:42 2022 ] Eval epoch: 126
785
+ [ Thu Sep 15 20:20:06 2022 ] Mean test loss of 258 batches: 1.8714008331298828.
786
+ [ Thu Sep 15 20:20:06 2022 ] Top1: 63.84%
787
+ [ Thu Sep 15 20:20:06 2022 ] Top5: 91.07%
788
+ [ Thu Sep 15 20:20:06 2022 ] Training epoch: 127
789
+ [ Thu Sep 15 20:20:11 2022 ] Batch(1/123) done. Loss: 0.0051 lr:0.000100 network_time: 0.0559
790
+ [ Thu Sep 15 20:20:47 2022 ] Batch(101/123) done. Loss: 0.0022 lr:0.000100 network_time: 0.0472
791
+ [ Thu Sep 15 20:20:55 2022 ] Eval epoch: 127
792
+ [ Thu Sep 15 20:21:18 2022 ] Mean test loss of 258 batches: 1.848433494567871.
793
+ [ Thu Sep 15 20:21:18 2022 ] Top1: 64.39%
794
+ [ Thu Sep 15 20:21:18 2022 ] Top5: 91.31%
795
+ [ Thu Sep 15 20:21:18 2022 ] Training epoch: 128
796
+ [ Thu Sep 15 20:21:52 2022 ] Batch(78/123) done. Loss: 0.0029 lr:0.000100 network_time: 0.0501
797
+ [ Thu Sep 15 20:22:08 2022 ] Eval epoch: 128
798
+ [ Thu Sep 15 20:22:31 2022 ] Mean test loss of 258 batches: 1.849387288093567.
799
+ [ Thu Sep 15 20:22:31 2022 ] Top1: 64.44%
800
+ [ Thu Sep 15 20:22:31 2022 ] Top5: 91.35%
801
+ [ Thu Sep 15 20:22:31 2022 ] Training epoch: 129
802
+ [ Thu Sep 15 20:22:56 2022 ] Batch(55/123) done. Loss: 0.0056 lr:0.000100 network_time: 0.0462
803
+ [ Thu Sep 15 20:23:20 2022 ] Eval epoch: 129
804
+ [ Thu Sep 15 20:23:43 2022 ] Mean test loss of 258 batches: 1.9221100807189941.
805
+ [ Thu Sep 15 20:23:43 2022 ] Top1: 63.66%
806
+ [ Thu Sep 15 20:23:43 2022 ] Top5: 90.82%
807
+ [ Thu Sep 15 20:23:43 2022 ] Training epoch: 130
808
+ [ Thu Sep 15 20:24:00 2022 ] Batch(32/123) done. Loss: 0.0082 lr:0.000100 network_time: 0.0517
809
+ [ Thu Sep 15 20:24:33 2022 ] Eval epoch: 130
810
+ [ Thu Sep 15 20:24:56 2022 ] Mean test loss of 258 batches: 1.9553499221801758.
811
+ [ Thu Sep 15 20:24:56 2022 ] Top1: 63.26%
812
+ [ Thu Sep 15 20:24:56 2022 ] Top5: 90.82%
813
+ [ Thu Sep 15 20:24:56 2022 ] Training epoch: 131
814
+ [ Thu Sep 15 20:25:04 2022 ] Batch(9/123) done. Loss: 0.0023 lr:0.000100 network_time: 0.0526
815
+ [ Thu Sep 15 20:25:40 2022 ] Batch(109/123) done. Loss: 0.0263 lr:0.000100 network_time: 0.0494
816
+ [ Thu Sep 15 20:25:45 2022 ] Eval epoch: 131
817
+ [ Thu Sep 15 20:26:08 2022 ] Mean test loss of 258 batches: 1.9092321395874023.
818
+ [ Thu Sep 15 20:26:08 2022 ] Top1: 63.81%
819
+ [ Thu Sep 15 20:26:08 2022 ] Top5: 91.06%
820
+ [ Thu Sep 15 20:26:08 2022 ] Training epoch: 132
821
+ [ Thu Sep 15 20:26:44 2022 ] Batch(86/123) done. Loss: 0.0040 lr:0.000100 network_time: 0.0515
822
+ [ Thu Sep 15 20:26:57 2022 ] Eval epoch: 132
823
+ [ Thu Sep 15 20:27:20 2022 ] Mean test loss of 258 batches: 1.8571966886520386.
824
+ [ Thu Sep 15 20:27:21 2022 ] Top1: 63.51%
825
+ [ Thu Sep 15 20:27:21 2022 ] Top5: 91.11%
826
+ [ Thu Sep 15 20:27:21 2022 ] Training epoch: 133
827
+ [ Thu Sep 15 20:27:48 2022 ] Batch(63/123) done. Loss: 0.0079 lr:0.000100 network_time: 0.0532
828
+ [ Thu Sep 15 20:28:10 2022 ] Eval epoch: 133
829
+ [ Thu Sep 15 20:28:33 2022 ] Mean test loss of 258 batches: 2.009004831314087.
830
+ [ Thu Sep 15 20:28:33 2022 ] Top1: 62.72%
831
+ [ Thu Sep 15 20:28:33 2022 ] Top5: 90.47%
832
+ [ Thu Sep 15 20:28:33 2022 ] Training epoch: 134
833
+ [ Thu Sep 15 20:28:52 2022 ] Batch(40/123) done. Loss: 0.0086 lr:0.000100 network_time: 0.0507
834
+ [ Thu Sep 15 20:29:23 2022 ] Eval epoch: 134
835
+ [ Thu Sep 15 20:29:46 2022 ] Mean test loss of 258 batches: 1.814934253692627.
836
+ [ Thu Sep 15 20:29:46 2022 ] Top1: 64.21%
837
+ [ Thu Sep 15 20:29:46 2022 ] Top5: 91.35%
838
+ [ Thu Sep 15 20:29:46 2022 ] Training epoch: 135
839
+ [ Thu Sep 15 20:29:57 2022 ] Batch(17/123) done. Loss: 0.0034 lr:0.000100 network_time: 0.0575
840
+ [ Thu Sep 15 20:30:34 2022 ] Batch(117/123) done. Loss: 0.0049 lr:0.000100 network_time: 0.0484
841
+ [ Thu Sep 15 20:30:36 2022 ] Eval epoch: 135
842
+ [ Thu Sep 15 20:30:59 2022 ] Mean test loss of 258 batches: 1.8043664693832397.
843
+ [ Thu Sep 15 20:31:00 2022 ] Top1: 63.80%
844
+ [ Thu Sep 15 20:31:00 2022 ] Top5: 91.04%
845
+ [ Thu Sep 15 20:31:00 2022 ] Training epoch: 136
846
+ [ Thu Sep 15 20:31:39 2022 ] Batch(94/123) done. Loss: 0.0053 lr:0.000100 network_time: 0.0498
847
+ [ Thu Sep 15 20:31:49 2022 ] Eval epoch: 136
848
+ [ Thu Sep 15 20:32:12 2022 ] Mean test loss of 258 batches: 1.9689311981201172.
849
+ [ Thu Sep 15 20:32:12 2022 ] Top1: 63.80%
850
+ [ Thu Sep 15 20:32:12 2022 ] Top5: 90.93%
851
+ [ Thu Sep 15 20:32:12 2022 ] Training epoch: 137
852
+ [ Thu Sep 15 20:32:42 2022 ] Batch(71/123) done. Loss: 0.0085 lr:0.000100 network_time: 0.0516
853
+ [ Thu Sep 15 20:33:01 2022 ] Eval epoch: 137
854
+ [ Thu Sep 15 20:33:25 2022 ] Mean test loss of 258 batches: 1.8842922449111938.
855
+ [ Thu Sep 15 20:33:25 2022 ] Top1: 63.98%
856
+ [ Thu Sep 15 20:33:25 2022 ] Top5: 91.14%
857
+ [ Thu Sep 15 20:33:25 2022 ] Training epoch: 138
858
+ [ Thu Sep 15 20:33:47 2022 ] Batch(48/123) done. Loss: 0.0178 lr:0.000100 network_time: 0.0504
859
+ [ Thu Sep 15 20:34:14 2022 ] Eval epoch: 138
860
+ [ Thu Sep 15 20:34:37 2022 ] Mean test loss of 258 batches: 1.9904453754425049.
861
+ [ Thu Sep 15 20:34:37 2022 ] Top1: 62.78%
862
+ [ Thu Sep 15 20:34:37 2022 ] Top5: 90.56%
863
+ [ Thu Sep 15 20:34:37 2022 ] Training epoch: 139
864
+ [ Thu Sep 15 20:34:51 2022 ] Batch(25/123) done. Loss: 0.0022 lr:0.000100 network_time: 0.0549
865
+ [ Thu Sep 15 20:35:27 2022 ] Eval epoch: 139
866
+ [ Thu Sep 15 20:35:50 2022 ] Mean test loss of 258 batches: 1.9355473518371582.
867
+ [ Thu Sep 15 20:35:50 2022 ] Top1: 63.64%
868
+ [ Thu Sep 15 20:35:50 2022 ] Top5: 90.88%
869
+ [ Thu Sep 15 20:35:50 2022 ] Training epoch: 140
870
+ [ Thu Sep 15 20:35:56 2022 ] Batch(2/123) done. Loss: 0.0047 lr:0.000100 network_time: 0.0507
871
+ [ Thu Sep 15 20:36:33 2022 ] Batch(102/123) done. Loss: 0.0064 lr:0.000100 network_time: 0.0545
872
+ [ Thu Sep 15 20:36:41 2022 ] Eval epoch: 140
873
+ [ Thu Sep 15 20:37:04 2022 ] Mean test loss of 258 batches: 1.9572222232818604.
874
+ [ Thu Sep 15 20:37:04 2022 ] Top1: 62.53%
875
+ [ Thu Sep 15 20:37:04 2022 ] Top5: 90.63%
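
Note on reading these logs: every eval epoch prints "Mean test loss", "Top1" and "Top5" lines in the fixed format shown above, so the accuracy curve can be recovered from log.txt alone. The snippet below is a small helper sketch for doing that; it is not part of the uploaded files, and the function name parse_topk and the commented example path are only illustrative.

    import re

    def parse_topk(log_path):
        """Collect the Top1/Top5 percentages from a Shift-GCN style log.txt."""
        top1, top5 = [], []
        with open(log_path) as f:
            for line in f:
                m1 = re.search(r'Top1: ([\d.]+)%', line)
                if m1:
                    top1.append(float(m1.group(1)))
                m5 = re.search(r'Top5: ([\d.]+)%', line)
                if m5:
                    top5.append(float(m5.group(1)))
        return top1, top5

    # Example (assumed local checkout path):
    # top1, _ = parse_topk('ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_bone_motion_xsub/log.txt')
    # print(max(top1))
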
ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_bone_motion_xsub/shift_gcn.py ADDED
@@ -0,0 +1,216 @@
+ # Shift-GCN network definition: spatial shift graph convolution (Shift_gcn)
+ # stacked with temporal shift convolution (Shift_tcn).
+ import torch
+ import torch.nn as nn
+ import torch.nn.functional as F
+ from torch.autograd import Variable
+ import numpy as np
+ import math
+
+ import sys
+ sys.path.append("./model/Temporal_shift/")
+
+ # CUDA temporal shift op from the original Shift-GCN code; it has to be
+ # compiled (model/Temporal_shift) before this module can be imported.
+ from cuda.shift import Shift
+
+
+ def import_class(name):
+     components = name.split('.')
+     mod = __import__(components[0])
+     for comp in components[1:]:
+         mod = getattr(mod, comp)
+     return mod
+
+
+ def conv_init(conv):
+     # in-place init; the non-underscore variants are deprecated/removed in recent PyTorch
+     nn.init.kaiming_normal_(conv.weight, mode='fan_out')
+     nn.init.constant_(conv.bias, 0)
+
+
+ def bn_init(bn, scale):
+     nn.init.constant_(bn.weight, scale)
+     nn.init.constant_(bn.bias, 0)
+
+
+ class tcn(nn.Module):
+     def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
+         super(tcn, self).__init__()
+         pad = int((kernel_size - 1) / 2)
+         self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
+                               stride=(stride, 1))
+
+         self.bn = nn.BatchNorm2d(out_channels)
+         self.relu = nn.ReLU()
+         conv_init(self.conv)
+         bn_init(self.bn, 1)
+
+     def forward(self, x):
+         x = self.bn(self.conv(x))
+         return x
+
+
+ class Shift_tcn(nn.Module):
+     def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
+         super(Shift_tcn, self).__init__()
+
+         self.in_channels = in_channels
+         self.out_channels = out_channels
+
+         self.bn = nn.BatchNorm2d(in_channels)
+         self.bn2 = nn.BatchNorm2d(in_channels)
+         bn_init(self.bn2, 1)
+         self.relu = nn.ReLU(inplace=True)
+         self.shift_in = Shift(channel=in_channels, stride=1, init_scale=1)
+         self.shift_out = Shift(channel=out_channels, stride=stride, init_scale=1)
+
+         self.temporal_linear = nn.Conv2d(in_channels, out_channels, 1)
+         nn.init.kaiming_normal_(self.temporal_linear.weight, mode='fan_out')
+
+     def forward(self, x):
+         x = self.bn(x)
+         # shift1
+         x = self.shift_in(x)
+         x = self.temporal_linear(x)
+         x = self.relu(x)
+         # shift2
+         x = self.shift_out(x)
+         x = self.bn2(x)
+         return x
+
+
+ class Shift_gcn(nn.Module):
+     def __init__(self, in_channels, out_channels, A, coff_embedding=4, num_subset=3):
+         super(Shift_gcn, self).__init__()
+         self.in_channels = in_channels
+         self.out_channels = out_channels
+         if in_channels != out_channels:
+             self.down = nn.Sequential(
+                 nn.Conv2d(in_channels, out_channels, 1),
+                 nn.BatchNorm2d(out_channels)
+             )
+         else:
+             self.down = lambda x: x
+
+         self.Linear_weight = nn.Parameter(torch.zeros(in_channels, out_channels, requires_grad=True, device='cuda'), requires_grad=True)
+         nn.init.normal_(self.Linear_weight, 0, math.sqrt(1.0 / out_channels))
+
+         self.Linear_bias = nn.Parameter(torch.zeros(1, 1, out_channels, requires_grad=True, device='cuda'), requires_grad=True)
+         nn.init.constant_(self.Linear_bias, 0)
+
+         self.Feature_Mask = nn.Parameter(torch.ones(1, 25, in_channels, requires_grad=True, device='cuda'), requires_grad=True)
+         nn.init.constant_(self.Feature_Mask, 0)
+
+         self.bn = nn.BatchNorm1d(25 * out_channels)
+         self.relu = nn.ReLU()
+
+         for m in self.modules():
+             if isinstance(m, nn.Conv2d):
+                 conv_init(m)
+             elif isinstance(m, nn.BatchNorm2d):
+                 bn_init(m, 1)
+
+         # Precomputed spatial-shift indices: channel j of joint i is gathered from
+         # joint (i + j) % 25 on the way in and joint (i - j) % 25 on the way out.
+         # np.int was removed in NumPy >= 1.24; int64 gives the LongTensor that
+         # torch.index_select expects.
+         index_array = np.empty(25 * in_channels).astype(np.int64)
+         for i in range(25):
+             for j in range(in_channels):
+                 index_array[i * in_channels + j] = (i * in_channels + j + j * in_channels) % (in_channels * 25)
+         self.shift_in = nn.Parameter(torch.from_numpy(index_array), requires_grad=False)
+
+         index_array = np.empty(25 * out_channels).astype(np.int64)
+         for i in range(25):
+             for j in range(out_channels):
+                 index_array[i * out_channels + j] = (i * out_channels + j - j * out_channels) % (out_channels * 25)
+         self.shift_out = nn.Parameter(torch.from_numpy(index_array), requires_grad=False)
+
+     def forward(self, x0):
+         n, c, t, v = x0.size()
+         x = x0.permute(0, 2, 3, 1).contiguous()
+
+         # shift1
+         x = x.view(n * t, v * c)
+         x = torch.index_select(x, 1, self.shift_in)
+         x = x.view(n * t, v, c)
+         x = x * (torch.tanh(self.Feature_Mask) + 1)
+
+         x = torch.einsum('nwc,cd->nwd', (x, self.Linear_weight)).contiguous()  # nt,v,c
+         x = x + self.Linear_bias
+
+         # shift2
+         x = x.view(n * t, -1)
+         x = torch.index_select(x, 1, self.shift_out)
+         x = self.bn(x)
+         x = x.view(n, t, v, self.out_channels).permute(0, 3, 1, 2)  # n,c,t,v
+
+         x = x + self.down(x0)
+         x = self.relu(x)
+         return x
+
+
+ class TCN_GCN_unit(nn.Module):
+     def __init__(self, in_channels, out_channels, A, stride=1, residual=True):
+         super(TCN_GCN_unit, self).__init__()
+         self.gcn1 = Shift_gcn(in_channels, out_channels, A)
+         self.tcn1 = Shift_tcn(out_channels, out_channels, stride=stride)
+         self.relu = nn.ReLU()
+
+         if not residual:
+             self.residual = lambda x: 0
+         elif (in_channels == out_channels) and (stride == 1):
+             self.residual = lambda x: x
+         else:
+             self.residual = tcn(in_channels, out_channels, kernel_size=1, stride=stride)
+
+     def forward(self, x):
+         x = self.tcn1(self.gcn1(x)) + self.residual(x)
+         return self.relu(x)
+
+
+ class Model(nn.Module):
+     def __init__(self, num_class=60, num_point=25, num_person=2, graph=None, graph_args=dict(), in_channels=3):
+         super(Model, self).__init__()
+
+         if graph is None:
+             raise ValueError()
+         else:
+             Graph = import_class(graph)
+             self.graph = Graph(**graph_args)
+
+         A = self.graph.A
+         self.data_bn = nn.BatchNorm1d(num_person * in_channels * num_point)
+
+         self.l1 = TCN_GCN_unit(3, 64, A, residual=False)
+         self.l2 = TCN_GCN_unit(64, 64, A)
+         self.l3 = TCN_GCN_unit(64, 64, A)
+         self.l4 = TCN_GCN_unit(64, 64, A)
+         self.l5 = TCN_GCN_unit(64, 128, A, stride=2)
+         self.l6 = TCN_GCN_unit(128, 128, A)
+         self.l7 = TCN_GCN_unit(128, 128, A)
+         self.l8 = TCN_GCN_unit(128, 256, A, stride=2)
+         self.l9 = TCN_GCN_unit(256, 256, A)
+         self.l10 = TCN_GCN_unit(256, 256, A)
+
+         self.fc = nn.Linear(256, num_class)
+         nn.init.normal_(self.fc.weight, 0, math.sqrt(2. / num_class))
+         bn_init(self.data_bn, 1)
+
+     def forward(self, x):
+         N, C, T, V, M = x.size()
+
+         # fold persons into the batch: (N, C, T, V, M) -> (N*M, C, T, V)
+         x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
+         x = self.data_bn(x)
+         x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
+
+         x = self.l1(x)
+         x = self.l2(x)
+         x = self.l3(x)
+         x = self.l4(x)
+         x = self.l5(x)
+         x = self.l6(x)
+         x = self.l7(x)
+         x = self.l8(x)
+         x = self.l9(x)
+         x = self.l10(x)
+
+         # N*M,C,T,V
+         c_new = x.size(1)
+         x = x.view(N, M, c_new, -1)
+         x = x.mean(3).mean(1)
+
+         return self.fc(x)
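
For orientation, here is a rough usage sketch for the Model defined above; it is not part of the upload. It assumes the CUDA shift op shipped with the original Shift-GCN code (model/Temporal_shift) has been compiled and that a GPU is available, since Shift_gcn places its parameters on 'cuda'. The import path and the 300-frame sequence length are taken from the configs and feeder data used in these checkpoints and are assumptions, not guarantees.

    import torch
    from model.shift_gcn import Model  # the file above, as named by the configs' `model` field

    model = Model(num_class=60, num_point=25, num_person=2,
                  graph='graph.ntu_rgb_d.Graph',
                  graph_args={'labeling_mode': 'spatial'}).cuda()

    # Model.forward expects (N, C, T, V, M): batch, 3 coordinates, frames,
    # 25 joints, 2 bodies. T=300 matches the usual NTU preprocessing (assumed).
    x = torch.randn(2, 3, 300, 25, 2).cuda()
    logits = model(x)  # -> shape (2, num_class)
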
ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_bone_xsub/config.yaml ADDED
@@ -0,0 +1,58 @@
+ Experiment_name: ntu_ShiftGCN_bone_xsub
+ base_lr: 0.1
+ batch_size: 64
+ config: ./config/nturgbd-cross-subject/train_bone.yaml
+ device:
+ - 0
+ - 1
+ - 2
+ - 3
+ eval_interval: 5
+ feeder: feeders.feeder.Feeder
+ ignore_weights: []
+ log_interval: 100
+ model: model.shift_gcn.Model
+ model_args:
+   graph: graph.ntu_rgb_d.Graph
+   graph_args:
+     labeling_mode: spatial
+   num_class: 60
+   num_person: 2
+   num_point: 25
+ model_saved_name: ./save_models/ntu_ShiftGCN_bone_xsub
+ nesterov: true
+ num_epoch: 140
+ num_worker: 32
+ only_train_epoch: 1
+ only_train_part: true
+ optimizer: SGD
+ phase: train
+ print_log: true
+ save_interval: 2
+ save_score: false
+ seed: 1
+ show_topk:
+ - 1
+ - 5
+ start_epoch: 0
+ step:
+ - 60
+ - 80
+ - 100
+ test_batch_size: 64
+ test_feeder_args:
+   data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_bone.npy
+   label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl
+ train_feeder_args:
+   data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_bone.npy
+   debug: false
+   label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl
+   normalization: false
+   random_choose: false
+   random_move: false
+   random_shift: false
+   window_size: -1
+ warm_up_epoch: 0
+ weight_decay: 0.0001
+ weights: null
+ work_dir: ./work_dir/ntu_ShiftGCN_bone_xsub
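
The block below is a small, hedged sketch of how a config like this can be consumed; it is not part of the upload. It assumes PyYAML is installed and uses a standard importlib-based dynamic import rather than reproducing the training script's own loader; the file path is simply the one this config was uploaded under.

    import importlib
    import yaml

    with open('ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_bone_xsub/config.yaml') as f:
        cfg = yaml.safe_load(f)

    # cfg['model'] == 'model.shift_gcn.Model'; cfg['model_args'] holds its kwargs;
    # cfg['step'] == [60, 80, 100] are the epochs where the LR drops, matching the
    # lr 0.1 -> 0.01 -> 0.001 -> 0.0001 schedule visible in the logs.
    module_name, class_name = cfg['model'].rsplit('.', 1)
    Model = getattr(importlib.import_module(module_name), class_name)
    model = Model(**cfg['model_args'])
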
ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_bone_xsub/eval_results/best_acc.pkl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:7d5bce80cdeecce4cce3300b402719cd25c45d5d8530e964406d10576c3a0f35
+ size 4979902
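
The three lines above are only a Git LFS pointer; the actual ~5 MB pickle has to be fetched (e.g. with `git lfs pull`) before it can be read. In Shift-GCN-style training code the saved eval pickles generally hold per-sample class scores for later ensembling, but that layout is an assumption here, so the sketch below only loads and inspects the object.

    import pickle

    with open('ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_bone_xsub/eval_results/best_acc.pkl', 'rb') as f:
        scores = pickle.load(f)

    print(type(scores))  # inspect the structure before relying on it
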
ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_bone_xsub/log.txt ADDED
@@ -0,0 +1,875 @@
1
+ [ Thu Sep 15 17:47:53 2022 ] Parameters:
2
+ {'work_dir': './work_dir/ntu_ShiftGCN_bone_xsub', 'model_saved_name': './save_models/ntu_ShiftGCN_bone_xsub', 'Experiment_name': 'ntu_ShiftGCN_bone_xsub', 'config': './config/nturgbd-cross-subject/train_bone.yaml', 'phase': 'train', 'save_score': False, 'seed': 1, 'log_interval': 100, 'save_interval': 2, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_bone.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_bone.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl'}, 'model': 'model.shift_gcn.Model', 'model_args': {'num_class': 60, 'num_point': 25, 'num_person': 2, 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.1, 'step': [60, 80, 100], 'device': [0, 1, 2, 3], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 64, 'test_batch_size': 64, 'start_epoch': 0, 'num_epoch': 140, 'weight_decay': 0.0001, 'only_train_part': True, 'only_train_epoch': 1, 'warm_up_epoch': 0}
3
+
4
+ [ Thu Sep 15 17:47:53 2022 ] Training epoch: 1
5
+ [ Thu Sep 15 17:48:40 2022 ] Batch(99/123) done. Loss: 2.3813 lr:0.100000 network_time: 0.0478
6
+ [ Thu Sep 15 17:48:49 2022 ] Eval epoch: 1
7
+ [ Thu Sep 15 17:49:11 2022 ] Mean test loss of 258 batches: 5.7082133293151855.
8
+ [ Thu Sep 15 17:49:11 2022 ] Top1: 10.85%
9
+ [ Thu Sep 15 17:49:11 2022 ] Top5: 36.14%
10
+ [ Thu Sep 15 17:49:11 2022 ] Training epoch: 2
11
+ [ Thu Sep 15 17:49:43 2022 ] Batch(76/123) done. Loss: 2.1921 lr:0.100000 network_time: 0.0540
12
+ [ Thu Sep 15 17:50:01 2022 ] Eval epoch: 2
13
+ [ Thu Sep 15 17:50:23 2022 ] Mean test loss of 258 batches: 6.640047550201416.
14
+ [ Thu Sep 15 17:50:23 2022 ] Top1: 21.72%
15
+ [ Thu Sep 15 17:50:23 2022 ] Top5: 51.43%
16
+ [ Thu Sep 15 17:50:23 2022 ] Training epoch: 3
17
+ [ Thu Sep 15 17:50:46 2022 ] Batch(53/123) done. Loss: 1.6533 lr:0.100000 network_time: 0.0488
18
+ [ Thu Sep 15 17:51:12 2022 ] Eval epoch: 3
19
+ [ Thu Sep 15 17:51:34 2022 ] Mean test loss of 258 batches: 3.587632656097412.
20
+ [ Thu Sep 15 17:51:34 2022 ] Top1: 26.24%
21
+ [ Thu Sep 15 17:51:34 2022 ] Top5: 60.65%
22
+ [ Thu Sep 15 17:51:34 2022 ] Training epoch: 4
23
+ [ Thu Sep 15 17:51:49 2022 ] Batch(30/123) done. Loss: 1.6216 lr:0.100000 network_time: 0.0511
24
+ [ Thu Sep 15 17:52:23 2022 ] Eval epoch: 4
25
+ [ Thu Sep 15 17:52:45 2022 ] Mean test loss of 258 batches: 2.7592902183532715.
26
+ [ Thu Sep 15 17:52:45 2022 ] Top1: 34.18%
27
+ [ Thu Sep 15 17:52:46 2022 ] Top5: 68.90%
28
+ [ Thu Sep 15 17:52:46 2022 ] Training epoch: 5
29
+ [ Thu Sep 15 17:52:52 2022 ] Batch(7/123) done. Loss: 1.1457 lr:0.100000 network_time: 0.0478
30
+ [ Thu Sep 15 17:53:29 2022 ] Batch(107/123) done. Loss: 1.0360 lr:0.100000 network_time: 0.0486
31
+ [ Thu Sep 15 17:53:35 2022 ] Eval epoch: 5
32
+ [ Thu Sep 15 17:53:56 2022 ] Mean test loss of 258 batches: 3.083068370819092.
33
+ [ Thu Sep 15 17:53:57 2022 ] Top1: 35.23%
34
+ [ Thu Sep 15 17:53:57 2022 ] Top5: 65.32%
35
+ [ Thu Sep 15 17:53:57 2022 ] Training epoch: 6
36
+ [ Thu Sep 15 17:54:32 2022 ] Batch(84/123) done. Loss: 1.1396 lr:0.100000 network_time: 0.0504
37
+ [ Thu Sep 15 17:54:46 2022 ] Eval epoch: 6
38
+ [ Thu Sep 15 17:55:08 2022 ] Mean test loss of 258 batches: 3.323880434036255.
39
+ [ Thu Sep 15 17:55:08 2022 ] Top1: 35.82%
40
+ [ Thu Sep 15 17:55:08 2022 ] Top5: 64.31%
41
+ [ Thu Sep 15 17:55:08 2022 ] Training epoch: 7
42
+ [ Thu Sep 15 17:55:35 2022 ] Batch(61/123) done. Loss: 0.8685 lr:0.100000 network_time: 0.0488
43
+ [ Thu Sep 15 17:55:58 2022 ] Eval epoch: 7
44
+ [ Thu Sep 15 17:56:20 2022 ] Mean test loss of 258 batches: 3.480191230773926.
45
+ [ Thu Sep 15 17:56:20 2022 ] Top1: 32.70%
46
+ [ Thu Sep 15 17:56:20 2022 ] Top5: 62.88%
47
+ [ Thu Sep 15 17:56:20 2022 ] Training epoch: 8
48
+ [ Thu Sep 15 17:56:38 2022 ] Batch(38/123) done. Loss: 1.0288 lr:0.100000 network_time: 0.0512
49
+ [ Thu Sep 15 17:57:09 2022 ] Eval epoch: 8
50
+ [ Thu Sep 15 17:57:32 2022 ] Mean test loss of 258 batches: 2.307069778442383.
51
+ [ Thu Sep 15 17:57:32 2022 ] Top1: 40.67%
52
+ [ Thu Sep 15 17:57:32 2022 ] Top5: 77.36%
53
+ [ Thu Sep 15 17:57:32 2022 ] Training epoch: 9
54
+ [ Thu Sep 15 17:57:42 2022 ] Batch(15/123) done. Loss: 0.9701 lr:0.100000 network_time: 0.0513
55
+ [ Thu Sep 15 17:58:18 2022 ] Batch(115/123) done. Loss: 1.0193 lr:0.100000 network_time: 0.0488
56
+ [ Thu Sep 15 17:58:21 2022 ] Eval epoch: 9
57
+ [ Thu Sep 15 17:58:43 2022 ] Mean test loss of 258 batches: 2.7951412200927734.
58
+ [ Thu Sep 15 17:58:43 2022 ] Top1: 37.25%
59
+ [ Thu Sep 15 17:58:43 2022 ] Top5: 70.56%
60
+ [ Thu Sep 15 17:58:43 2022 ] Training epoch: 10
61
+ [ Thu Sep 15 17:59:21 2022 ] Batch(92/123) done. Loss: 1.0893 lr:0.100000 network_time: 0.0515
62
+ [ Thu Sep 15 17:59:32 2022 ] Eval epoch: 10
63
+ [ Thu Sep 15 17:59:55 2022 ] Mean test loss of 258 batches: 2.059424638748169.
64
+ [ Thu Sep 15 17:59:55 2022 ] Top1: 47.10%
65
+ [ Thu Sep 15 17:59:55 2022 ] Top5: 80.37%
66
+ [ Thu Sep 15 17:59:55 2022 ] Training epoch: 11
67
+ [ Thu Sep 15 18:00:24 2022 ] Batch(69/123) done. Loss: 0.7402 lr:0.100000 network_time: 0.0520
68
+ [ Thu Sep 15 18:00:44 2022 ] Eval epoch: 11
69
+ [ Thu Sep 15 18:01:06 2022 ] Mean test loss of 258 batches: 2.2376978397369385.
70
+ [ Thu Sep 15 18:01:06 2022 ] Top1: 45.21%
71
+ [ Thu Sep 15 18:01:07 2022 ] Top5: 79.05%
72
+ [ Thu Sep 15 18:01:07 2022 ] Training epoch: 12
73
+ [ Thu Sep 15 18:01:28 2022 ] Batch(46/123) done. Loss: 0.5997 lr:0.100000 network_time: 0.0502
74
+ [ Thu Sep 15 18:01:56 2022 ] Eval epoch: 12
75
+ [ Thu Sep 15 18:02:18 2022 ] Mean test loss of 258 batches: 2.544092893600464.
76
+ [ Thu Sep 15 18:02:18 2022 ] Top1: 44.84%
77
+ [ Thu Sep 15 18:02:18 2022 ] Top5: 79.83%
78
+ [ Thu Sep 15 18:02:18 2022 ] Training epoch: 13
79
+ [ Thu Sep 15 18:02:31 2022 ] Batch(23/123) done. Loss: 0.9503 lr:0.100000 network_time: 0.0497
80
+ [ Thu Sep 15 18:03:07 2022 ] Eval epoch: 13
81
+ [ Thu Sep 15 18:03:29 2022 ] Mean test loss of 258 batches: 1.9147826433181763.
82
+ [ Thu Sep 15 18:03:29 2022 ] Top1: 51.48%
83
+ [ Thu Sep 15 18:03:29 2022 ] Top5: 83.05%
84
+ [ Thu Sep 15 18:03:29 2022 ] Training epoch: 14
85
+ [ Thu Sep 15 18:03:34 2022 ] Batch(0/123) done. Loss: 0.5457 lr:0.100000 network_time: 0.1016
86
+ [ Thu Sep 15 18:04:10 2022 ] Batch(100/123) done. Loss: 0.5821 lr:0.100000 network_time: 0.0494
87
+ [ Thu Sep 15 18:04:19 2022 ] Eval epoch: 14
88
+ [ Thu Sep 15 18:04:41 2022 ] Mean test loss of 258 batches: 3.2059364318847656.
89
+ [ Thu Sep 15 18:04:41 2022 ] Top1: 40.05%
90
+ [ Thu Sep 15 18:04:41 2022 ] Top5: 73.72%
91
+ [ Thu Sep 15 18:04:42 2022 ] Training epoch: 15
92
+ [ Thu Sep 15 18:05:14 2022 ] Batch(77/123) done. Loss: 0.7532 lr:0.100000 network_time: 0.0495
93
+ [ Thu Sep 15 18:05:31 2022 ] Eval epoch: 15
94
+ [ Thu Sep 15 18:05:52 2022 ] Mean test loss of 258 batches: 2.295565605163574.
95
+ [ Thu Sep 15 18:05:52 2022 ] Top1: 44.93%
96
+ [ Thu Sep 15 18:05:53 2022 ] Top5: 81.57%
97
+ [ Thu Sep 15 18:05:53 2022 ] Training epoch: 16
98
+ [ Thu Sep 15 18:06:17 2022 ] Batch(54/123) done. Loss: 0.7355 lr:0.100000 network_time: 0.0494
99
+ [ Thu Sep 15 18:06:42 2022 ] Eval epoch: 16
100
+ [ Thu Sep 15 18:07:04 2022 ] Mean test loss of 258 batches: 2.1258904933929443.
101
+ [ Thu Sep 15 18:07:04 2022 ] Top1: 49.40%
102
+ [ Thu Sep 15 18:07:04 2022 ] Top5: 83.77%
103
+ [ Thu Sep 15 18:07:04 2022 ] Training epoch: 17
104
+ [ Thu Sep 15 18:07:19 2022 ] Batch(31/123) done. Loss: 0.6526 lr:0.100000 network_time: 0.0500
105
+ [ Thu Sep 15 18:07:53 2022 ] Eval epoch: 17
106
+ [ Thu Sep 15 18:08:15 2022 ] Mean test loss of 258 batches: 2.0040574073791504.
107
+ [ Thu Sep 15 18:08:15 2022 ] Top1: 50.99%
108
+ [ Thu Sep 15 18:08:15 2022 ] Top5: 84.38%
109
+ [ Thu Sep 15 18:08:15 2022 ] Training epoch: 18
110
+ [ Thu Sep 15 18:08:22 2022 ] Batch(8/123) done. Loss: 0.4405 lr:0.100000 network_time: 0.0521
111
+ [ Thu Sep 15 18:08:59 2022 ] Batch(108/123) done. Loss: 0.3498 lr:0.100000 network_time: 0.0497
112
+ [ Thu Sep 15 18:09:05 2022 ] Eval epoch: 18
113
+ [ Thu Sep 15 18:09:26 2022 ] Mean test loss of 258 batches: 2.170915365219116.
114
+ [ Thu Sep 15 18:09:26 2022 ] Top1: 51.73%
115
+ [ Thu Sep 15 18:09:26 2022 ] Top5: 83.59%
116
+ [ Thu Sep 15 18:09:26 2022 ] Training epoch: 19
117
+ [ Thu Sep 15 18:10:02 2022 ] Batch(85/123) done. Loss: 0.4577 lr:0.100000 network_time: 0.0501
118
+ [ Thu Sep 15 18:10:16 2022 ] Eval epoch: 19
119
+ [ Thu Sep 15 18:10:37 2022 ] Mean test loss of 258 batches: 2.4165236949920654.
120
+ [ Thu Sep 15 18:10:38 2022 ] Top1: 48.97%
121
+ [ Thu Sep 15 18:10:38 2022 ] Top5: 81.39%
122
+ [ Thu Sep 15 18:10:38 2022 ] Training epoch: 20
123
+ [ Thu Sep 15 18:11:04 2022 ] Batch(62/123) done. Loss: 0.4567 lr:0.100000 network_time: 0.0480
124
+ [ Thu Sep 15 18:11:27 2022 ] Eval epoch: 20
125
+ [ Thu Sep 15 18:11:49 2022 ] Mean test loss of 258 batches: 2.2029035091400146.
126
+ [ Thu Sep 15 18:11:49 2022 ] Top1: 48.95%
127
+ [ Thu Sep 15 18:11:49 2022 ] Top5: 83.41%
128
+ [ Thu Sep 15 18:11:49 2022 ] Training epoch: 21
129
+ [ Thu Sep 15 18:12:07 2022 ] Batch(39/123) done. Loss: 0.5271 lr:0.100000 network_time: 0.0502
130
+ [ Thu Sep 15 18:12:38 2022 ] Eval epoch: 21
131
+ [ Thu Sep 15 18:13:00 2022 ] Mean test loss of 258 batches: 2.533735990524292.
132
+ [ Thu Sep 15 18:13:00 2022 ] Top1: 45.28%
133
+ [ Thu Sep 15 18:13:01 2022 ] Top5: 79.87%
134
+ [ Thu Sep 15 18:13:01 2022 ] Training epoch: 22
135
+ [ Thu Sep 15 18:13:11 2022 ] Batch(16/123) done. Loss: 0.3980 lr:0.100000 network_time: 0.0473
136
+ [ Thu Sep 15 18:13:48 2022 ] Batch(116/123) done. Loss: 0.5138 lr:0.100000 network_time: 0.0502
137
+ [ Thu Sep 15 18:13:50 2022 ] Eval epoch: 22
138
+ [ Thu Sep 15 18:14:12 2022 ] Mean test loss of 258 batches: 2.2566044330596924.
139
+ [ Thu Sep 15 18:14:12 2022 ] Top1: 49.99%
140
+ [ Thu Sep 15 18:14:12 2022 ] Top5: 83.68%
141
+ [ Thu Sep 15 18:14:12 2022 ] Training epoch: 23
142
+ [ Thu Sep 15 18:14:50 2022 ] Batch(93/123) done. Loss: 0.8055 lr:0.100000 network_time: 0.0482
143
+ [ Thu Sep 15 18:15:01 2022 ] Eval epoch: 23
144
+ [ Thu Sep 15 18:15:22 2022 ] Mean test loss of 258 batches: 2.1785130500793457.
145
+ [ Thu Sep 15 18:15:22 2022 ] Top1: 51.98%
146
+ [ Thu Sep 15 18:15:23 2022 ] Top5: 83.66%
147
+ [ Thu Sep 15 18:15:23 2022 ] Training epoch: 24
148
+ [ Thu Sep 15 18:15:52 2022 ] Batch(70/123) done. Loss: 0.3882 lr:0.100000 network_time: 0.0521
149
+ [ Thu Sep 15 18:16:12 2022 ] Eval epoch: 24
150
+ [ Thu Sep 15 18:16:34 2022 ] Mean test loss of 258 batches: 2.931326389312744.
151
+ [ Thu Sep 15 18:16:34 2022 ] Top1: 44.62%
152
+ [ Thu Sep 15 18:16:34 2022 ] Top5: 76.93%
153
+ [ Thu Sep 15 18:16:34 2022 ] Training epoch: 25
154
+ [ Thu Sep 15 18:16:55 2022 ] Batch(47/123) done. Loss: 0.4080 lr:0.100000 network_time: 0.0516
155
+ [ Thu Sep 15 18:17:23 2022 ] Eval epoch: 25
156
+ [ Thu Sep 15 18:17:45 2022 ] Mean test loss of 258 batches: 1.9534658193588257.
157
+ [ Thu Sep 15 18:17:45 2022 ] Top1: 54.25%
158
+ [ Thu Sep 15 18:17:45 2022 ] Top5: 84.85%
159
+ [ Thu Sep 15 18:17:45 2022 ] Training epoch: 26
160
+ [ Thu Sep 15 18:17:58 2022 ] Batch(24/123) done. Loss: 0.4425 lr:0.100000 network_time: 0.0469
161
+ [ Thu Sep 15 18:18:34 2022 ] Eval epoch: 26
162
+ [ Thu Sep 15 18:18:56 2022 ] Mean test loss of 258 batches: 2.3278818130493164.
163
+ [ Thu Sep 15 18:18:56 2022 ] Top1: 50.68%
164
+ [ Thu Sep 15 18:18:57 2022 ] Top5: 83.35%
165
+ [ Thu Sep 15 18:18:57 2022 ] Training epoch: 27
166
+ [ Thu Sep 15 18:19:01 2022 ] Batch(1/123) done. Loss: 0.2602 lr:0.100000 network_time: 0.0477
167
+ [ Thu Sep 15 18:19:38 2022 ] Batch(101/123) done. Loss: 0.4489 lr:0.100000 network_time: 0.0506
168
+ [ Thu Sep 15 18:19:46 2022 ] Eval epoch: 27
169
+ [ Thu Sep 15 18:20:08 2022 ] Mean test loss of 258 batches: 1.912603735923767.
170
+ [ Thu Sep 15 18:20:08 2022 ] Top1: 54.53%
171
+ [ Thu Sep 15 18:20:08 2022 ] Top5: 85.32%
172
+ [ Thu Sep 15 18:20:08 2022 ] Training epoch: 28
173
+ [ Thu Sep 15 18:20:41 2022 ] Batch(78/123) done. Loss: 0.5833 lr:0.100000 network_time: 0.0513
174
+ [ Thu Sep 15 18:20:57 2022 ] Eval epoch: 28
175
+ [ Thu Sep 15 18:21:19 2022 ] Mean test loss of 258 batches: 2.167754650115967.
176
+ [ Thu Sep 15 18:21:19 2022 ] Top1: 55.58%
177
+ [ Thu Sep 15 18:21:19 2022 ] Top5: 86.30%
178
+ [ Thu Sep 15 18:21:19 2022 ] Training epoch: 29
179
+ [ Thu Sep 15 18:21:44 2022 ] Batch(55/123) done. Loss: 0.3940 lr:0.100000 network_time: 0.0516
180
+ [ Thu Sep 15 18:22:09 2022 ] Eval epoch: 29
181
+ [ Thu Sep 15 18:22:30 2022 ] Mean test loss of 258 batches: 2.0642311573028564.
182
+ [ Thu Sep 15 18:22:31 2022 ] Top1: 55.52%
183
+ [ Thu Sep 15 18:22:31 2022 ] Top5: 86.89%
184
+ [ Thu Sep 15 18:22:31 2022 ] Training epoch: 30
185
+ [ Thu Sep 15 18:22:47 2022 ] Batch(32/123) done. Loss: 0.4275 lr:0.100000 network_time: 0.0494
186
+ [ Thu Sep 15 18:23:20 2022 ] Eval epoch: 30
187
+ [ Thu Sep 15 18:23:42 2022 ] Mean test loss of 258 batches: 1.9236493110656738.
188
+ [ Thu Sep 15 18:23:42 2022 ] Top1: 55.09%
189
+ [ Thu Sep 15 18:23:42 2022 ] Top5: 84.92%
190
+ [ Thu Sep 15 18:23:42 2022 ] Training epoch: 31
191
+ [ Thu Sep 15 18:23:50 2022 ] Batch(9/123) done. Loss: 0.2980 lr:0.100000 network_time: 0.0496
192
+ [ Thu Sep 15 18:24:27 2022 ] Batch(109/123) done. Loss: 0.3614 lr:0.100000 network_time: 0.0495
193
+ [ Thu Sep 15 18:24:32 2022 ] Eval epoch: 31
194
+ [ Thu Sep 15 18:24:54 2022 ] Mean test loss of 258 batches: 2.2241408824920654.
195
+ [ Thu Sep 15 18:24:54 2022 ] Top1: 52.15%
196
+ [ Thu Sep 15 18:24:54 2022 ] Top5: 85.18%
197
+ [ Thu Sep 15 18:24:54 2022 ] Training epoch: 32
198
+ [ Thu Sep 15 18:25:30 2022 ] Batch(86/123) done. Loss: 0.4021 lr:0.100000 network_time: 0.0503
199
+ [ Thu Sep 15 18:25:43 2022 ] Eval epoch: 32
200
+ [ Thu Sep 15 18:26:05 2022 ] Mean test loss of 258 batches: 1.9539873600006104.
201
+ [ Thu Sep 15 18:26:05 2022 ] Top1: 51.09%
202
+ [ Thu Sep 15 18:26:05 2022 ] Top5: 84.44%
203
+ [ Thu Sep 15 18:26:05 2022 ] Training epoch: 33
204
+ [ Thu Sep 15 18:26:32 2022 ] Batch(63/123) done. Loss: 0.3629 lr:0.100000 network_time: 0.0688
205
+ [ Thu Sep 15 18:26:54 2022 ] Eval epoch: 33
206
+ [ Thu Sep 15 18:27:16 2022 ] Mean test loss of 258 batches: 2.223938465118408.
207
+ [ Thu Sep 15 18:27:16 2022 ] Top1: 53.82%
208
+ [ Thu Sep 15 18:27:17 2022 ] Top5: 84.55%
209
+ [ Thu Sep 15 18:27:17 2022 ] Training epoch: 34
210
+ [ Thu Sep 15 18:27:35 2022 ] Batch(40/123) done. Loss: 0.3019 lr:0.100000 network_time: 0.0506
211
+ [ Thu Sep 15 18:28:06 2022 ] Eval epoch: 34
212
+ [ Thu Sep 15 18:28:28 2022 ] Mean test loss of 258 batches: 2.304352283477783.
213
+ [ Thu Sep 15 18:28:28 2022 ] Top1: 52.24%
214
+ [ Thu Sep 15 18:28:28 2022 ] Top5: 83.38%
215
+ [ Thu Sep 15 18:28:28 2022 ] Training epoch: 35
216
+ [ Thu Sep 15 18:28:38 2022 ] Batch(17/123) done. Loss: 0.2968 lr:0.100000 network_time: 0.0554
217
+ [ Thu Sep 15 18:29:15 2022 ] Batch(117/123) done. Loss: 0.3346 lr:0.100000 network_time: 0.0500
218
+ [ Thu Sep 15 18:29:17 2022 ] Eval epoch: 35
219
+ [ Thu Sep 15 18:29:39 2022 ] Mean test loss of 258 batches: 2.392214298248291.
220
+ [ Thu Sep 15 18:29:40 2022 ] Top1: 52.00%
221
+ [ Thu Sep 15 18:29:40 2022 ] Top5: 82.93%
222
+ [ Thu Sep 15 18:29:40 2022 ] Training epoch: 36
223
+ [ Thu Sep 15 18:30:18 2022 ] Batch(94/123) done. Loss: 0.2313 lr:0.100000 network_time: 0.0524
224
+ [ Thu Sep 15 18:30:29 2022 ] Eval epoch: 36
225
+ [ Thu Sep 15 18:30:51 2022 ] Mean test loss of 258 batches: 2.248171806335449.
226
+ [ Thu Sep 15 18:30:51 2022 ] Top1: 52.39%
227
+ [ Thu Sep 15 18:30:51 2022 ] Top5: 84.59%
228
+ [ Thu Sep 15 18:30:51 2022 ] Training epoch: 37
229
+ [ Thu Sep 15 18:31:21 2022 ] Batch(71/123) done. Loss: 0.2396 lr:0.100000 network_time: 0.0512
230
+ [ Thu Sep 15 18:31:40 2022 ] Eval epoch: 37
231
+ [ Thu Sep 15 18:32:02 2022 ] Mean test loss of 258 batches: 2.4701759815216064.
232
+ [ Thu Sep 15 18:32:02 2022 ] Top1: 52.17%
233
+ [ Thu Sep 15 18:32:02 2022 ] Top5: 85.15%
234
+ [ Thu Sep 15 18:32:02 2022 ] Training epoch: 38
235
+ [ Thu Sep 15 18:32:24 2022 ] Batch(48/123) done. Loss: 0.1601 lr:0.100000 network_time: 0.0497
236
+ [ Thu Sep 15 18:32:52 2022 ] Eval epoch: 38
237
+ [ Thu Sep 15 18:33:13 2022 ] Mean test loss of 258 batches: 2.625194549560547.
238
+ [ Thu Sep 15 18:33:13 2022 ] Top1: 49.11%
239
+ [ Thu Sep 15 18:33:13 2022 ] Top5: 82.32%
240
+ [ Thu Sep 15 18:33:13 2022 ] Training epoch: 39
241
+ [ Thu Sep 15 18:33:26 2022 ] Batch(25/123) done. Loss: 0.2385 lr:0.100000 network_time: 0.0491
242
+ [ Thu Sep 15 18:34:02 2022 ] Eval epoch: 39
243
+ [ Thu Sep 15 18:34:24 2022 ] Mean test loss of 258 batches: 2.2160768508911133.
244
+ [ Thu Sep 15 18:34:24 2022 ] Top1: 52.51%
245
+ [ Thu Sep 15 18:34:24 2022 ] Top5: 85.21%
246
+ [ Thu Sep 15 18:34:24 2022 ] Training epoch: 40
247
+ [ Thu Sep 15 18:34:29 2022 ] Batch(2/123) done. Loss: 0.2694 lr:0.100000 network_time: 0.0741
248
+ [ Thu Sep 15 18:35:05 2022 ] Batch(102/123) done. Loss: 0.4774 lr:0.100000 network_time: 0.0531
249
+ [ Thu Sep 15 18:35:13 2022 ] Eval epoch: 40
250
+ [ Thu Sep 15 18:35:35 2022 ] Mean test loss of 258 batches: 2.8346052169799805.
251
+ [ Thu Sep 15 18:35:35 2022 ] Top1: 45.68%
252
+ [ Thu Sep 15 18:35:35 2022 ] Top5: 77.05%
253
+ [ Thu Sep 15 18:35:35 2022 ] Training epoch: 41
254
+ [ Thu Sep 15 18:36:08 2022 ] Batch(79/123) done. Loss: 0.2311 lr:0.100000 network_time: 0.0525
255
+ [ Thu Sep 15 18:36:24 2022 ] Eval epoch: 41
256
+ [ Thu Sep 15 18:36:47 2022 ] Mean test loss of 258 batches: 2.6487550735473633.
257
+ [ Thu Sep 15 18:36:47 2022 ] Top1: 48.83%
258
+ [ Thu Sep 15 18:36:47 2022 ] Top5: 81.41%
259
+ [ Thu Sep 15 18:36:47 2022 ] Training epoch: 42
260
+ [ Thu Sep 15 18:37:11 2022 ] Batch(56/123) done. Loss: 0.1981 lr:0.100000 network_time: 0.0571
261
+ [ Thu Sep 15 18:37:36 2022 ] Eval epoch: 42
262
+ [ Thu Sep 15 18:37:58 2022 ] Mean test loss of 258 batches: 2.2918591499328613.
263
+ [ Thu Sep 15 18:37:58 2022 ] Top1: 51.79%
264
+ [ Thu Sep 15 18:37:58 2022 ] Top5: 82.48%
265
+ [ Thu Sep 15 18:37:58 2022 ] Training epoch: 43
266
+ [ Thu Sep 15 18:38:14 2022 ] Batch(33/123) done. Loss: 0.2031 lr:0.100000 network_time: 0.0490
267
+ [ Thu Sep 15 18:38:47 2022 ] Eval epoch: 43
268
+ [ Thu Sep 15 18:39:09 2022 ] Mean test loss of 258 batches: 2.716893196105957.
269
+ [ Thu Sep 15 18:39:09 2022 ] Top1: 49.04%
270
+ [ Thu Sep 15 18:39:09 2022 ] Top5: 82.11%
271
+ [ Thu Sep 15 18:39:09 2022 ] Training epoch: 44
272
+ [ Thu Sep 15 18:39:16 2022 ] Batch(10/123) done. Loss: 0.2014 lr:0.100000 network_time: 0.0465
273
+ [ Thu Sep 15 18:39:53 2022 ] Batch(110/123) done. Loss: 0.2128 lr:0.100000 network_time: 0.0488
274
+ [ Thu Sep 15 18:39:58 2022 ] Eval epoch: 44
275
+ [ Thu Sep 15 18:40:20 2022 ] Mean test loss of 258 batches: 2.1684532165527344.
276
+ [ Thu Sep 15 18:40:20 2022 ] Top1: 55.17%
277
+ [ Thu Sep 15 18:40:20 2022 ] Top5: 86.61%
278
+ [ Thu Sep 15 18:40:20 2022 ] Training epoch: 45
279
+ [ Thu Sep 15 18:40:55 2022 ] Batch(87/123) done. Loss: 0.1993 lr:0.100000 network_time: 0.0519
280
+ [ Thu Sep 15 18:41:09 2022 ] Eval epoch: 45
281
+ [ Thu Sep 15 18:41:31 2022 ] Mean test loss of 258 batches: 2.9825515747070312.
282
+ [ Thu Sep 15 18:41:31 2022 ] Top1: 47.93%
283
+ [ Thu Sep 15 18:41:31 2022 ] Top5: 80.66%
284
+ [ Thu Sep 15 18:41:31 2022 ] Training epoch: 46
285
+ [ Thu Sep 15 18:41:58 2022 ] Batch(64/123) done. Loss: 0.2144 lr:0.100000 network_time: 0.0504
286
+ [ Thu Sep 15 18:42:20 2022 ] Eval epoch: 46
287
+ [ Thu Sep 15 18:42:42 2022 ] Mean test loss of 258 batches: 2.1467137336730957.
288
+ [ Thu Sep 15 18:42:42 2022 ] Top1: 54.75%
289
+ [ Thu Sep 15 18:42:42 2022 ] Top5: 85.50%
290
+ [ Thu Sep 15 18:42:42 2022 ] Training epoch: 47
291
+ [ Thu Sep 15 18:43:01 2022 ] Batch(41/123) done. Loss: 0.3797 lr:0.100000 network_time: 0.0513
292
+ [ Thu Sep 15 18:43:31 2022 ] Eval epoch: 47
293
+ [ Thu Sep 15 18:43:53 2022 ] Mean test loss of 258 batches: 2.668498992919922.
294
+ [ Thu Sep 15 18:43:53 2022 ] Top1: 49.12%
295
+ [ Thu Sep 15 18:43:53 2022 ] Top5: 82.49%
296
+ [ Thu Sep 15 18:43:54 2022 ] Training epoch: 48
297
+ [ Thu Sep 15 18:44:04 2022 ] Batch(18/123) done. Loss: 0.1254 lr:0.100000 network_time: 0.0476
298
+ [ Thu Sep 15 18:44:41 2022 ] Batch(118/123) done. Loss: 0.2008 lr:0.100000 network_time: 0.0497
299
+ [ Thu Sep 15 18:44:42 2022 ] Eval epoch: 48
300
+ [ Thu Sep 15 18:45:04 2022 ] Mean test loss of 258 batches: 2.8411271572113037.
301
+ [ Thu Sep 15 18:45:04 2022 ] Top1: 48.40%
302
+ [ Thu Sep 15 18:45:05 2022 ] Top5: 79.99%
303
+ [ Thu Sep 15 18:45:05 2022 ] Training epoch: 49
304
+ [ Thu Sep 15 18:45:43 2022 ] Batch(95/123) done. Loss: 0.3122 lr:0.100000 network_time: 0.0526
305
+ [ Thu Sep 15 18:45:53 2022 ] Eval epoch: 49
306
+ [ Thu Sep 15 18:46:16 2022 ] Mean test loss of 258 batches: 2.817328929901123.
307
+ [ Thu Sep 15 18:46:16 2022 ] Top1: 49.04%
308
+ [ Thu Sep 15 18:46:16 2022 ] Top5: 82.11%
309
+ [ Thu Sep 15 18:46:16 2022 ] Training epoch: 50
310
+ [ Thu Sep 15 18:46:47 2022 ] Batch(72/123) done. Loss: 0.2220 lr:0.100000 network_time: 0.0502
311
+ [ Thu Sep 15 18:47:05 2022 ] Eval epoch: 50
312
+ [ Thu Sep 15 18:47:28 2022 ] Mean test loss of 258 batches: 2.002220869064331.
313
+ [ Thu Sep 15 18:47:28 2022 ] Top1: 56.75%
314
+ [ Thu Sep 15 18:47:28 2022 ] Top5: 88.09%
315
+ [ Thu Sep 15 18:47:28 2022 ] Training epoch: 51
316
+ [ Thu Sep 15 18:47:50 2022 ] Batch(49/123) done. Loss: 0.2155 lr:0.100000 network_time: 0.0476
317
+ [ Thu Sep 15 18:48:17 2022 ] Eval epoch: 51
318
+ [ Thu Sep 15 18:48:40 2022 ] Mean test loss of 258 batches: 2.272024154663086.
319
+ [ Thu Sep 15 18:48:40 2022 ] Top1: 53.65%
320
+ [ Thu Sep 15 18:48:40 2022 ] Top5: 83.74%
321
+ [ Thu Sep 15 18:48:40 2022 ] Training epoch: 52
322
+ [ Thu Sep 15 18:48:54 2022 ] Batch(26/123) done. Loss: 0.2241 lr:0.100000 network_time: 0.0483
323
+ [ Thu Sep 15 18:49:29 2022 ] Eval epoch: 52
324
+ [ Thu Sep 15 18:49:52 2022 ] Mean test loss of 258 batches: 2.388519763946533.
325
+ [ Thu Sep 15 18:49:52 2022 ] Top1: 54.24%
326
+ [ Thu Sep 15 18:49:52 2022 ] Top5: 84.73%
327
+ [ Thu Sep 15 18:49:52 2022 ] Training epoch: 53
328
+ [ Thu Sep 15 18:49:57 2022 ] Batch(3/123) done. Loss: 0.2244 lr:0.100000 network_time: 0.0548
329
+ [ Thu Sep 15 18:50:34 2022 ] Batch(103/123) done. Loss: 0.3865 lr:0.100000 network_time: 0.0536
330
+ [ Thu Sep 15 18:50:41 2022 ] Eval epoch: 53
331
+ [ Thu Sep 15 18:51:03 2022 ] Mean test loss of 258 batches: 2.6002821922302246.
332
+ [ Thu Sep 15 18:51:03 2022 ] Top1: 51.43%
333
+ [ Thu Sep 15 18:51:03 2022 ] Top5: 81.96%
334
+ [ Thu Sep 15 18:51:03 2022 ] Training epoch: 54
335
+ [ Thu Sep 15 18:51:37 2022 ] Batch(80/123) done. Loss: 0.3344 lr:0.100000 network_time: 0.0506
336
+ [ Thu Sep 15 18:51:52 2022 ] Eval epoch: 54
337
+ [ Thu Sep 15 18:52:14 2022 ] Mean test loss of 258 batches: 2.0189566612243652.
338
+ [ Thu Sep 15 18:52:15 2022 ] Top1: 57.08%
339
+ [ Thu Sep 15 18:52:15 2022 ] Top5: 87.01%
340
+ [ Thu Sep 15 18:52:15 2022 ] Training epoch: 55
341
+ [ Thu Sep 15 18:52:40 2022 ] Batch(57/123) done. Loss: 0.2622 lr:0.100000 network_time: 0.0501
342
+ [ Thu Sep 15 18:53:04 2022 ] Eval epoch: 55
343
+ [ Thu Sep 15 18:53:26 2022 ] Mean test loss of 258 batches: 2.4324235916137695.
344
+ [ Thu Sep 15 18:53:26 2022 ] Top1: 52.70%
345
+ [ Thu Sep 15 18:53:26 2022 ] Top5: 84.04%
346
+ [ Thu Sep 15 18:53:26 2022 ] Training epoch: 56
347
+ [ Thu Sep 15 18:53:43 2022 ] Batch(34/123) done. Loss: 0.1660 lr:0.100000 network_time: 0.0509
348
+ [ Thu Sep 15 18:54:15 2022 ] Eval epoch: 56
349
+ [ Thu Sep 15 18:54:38 2022 ] Mean test loss of 258 batches: 2.0325913429260254.
350
+ [ Thu Sep 15 18:54:38 2022 ] Top1: 55.20%
351
+ [ Thu Sep 15 18:54:38 2022 ] Top5: 85.80%
352
+ [ Thu Sep 15 18:54:38 2022 ] Training epoch: 57
353
+ [ Thu Sep 15 18:54:46 2022 ] Batch(11/123) done. Loss: 0.1341 lr:0.100000 network_time: 0.0484
354
+ [ Thu Sep 15 18:55:22 2022 ] Batch(111/123) done. Loss: 0.1991 lr:0.100000 network_time: 0.0486
355
+ [ Thu Sep 15 18:55:27 2022 ] Eval epoch: 57
356
+ [ Thu Sep 15 18:55:49 2022 ] Mean test loss of 258 batches: 2.4592440128326416.
357
+ [ Thu Sep 15 18:55:49 2022 ] Top1: 52.16%
358
+ [ Thu Sep 15 18:55:49 2022 ] Top5: 85.21%
359
+ [ Thu Sep 15 18:55:49 2022 ] Training epoch: 58
360
+ [ Thu Sep 15 18:56:26 2022 ] Batch(88/123) done. Loss: 0.2833 lr:0.100000 network_time: 0.0509
361
+ [ Thu Sep 15 18:56:39 2022 ] Eval epoch: 58
362
+ [ Thu Sep 15 18:57:01 2022 ] Mean test loss of 258 batches: 2.467912197113037.
363
+ [ Thu Sep 15 18:57:01 2022 ] Top1: 51.36%
364
+ [ Thu Sep 15 18:57:01 2022 ] Top5: 83.87%
365
+ [ Thu Sep 15 18:57:01 2022 ] Training epoch: 59
366
+ [ Thu Sep 15 18:57:29 2022 ] Batch(65/123) done. Loss: 0.1076 lr:0.100000 network_time: 0.0481
367
+ [ Thu Sep 15 18:57:50 2022 ] Eval epoch: 59
368
+ [ Thu Sep 15 18:58:12 2022 ] Mean test loss of 258 batches: 2.4239609241485596.
369
+ [ Thu Sep 15 18:58:12 2022 ] Top1: 53.31%
370
+ [ Thu Sep 15 18:58:12 2022 ] Top5: 84.36%
371
+ [ Thu Sep 15 18:58:12 2022 ] Training epoch: 60
372
+ [ Thu Sep 15 18:58:31 2022 ] Batch(42/123) done. Loss: 0.1055 lr:0.100000 network_time: 0.0495
373
+ [ Thu Sep 15 18:59:01 2022 ] Eval epoch: 60
374
+ [ Thu Sep 15 18:59:23 2022 ] Mean test loss of 258 batches: 3.575775146484375.
375
+ [ Thu Sep 15 18:59:23 2022 ] Top1: 43.85%
376
+ [ Thu Sep 15 18:59:23 2022 ] Top5: 77.35%
377
+ [ Thu Sep 15 18:59:23 2022 ] Training epoch: 61
378
+ [ Thu Sep 15 18:59:34 2022 ] Batch(19/123) done. Loss: 0.0923 lr:0.010000 network_time: 0.0500
379
+ [ Thu Sep 15 19:00:11 2022 ] Batch(119/123) done. Loss: 0.2071 lr:0.010000 network_time: 0.0513
380
+ [ Thu Sep 15 19:00:13 2022 ] Eval epoch: 61
381
+ [ Thu Sep 15 19:00:35 2022 ] Mean test loss of 258 batches: 1.877470850944519.
382
+ [ Thu Sep 15 19:00:35 2022 ] Top1: 60.21%
383
+ [ Thu Sep 15 19:00:35 2022 ] Top5: 88.57%
384
+ [ Thu Sep 15 19:00:35 2022 ] Training epoch: 62
385
+ [ Thu Sep 15 19:01:14 2022 ] Batch(96/123) done. Loss: 0.0809 lr:0.010000 network_time: 0.0479
386
+ [ Thu Sep 15 19:01:24 2022 ] Eval epoch: 62
387
+ [ Thu Sep 15 19:01:46 2022 ] Mean test loss of 258 batches: 1.7835830450057983.
388
+ [ Thu Sep 15 19:01:46 2022 ] Top1: 61.78%
389
+ [ Thu Sep 15 19:01:46 2022 ] Top5: 89.26%
390
+ [ Thu Sep 15 19:01:46 2022 ] Training epoch: 63
391
+ [ Thu Sep 15 19:02:17 2022 ] Batch(73/123) done. Loss: 0.0213 lr:0.010000 network_time: 0.0486
392
+ [ Thu Sep 15 19:02:35 2022 ] Eval epoch: 63
393
+ [ Thu Sep 15 19:02:57 2022 ] Mean test loss of 258 batches: 1.9798433780670166.
394
+ [ Thu Sep 15 19:02:57 2022 ] Top1: 60.31%
395
+ [ Thu Sep 15 19:02:57 2022 ] Top5: 88.11%
396
+ [ Thu Sep 15 19:02:57 2022 ] Training epoch: 64
397
+ [ Thu Sep 15 19:03:19 2022 ] Batch(50/123) done. Loss: 0.0367 lr:0.010000 network_time: 0.0499
398
+ [ Thu Sep 15 19:03:46 2022 ] Eval epoch: 64
399
+ [ Thu Sep 15 19:04:08 2022 ] Mean test loss of 258 batches: 1.7843399047851562.
400
+ [ Thu Sep 15 19:04:08 2022 ] Top1: 63.04%
401
+ [ Thu Sep 15 19:04:08 2022 ] Top5: 89.64%
402
+ [ Thu Sep 15 19:04:08 2022 ] Training epoch: 65
403
+ [ Thu Sep 15 19:04:22 2022 ] Batch(27/123) done. Loss: 0.0191 lr:0.010000 network_time: 0.0488
404
+ [ Thu Sep 15 19:04:57 2022 ] Eval epoch: 65
405
+ [ Thu Sep 15 19:05:19 2022 ] Mean test loss of 258 batches: 1.744349718093872.
406
+ [ Thu Sep 15 19:05:19 2022 ] Top1: 63.47%
407
+ [ Thu Sep 15 19:05:19 2022 ] Top5: 89.96%
408
+ [ Thu Sep 15 19:05:19 2022 ] Training epoch: 66
409
+ [ Thu Sep 15 19:05:24 2022 ] Batch(4/123) done. Loss: 0.0174 lr:0.010000 network_time: 0.0475
410
+ [ Thu Sep 15 19:06:01 2022 ] Batch(104/123) done. Loss: 0.0277 lr:0.010000 network_time: 0.0517
411
+ [ Thu Sep 15 19:06:08 2022 ] Eval epoch: 66
412
+ [ Thu Sep 15 19:06:30 2022 ] Mean test loss of 258 batches: 1.8125786781311035.
413
+ [ Thu Sep 15 19:06:31 2022 ] Top1: 62.75%
414
+ [ Thu Sep 15 19:06:31 2022 ] Top5: 89.50%
415
+ [ Thu Sep 15 19:06:31 2022 ] Training epoch: 67
416
+ [ Thu Sep 15 19:07:05 2022 ] Batch(81/123) done. Loss: 0.0325 lr:0.010000 network_time: 0.0499
417
+ [ Thu Sep 15 19:07:20 2022 ] Eval epoch: 67
418
+ [ Thu Sep 15 19:07:42 2022 ] Mean test loss of 258 batches: 1.9440727233886719.
419
+ [ Thu Sep 15 19:07:42 2022 ] Top1: 60.93%
420
+ [ Thu Sep 15 19:07:42 2022 ] Top5: 88.65%
421
+ [ Thu Sep 15 19:07:43 2022 ] Training epoch: 68
422
+ [ Thu Sep 15 19:08:08 2022 ] Batch(58/123) done. Loss: 0.0422 lr:0.010000 network_time: 0.0535
423
+ [ Thu Sep 15 19:08:32 2022 ] Eval epoch: 68
424
+ [ Thu Sep 15 19:08:55 2022 ] Mean test loss of 258 batches: 1.741814136505127.
425
+ [ Thu Sep 15 19:08:55 2022 ] Top1: 63.69%
426
+ [ Thu Sep 15 19:08:55 2022 ] Top5: 89.97%
427
+ [ Thu Sep 15 19:08:55 2022 ] Training epoch: 69
428
+ [ Thu Sep 15 19:09:12 2022 ] Batch(35/123) done. Loss: 0.0118 lr:0.010000 network_time: 0.0488
429
+ [ Thu Sep 15 19:09:44 2022 ] Eval epoch: 69
430
+ [ Thu Sep 15 19:10:06 2022 ] Mean test loss of 258 batches: 1.7398862838745117.
431
+ [ Thu Sep 15 19:10:06 2022 ] Top1: 63.49%
432
+ [ Thu Sep 15 19:10:06 2022 ] Top5: 90.01%
433
+ [ Thu Sep 15 19:10:06 2022 ] Training epoch: 70
434
+ [ Thu Sep 15 19:10:15 2022 ] Batch(12/123) done. Loss: 0.0105 lr:0.010000 network_time: 0.0503
435
+ [ Thu Sep 15 19:10:52 2022 ] Batch(112/123) done. Loss: 0.0089 lr:0.010000 network_time: 0.0495
436
+ [ Thu Sep 15 19:10:56 2022 ] Eval epoch: 70
437
+ [ Thu Sep 15 19:11:18 2022 ] Mean test loss of 258 batches: 1.9365839958190918.
438
+ [ Thu Sep 15 19:11:18 2022 ] Top1: 60.96%
439
+ [ Thu Sep 15 19:11:18 2022 ] Top5: 88.48%
440
+ [ Thu Sep 15 19:11:18 2022 ] Training epoch: 71
441
+ [ Thu Sep 15 19:11:55 2022 ] Batch(89/123) done. Loss: 0.0112 lr:0.010000 network_time: 0.0486
442
+ [ Thu Sep 15 19:12:07 2022 ] Eval epoch: 71
443
+ [ Thu Sep 15 19:12:29 2022 ] Mean test loss of 258 batches: 1.9486991167068481.
444
+ [ Thu Sep 15 19:12:29 2022 ] Top1: 61.18%
445
+ [ Thu Sep 15 19:12:29 2022 ] Top5: 88.75%
446
+ [ Thu Sep 15 19:12:30 2022 ] Training epoch: 72
447
+ [ Thu Sep 15 19:12:57 2022 ] Batch(66/123) done. Loss: 0.0345 lr:0.010000 network_time: 0.0501
448
+ [ Thu Sep 15 19:13:18 2022 ] Eval epoch: 72
449
+ [ Thu Sep 15 19:13:40 2022 ] Mean test loss of 258 batches: 1.7845265865325928.
450
+ [ Thu Sep 15 19:13:41 2022 ] Top1: 63.67%
451
+ [ Thu Sep 15 19:13:41 2022 ] Top5: 89.93%
452
+ [ Thu Sep 15 19:13:41 2022 ] Training epoch: 73
453
+ [ Thu Sep 15 19:14:01 2022 ] Batch(43/123) done. Loss: 0.0063 lr:0.010000 network_time: 0.0545
454
+ [ Thu Sep 15 19:14:30 2022 ] Eval epoch: 73
455
+ [ Thu Sep 15 19:14:52 2022 ] Mean test loss of 258 batches: 1.789925217628479.
456
+ [ Thu Sep 15 19:14:52 2022 ] Top1: 63.49%
457
+ [ Thu Sep 15 19:14:52 2022 ] Top5: 89.93%
458
+ [ Thu Sep 15 19:14:52 2022 ] Training epoch: 74
459
+ [ Thu Sep 15 19:15:04 2022 ] Batch(20/123) done. Loss: 0.0044 lr:0.010000 network_time: 0.0546
460
+ [ Thu Sep 15 19:15:41 2022 ] Batch(120/123) done. Loss: 0.0185 lr:0.010000 network_time: 0.0510
461
+ [ Thu Sep 15 19:15:42 2022 ] Eval epoch: 74
462
+ [ Thu Sep 15 19:16:04 2022 ] Mean test loss of 258 batches: 2.539682388305664.
463
+ [ Thu Sep 15 19:16:04 2022 ] Top1: 55.62%
464
+ [ Thu Sep 15 19:16:04 2022 ] Top5: 83.94%
465
+ [ Thu Sep 15 19:16:04 2022 ] Training epoch: 75
466
+ [ Thu Sep 15 19:16:44 2022 ] Batch(97/123) done. Loss: 0.0110 lr:0.010000 network_time: 0.0490
467
+ [ Thu Sep 15 19:16:53 2022 ] Eval epoch: 75
468
+ [ Thu Sep 15 19:17:16 2022 ] Mean test loss of 258 batches: 1.8047730922698975.
469
+ [ Thu Sep 15 19:17:16 2022 ] Top1: 63.53%
470
+ [ Thu Sep 15 19:17:16 2022 ] Top5: 90.11%
471
+ [ Thu Sep 15 19:17:16 2022 ] Training epoch: 76
472
+ [ Thu Sep 15 19:17:47 2022 ] Batch(74/123) done. Loss: 0.0177 lr:0.010000 network_time: 0.0472
473
+ [ Thu Sep 15 19:18:05 2022 ] Eval epoch: 76
474
+ [ Thu Sep 15 19:18:27 2022 ] Mean test loss of 258 batches: 1.815438985824585.
475
+ [ Thu Sep 15 19:18:27 2022 ] Top1: 63.13%
476
+ [ Thu Sep 15 19:18:27 2022 ] Top5: 89.87%
477
+ [ Thu Sep 15 19:18:27 2022 ] Training epoch: 77
478
+ [ Thu Sep 15 19:18:50 2022 ] Batch(51/123) done. Loss: 0.0060 lr:0.010000 network_time: 0.0566
479
+ [ Thu Sep 15 19:19:16 2022 ] Eval epoch: 77
480
+ [ Thu Sep 15 19:19:38 2022 ] Mean test loss of 258 batches: 1.7667587995529175.
481
+ [ Thu Sep 15 19:19:38 2022 ] Top1: 63.53%
482
+ [ Thu Sep 15 19:19:38 2022 ] Top5: 90.14%
483
+ [ Thu Sep 15 19:19:38 2022 ] Training epoch: 78
484
+ [ Thu Sep 15 19:19:53 2022 ] Batch(28/123) done. Loss: 0.0132 lr:0.010000 network_time: 0.0503
485
+ [ Thu Sep 15 19:20:28 2022 ] Eval epoch: 78
486
+ [ Thu Sep 15 19:20:50 2022 ] Mean test loss of 258 batches: 1.8042775392532349.
487
+ [ Thu Sep 15 19:20:50 2022 ] Top1: 63.43%
488
+ [ Thu Sep 15 19:20:50 2022 ] Top5: 90.12%
489
+ [ Thu Sep 15 19:20:50 2022 ] Training epoch: 79
490
+ [ Thu Sep 15 19:20:56 2022 ] Batch(5/123) done. Loss: 0.0055 lr:0.010000 network_time: 0.0501
491
+ [ Thu Sep 15 19:21:33 2022 ] Batch(105/123) done. Loss: 0.0050 lr:0.010000 network_time: 0.0499
492
+ [ Thu Sep 15 19:21:39 2022 ] Eval epoch: 79
493
+ [ Thu Sep 15 19:22:02 2022 ] Mean test loss of 258 batches: 1.7913448810577393.
494
+ [ Thu Sep 15 19:22:02 2022 ] Top1: 63.30%
495
+ [ Thu Sep 15 19:22:02 2022 ] Top5: 89.98%
496
+ [ Thu Sep 15 19:22:02 2022 ] Training epoch: 80
497
+ [ Thu Sep 15 19:22:36 2022 ] Batch(82/123) done. Loss: 0.0145 lr:0.010000 network_time: 0.0501
498
+ [ Thu Sep 15 19:22:51 2022 ] Eval epoch: 80
499
+ [ Thu Sep 15 19:23:13 2022 ] Mean test loss of 258 batches: 1.7956622838974.
500
+ [ Thu Sep 15 19:23:13 2022 ] Top1: 63.67%
501
+ [ Thu Sep 15 19:23:13 2022 ] Top5: 90.21%
502
+ [ Thu Sep 15 19:23:13 2022 ] Training epoch: 81
503
+ [ Thu Sep 15 19:23:39 2022 ] Batch(59/123) done. Loss: 0.0074 lr:0.001000 network_time: 0.0484
504
+ [ Thu Sep 15 19:24:03 2022 ] Eval epoch: 81
505
+ [ Thu Sep 15 19:24:24 2022 ] Mean test loss of 258 batches: 1.8657962083816528.
506
+ [ Thu Sep 15 19:24:24 2022 ] Top1: 62.61%
507
+ [ Thu Sep 15 19:24:24 2022 ] Top5: 89.59%
508
+ [ Thu Sep 15 19:24:24 2022 ] Training epoch: 82
509
+ [ Thu Sep 15 19:24:42 2022 ] Batch(36/123) done. Loss: 0.0165 lr:0.001000 network_time: 0.0491
510
+ [ Thu Sep 15 19:25:14 2022 ] Eval epoch: 82
511
+ [ Thu Sep 15 19:25:36 2022 ] Mean test loss of 258 batches: 1.816205620765686.
512
+ [ Thu Sep 15 19:25:36 2022 ] Top1: 63.49%
513
+ [ Thu Sep 15 19:25:36 2022 ] Top5: 89.88%
514
+ [ Thu Sep 15 19:25:36 2022 ] Training epoch: 83
515
+ [ Thu Sep 15 19:25:45 2022 ] Batch(13/123) done. Loss: 0.0259 lr:0.001000 network_time: 0.0535
516
+ [ Thu Sep 15 19:26:21 2022 ] Batch(113/123) done. Loss: 0.0102 lr:0.001000 network_time: 0.0512
517
+ [ Thu Sep 15 19:26:25 2022 ] Eval epoch: 83
518
+ [ Thu Sep 15 19:26:47 2022 ] Mean test loss of 258 batches: 1.8096046447753906.
519
+ [ Thu Sep 15 19:26:47 2022 ] Top1: 63.41%
520
+ [ Thu Sep 15 19:26:47 2022 ] Top5: 90.00%
521
+ [ Thu Sep 15 19:26:47 2022 ] Training epoch: 84
522
+ [ Thu Sep 15 19:27:24 2022 ] Batch(90/123) done. Loss: 0.0099 lr:0.001000 network_time: 0.0497
523
+ [ Thu Sep 15 19:27:36 2022 ] Eval epoch: 84
524
+ [ Thu Sep 15 19:27:58 2022 ] Mean test loss of 258 batches: 1.8829015493392944.
525
+ [ Thu Sep 15 19:27:58 2022 ] Top1: 62.32%
526
+ [ Thu Sep 15 19:27:58 2022 ] Top5: 89.57%
527
+ [ Thu Sep 15 19:27:59 2022 ] Training epoch: 85
528
+ [ Thu Sep 15 19:28:27 2022 ] Batch(67/123) done. Loss: 0.0425 lr:0.001000 network_time: 0.0476
529
+ [ Thu Sep 15 19:28:48 2022 ] Eval epoch: 85
530
+ [ Thu Sep 15 19:29:09 2022 ] Mean test loss of 258 batches: 1.8470373153686523.
531
+ [ Thu Sep 15 19:29:10 2022 ] Top1: 63.13%
532
+ [ Thu Sep 15 19:29:10 2022 ] Top5: 89.90%
533
+ [ Thu Sep 15 19:29:10 2022 ] Training epoch: 86
534
+ [ Thu Sep 15 19:29:30 2022 ] Batch(44/123) done. Loss: 0.0200 lr:0.001000 network_time: 0.0479
535
+ [ Thu Sep 15 19:29:59 2022 ] Eval epoch: 86
536
+ [ Thu Sep 15 19:30:21 2022 ] Mean test loss of 258 batches: 1.75849187374115.
537
+ [ Thu Sep 15 19:30:21 2022 ] Top1: 64.07%
538
+ [ Thu Sep 15 19:30:21 2022 ] Top5: 90.30%
539
+ [ Thu Sep 15 19:30:21 2022 ] Training epoch: 87
540
+ [ Thu Sep 15 19:30:33 2022 ] Batch(21/123) done. Loss: 0.0124 lr:0.001000 network_time: 0.0509
541
+ [ Thu Sep 15 19:31:10 2022 ] Batch(121/123) done. Loss: 0.0037 lr:0.001000 network_time: 0.0508
542
+ [ Thu Sep 15 19:31:10 2022 ] Eval epoch: 87
543
+ [ Thu Sep 15 19:31:32 2022 ] Mean test loss of 258 batches: 1.8853404521942139.
544
+ [ Thu Sep 15 19:31:32 2022 ] Top1: 62.67%
545
+ [ Thu Sep 15 19:31:32 2022 ] Top5: 89.62%
546
+ [ Thu Sep 15 19:31:33 2022 ] Training epoch: 88
547
+ [ Thu Sep 15 19:32:13 2022 ] Batch(98/123) done. Loss: 0.0058 lr:0.001000 network_time: 0.0507
548
+ [ Thu Sep 15 19:32:22 2022 ] Eval epoch: 88
549
+ [ Thu Sep 15 19:32:44 2022 ] Mean test loss of 258 batches: 1.8328523635864258.
550
+ [ Thu Sep 15 19:32:44 2022 ] Top1: 63.30%
551
+ [ Thu Sep 15 19:32:44 2022 ] Top5: 89.90%
552
+ [ Thu Sep 15 19:32:44 2022 ] Training epoch: 89
553
+ [ Thu Sep 15 19:33:16 2022 ] Batch(75/123) done. Loss: 0.0281 lr:0.001000 network_time: 0.0507
554
+ [ Thu Sep 15 19:33:33 2022 ] Eval epoch: 89
555
+ [ Thu Sep 15 19:33:55 2022 ] Mean test loss of 258 batches: 1.786879539489746.
556
+ [ Thu Sep 15 19:33:55 2022 ] Top1: 63.64%
557
+ [ Thu Sep 15 19:33:55 2022 ] Top5: 90.19%
558
+ [ Thu Sep 15 19:33:55 2022 ] Training epoch: 90
559
+ [ Thu Sep 15 19:34:18 2022 ] Batch(52/123) done. Loss: 0.0072 lr:0.001000 network_time: 0.0499
560
+ [ Thu Sep 15 19:34:44 2022 ] Eval epoch: 90
561
+ [ Thu Sep 15 19:35:06 2022 ] Mean test loss of 258 batches: 1.8092989921569824.
562
+ [ Thu Sep 15 19:35:06 2022 ] Top1: 63.61%
563
+ [ Thu Sep 15 19:35:06 2022 ] Top5: 89.99%
564
+ [ Thu Sep 15 19:35:07 2022 ] Training epoch: 91
565
+ [ Thu Sep 15 19:35:21 2022 ] Batch(29/123) done. Loss: 0.0100 lr:0.001000 network_time: 0.0501
566
+ [ Thu Sep 15 19:35:56 2022 ] Eval epoch: 91
567
+ [ Thu Sep 15 19:36:18 2022 ] Mean test loss of 258 batches: 1.7964448928833008.
568
+ [ Thu Sep 15 19:36:18 2022 ] Top1: 63.43%
569
+ [ Thu Sep 15 19:36:18 2022 ] Top5: 90.09%
570
+ [ Thu Sep 15 19:36:18 2022 ] Training epoch: 92
571
+ [ Thu Sep 15 19:36:24 2022 ] Batch(6/123) done. Loss: 0.0078 lr:0.001000 network_time: 0.0484
572
+ [ Thu Sep 15 19:37:01 2022 ] Batch(106/123) done. Loss: 0.0191 lr:0.001000 network_time: 0.0503
573
+ [ Thu Sep 15 19:37:07 2022 ] Eval epoch: 92
574
+ [ Thu Sep 15 19:37:29 2022 ] Mean test loss of 258 batches: 1.8964364528656006.
575
+ [ Thu Sep 15 19:37:29 2022 ] Top1: 62.33%
576
+ [ Thu Sep 15 19:37:29 2022 ] Top5: 89.44%
577
+ [ Thu Sep 15 19:37:29 2022 ] Training epoch: 93
578
+ [ Thu Sep 15 19:38:04 2022 ] Batch(83/123) done. Loss: 0.0090 lr:0.001000 network_time: 0.0504
579
+ [ Thu Sep 15 19:38:18 2022 ] Eval epoch: 93
580
+ [ Thu Sep 15 19:38:40 2022 ] Mean test loss of 258 batches: 1.8207327127456665.
581
+ [ Thu Sep 15 19:38:40 2022 ] Top1: 63.50%
582
+ [ Thu Sep 15 19:38:40 2022 ] Top5: 90.06%
583
+ [ Thu Sep 15 19:38:40 2022 ] Training epoch: 94
584
+ [ Thu Sep 15 19:39:07 2022 ] Batch(60/123) done. Loss: 0.0136 lr:0.001000 network_time: 0.0513
585
+ [ Thu Sep 15 19:39:30 2022 ] Eval epoch: 94
586
+ [ Thu Sep 15 19:39:51 2022 ] Mean test loss of 258 batches: 1.8205301761627197.
587
+ [ Thu Sep 15 19:39:52 2022 ] Top1: 63.17%
588
+ [ Thu Sep 15 19:39:52 2022 ] Top5: 89.72%
589
+ [ Thu Sep 15 19:39:52 2022 ] Training epoch: 95
590
+ [ Thu Sep 15 19:40:09 2022 ] Batch(37/123) done. Loss: 0.0293 lr:0.001000 network_time: 0.0525
591
+ [ Thu Sep 15 19:40:41 2022 ] Eval epoch: 95
592
+ [ Thu Sep 15 19:41:03 2022 ] Mean test loss of 258 batches: 1.797655701637268.
593
+ [ Thu Sep 15 19:41:03 2022 ] Top1: 63.70%
594
+ [ Thu Sep 15 19:41:03 2022 ] Top5: 89.80%
595
+ [ Thu Sep 15 19:41:03 2022 ] Training epoch: 96
596
+ [ Thu Sep 15 19:41:12 2022 ] Batch(14/123) done. Loss: 0.0108 lr:0.001000 network_time: 0.0514
597
+ [ Thu Sep 15 19:41:49 2022 ] Batch(114/123) done. Loss: 0.0097 lr:0.001000 network_time: 0.0488
598
+ [ Thu Sep 15 19:41:52 2022 ] Eval epoch: 96
599
+ [ Thu Sep 15 19:42:14 2022 ] Mean test loss of 258 batches: 1.812503695487976.
600
+ [ Thu Sep 15 19:42:14 2022 ] Top1: 63.56%
601
+ [ Thu Sep 15 19:42:14 2022 ] Top5: 90.00%
602
+ [ Thu Sep 15 19:42:14 2022 ] Training epoch: 97
603
+ [ Thu Sep 15 19:42:52 2022 ] Batch(91/123) done. Loss: 0.0153 lr:0.001000 network_time: 0.0531
604
+ [ Thu Sep 15 19:43:03 2022 ] Eval epoch: 97
605
+ [ Thu Sep 15 19:43:25 2022 ] Mean test loss of 258 batches: 2.090479850769043.
606
+ [ Thu Sep 15 19:43:25 2022 ] Top1: 59.76%
607
+ [ Thu Sep 15 19:43:25 2022 ] Top5: 87.82%
608
+ [ Thu Sep 15 19:43:26 2022 ] Training epoch: 98
609
+ [ Thu Sep 15 19:43:54 2022 ] Batch(68/123) done. Loss: 0.0160 lr:0.001000 network_time: 0.0525
610
+ [ Thu Sep 15 19:44:15 2022 ] Eval epoch: 98
611
+ [ Thu Sep 15 19:44:36 2022 ] Mean test loss of 258 batches: 1.7830455303192139.
612
+ [ Thu Sep 15 19:44:36 2022 ] Top1: 63.97%
613
+ [ Thu Sep 15 19:44:37 2022 ] Top5: 90.38%
614
+ [ Thu Sep 15 19:44:37 2022 ] Training epoch: 99
615
+ [ Thu Sep 15 19:44:57 2022 ] Batch(45/123) done. Loss: 0.0050 lr:0.001000 network_time: 0.0537
616
+ [ Thu Sep 15 19:45:25 2022 ] Eval epoch: 99
617
+ [ Thu Sep 15 19:45:47 2022 ] Mean test loss of 258 batches: 1.8485912084579468.
618
+ [ Thu Sep 15 19:45:47 2022 ] Top1: 63.38%
619
+ [ Thu Sep 15 19:45:47 2022 ] Top5: 89.60%
620
+ [ Thu Sep 15 19:45:47 2022 ] Training epoch: 100
621
+ [ Thu Sep 15 19:46:00 2022 ] Batch(22/123) done. Loss: 0.0056 lr:0.001000 network_time: 0.0519
622
+ [ Thu Sep 15 19:46:36 2022 ] Batch(122/123) done. Loss: 0.0069 lr:0.001000 network_time: 0.0516
623
+ [ Thu Sep 15 19:46:37 2022 ] Eval epoch: 100
624
+ [ Thu Sep 15 19:46:59 2022 ] Mean test loss of 258 batches: 1.8104979991912842.
625
+ [ Thu Sep 15 19:46:59 2022 ] Top1: 63.95%
626
+ [ Thu Sep 15 19:46:59 2022 ] Top5: 90.19%
627
+ [ Thu Sep 15 19:46:59 2022 ] Training epoch: 101
628
+ [ Thu Sep 15 19:47:39 2022 ] Batch(99/123) done. Loss: 0.0266 lr:0.000100 network_time: 0.0471
629
+ [ Thu Sep 15 19:47:48 2022 ] Eval epoch: 101
630
+ [ Thu Sep 15 19:48:10 2022 ] Mean test loss of 258 batches: 2.072659730911255.
631
+ [ Thu Sep 15 19:48:10 2022 ] Top1: 60.56%
632
+ [ Thu Sep 15 19:48:10 2022 ] Top5: 88.27%
633
+ [ Thu Sep 15 19:48:10 2022 ] Training epoch: 102
634
+ [ Thu Sep 15 19:48:42 2022 ] Batch(76/123) done. Loss: 0.0156 lr:0.000100 network_time: 0.0511
635
+ [ Thu Sep 15 19:48:59 2022 ] Eval epoch: 102
636
+ [ Thu Sep 15 19:49:21 2022 ] Mean test loss of 258 batches: 1.7830250263214111.
637
+ [ Thu Sep 15 19:49:21 2022 ] Top1: 63.90%
638
+ [ Thu Sep 15 19:49:21 2022 ] Top5: 90.25%
639
+ [ Thu Sep 15 19:49:21 2022 ] Training epoch: 103
640
+ [ Thu Sep 15 19:49:45 2022 ] Batch(53/123) done. Loss: 0.0099 lr:0.000100 network_time: 0.0513
641
+ [ Thu Sep 15 19:50:11 2022 ] Eval epoch: 103
642
+ [ Thu Sep 15 19:50:33 2022 ] Mean test loss of 258 batches: 1.8229682445526123.
643
+ [ Thu Sep 15 19:50:33 2022 ] Top1: 63.18%
644
+ [ Thu Sep 15 19:50:33 2022 ] Top5: 89.80%
645
+ [ Thu Sep 15 19:50:33 2022 ] Training epoch: 104
646
+ [ Thu Sep 15 19:50:48 2022 ] Batch(30/123) done. Loss: 0.0051 lr:0.000100 network_time: 0.0522
647
+ [ Thu Sep 15 19:51:22 2022 ] Eval epoch: 104
648
+ [ Thu Sep 15 19:51:43 2022 ] Mean test loss of 258 batches: 1.7732776403427124.
649
+ [ Thu Sep 15 19:51:43 2022 ] Top1: 63.84%
650
+ [ Thu Sep 15 19:51:43 2022 ] Top5: 90.27%
651
+ [ Thu Sep 15 19:51:44 2022 ] Training epoch: 105
652
+ [ Thu Sep 15 19:51:50 2022 ] Batch(7/123) done. Loss: 0.0063 lr:0.000100 network_time: 0.0545
653
+ [ Thu Sep 15 19:52:27 2022 ] Batch(107/123) done. Loss: 0.0102 lr:0.000100 network_time: 0.0489
654
+ [ Thu Sep 15 19:52:33 2022 ] Eval epoch: 105
655
+ [ Thu Sep 15 19:52:55 2022 ] Mean test loss of 258 batches: 1.8950495719909668.
656
+ [ Thu Sep 15 19:52:55 2022 ] Top1: 63.23%
657
+ [ Thu Sep 15 19:52:55 2022 ] Top5: 89.54%
658
+ [ Thu Sep 15 19:52:55 2022 ] Training epoch: 106
659
+ [ Thu Sep 15 19:53:30 2022 ] Batch(84/123) done. Loss: 0.0049 lr:0.000100 network_time: 0.0528
660
+ [ Thu Sep 15 19:53:44 2022 ] Eval epoch: 106
661
+ [ Thu Sep 15 19:54:06 2022 ] Mean test loss of 258 batches: 1.841883659362793.
662
+ [ Thu Sep 15 19:54:06 2022 ] Top1: 62.84%
663
+ [ Thu Sep 15 19:54:06 2022 ] Top5: 89.73%
664
+ [ Thu Sep 15 19:54:06 2022 ] Training epoch: 107
665
+ [ Thu Sep 15 19:54:33 2022 ] Batch(61/123) done. Loss: 0.0210 lr:0.000100 network_time: 0.0536
666
+ [ Thu Sep 15 19:54:55 2022 ] Eval epoch: 107
667
+ [ Thu Sep 15 19:55:17 2022 ] Mean test loss of 258 batches: 1.8099277019500732.
668
+ [ Thu Sep 15 19:55:17 2022 ] Top1: 63.50%
669
+ [ Thu Sep 15 19:55:18 2022 ] Top5: 90.20%
670
+ [ Thu Sep 15 19:55:18 2022 ] Training epoch: 108
671
+ [ Thu Sep 15 19:55:36 2022 ] Batch(38/123) done. Loss: 0.0105 lr:0.000100 network_time: 0.0517
672
+ [ Thu Sep 15 19:56:07 2022 ] Eval epoch: 108
673
+ [ Thu Sep 15 19:56:29 2022 ] Mean test loss of 258 batches: 1.7447724342346191.
674
+ [ Thu Sep 15 19:56:29 2022 ] Top1: 64.22%
675
+ [ Thu Sep 15 19:56:29 2022 ] Top5: 90.49%
676
+ [ Thu Sep 15 19:56:29 2022 ] Training epoch: 109
677
+ [ Thu Sep 15 19:56:39 2022 ] Batch(15/123) done. Loss: 0.0234 lr:0.000100 network_time: 0.0460
678
+ [ Thu Sep 15 19:57:15 2022 ] Batch(115/123) done. Loss: 0.0054 lr:0.000100 network_time: 0.0537
679
+ [ Thu Sep 15 19:57:18 2022 ] Eval epoch: 109
680
+ [ Thu Sep 15 19:57:40 2022 ] Mean test loss of 258 batches: 1.908057451248169.
681
+ [ Thu Sep 15 19:57:40 2022 ] Top1: 62.76%
682
+ [ Thu Sep 15 19:57:40 2022 ] Top5: 89.36%
683
+ [ Thu Sep 15 19:57:40 2022 ] Training epoch: 110
684
+ [ Thu Sep 15 19:58:18 2022 ] Batch(92/123) done. Loss: 0.0211 lr:0.000100 network_time: 0.0511
685
+ [ Thu Sep 15 19:58:29 2022 ] Eval epoch: 110
686
+ [ Thu Sep 15 19:58:51 2022 ] Mean test loss of 258 batches: 1.8803648948669434.
687
+ [ Thu Sep 15 19:58:51 2022 ] Top1: 62.55%
688
+ [ Thu Sep 15 19:58:51 2022 ] Top5: 89.43%
689
+ [ Thu Sep 15 19:58:51 2022 ] Training epoch: 111
690
+ [ Thu Sep 15 19:59:21 2022 ] Batch(69/123) done. Loss: 0.0044 lr:0.000100 network_time: 0.0523
691
+ [ Thu Sep 15 19:59:41 2022 ] Eval epoch: 111
692
+ [ Thu Sep 15 20:00:02 2022 ] Mean test loss of 258 batches: 1.7714176177978516.
693
+ [ Thu Sep 15 20:00:02 2022 ] Top1: 64.00%
694
+ [ Thu Sep 15 20:00:02 2022 ] Top5: 90.25%
695
+ [ Thu Sep 15 20:00:03 2022 ] Training epoch: 112
696
+ [ Thu Sep 15 20:00:23 2022 ] Batch(46/123) done. Loss: 0.0086 lr:0.000100 network_time: 0.0493
697
+ [ Thu Sep 15 20:00:52 2022 ] Eval epoch: 112
698
+ [ Thu Sep 15 20:01:13 2022 ] Mean test loss of 258 batches: 1.7842161655426025.
699
+ [ Thu Sep 15 20:01:14 2022 ] Top1: 63.52%
700
+ [ Thu Sep 15 20:01:14 2022 ] Top5: 90.15%
701
+ [ Thu Sep 15 20:01:14 2022 ] Training epoch: 113
702
+ [ Thu Sep 15 20:01:26 2022 ] Batch(23/123) done. Loss: 0.0056 lr:0.000100 network_time: 0.0471
703
+ [ Thu Sep 15 20:02:03 2022 ] Eval epoch: 113
704
+ [ Thu Sep 15 20:02:25 2022 ] Mean test loss of 258 batches: 1.7943865060806274.
705
+ [ Thu Sep 15 20:02:25 2022 ] Top1: 63.93%
706
+ [ Thu Sep 15 20:02:25 2022 ] Top5: 90.07%
707
+ [ Thu Sep 15 20:02:25 2022 ] Training epoch: 114
708
+ [ Thu Sep 15 20:02:29 2022 ] Batch(0/123) done. Loss: 0.0093 lr:0.000100 network_time: 0.0932
709
+ [ Thu Sep 15 20:03:06 2022 ] Batch(100/123) done. Loss: 0.0032 lr:0.000100 network_time: 0.0536
710
+ [ Thu Sep 15 20:03:14 2022 ] Eval epoch: 114
711
+ [ Thu Sep 15 20:03:36 2022 ] Mean test loss of 258 batches: 1.8065061569213867.
712
+ [ Thu Sep 15 20:03:36 2022 ] Top1: 63.38%
713
+ [ Thu Sep 15 20:03:36 2022 ] Top5: 89.91%
714
+ [ Thu Sep 15 20:03:36 2022 ] Training epoch: 115
715
+ [ Thu Sep 15 20:04:08 2022 ] Batch(77/123) done. Loss: 0.0055 lr:0.000100 network_time: 0.0531
716
+ [ Thu Sep 15 20:04:25 2022 ] Eval epoch: 115
717
+ [ Thu Sep 15 20:04:47 2022 ] Mean test loss of 258 batches: 1.8034496307373047.
718
+ [ Thu Sep 15 20:04:47 2022 ] Top1: 63.55%
719
+ [ Thu Sep 15 20:04:47 2022 ] Top5: 90.01%
720
+ [ Thu Sep 15 20:04:47 2022 ] Training epoch: 116
721
+ [ Thu Sep 15 20:05:11 2022 ] Batch(54/123) done. Loss: 0.0097 lr:0.000100 network_time: 0.0483
722
+ [ Thu Sep 15 20:05:36 2022 ] Eval epoch: 116
723
+ [ Thu Sep 15 20:05:58 2022 ] Mean test loss of 258 batches: 1.792580008506775.
724
+ [ Thu Sep 15 20:05:59 2022 ] Top1: 63.86%
725
+ [ Thu Sep 15 20:05:59 2022 ] Top5: 90.25%
726
+ [ Thu Sep 15 20:05:59 2022 ] Training epoch: 117
727
+ [ Thu Sep 15 20:06:14 2022 ] Batch(31/123) done. Loss: 0.0100 lr:0.000100 network_time: 0.0495
728
+ [ Thu Sep 15 20:06:48 2022 ] Eval epoch: 117
729
+ [ Thu Sep 15 20:07:11 2022 ] Mean test loss of 258 batches: 1.8426971435546875.
730
+ [ Thu Sep 15 20:07:11 2022 ] Top1: 63.03%
731
+ [ Thu Sep 15 20:07:11 2022 ] Top5: 89.89%
732
+ [ Thu Sep 15 20:07:11 2022 ] Training epoch: 118
733
+ [ Thu Sep 15 20:07:18 2022 ] Batch(8/123) done. Loss: 0.0055 lr:0.000100 network_time: 0.0495
734
+ [ Thu Sep 15 20:07:55 2022 ] Batch(108/123) done. Loss: 0.0075 lr:0.000100 network_time: 0.0490
735
+ [ Thu Sep 15 20:08:00 2022 ] Eval epoch: 118
736
+ [ Thu Sep 15 20:08:22 2022 ] Mean test loss of 258 batches: 1.9181476831436157.
737
+ [ Thu Sep 15 20:08:23 2022 ] Top1: 61.90%
738
+ [ Thu Sep 15 20:08:23 2022 ] Top5: 89.36%
739
+ [ Thu Sep 15 20:08:23 2022 ] Training epoch: 119
740
+ [ Thu Sep 15 20:08:58 2022 ] Batch(85/123) done. Loss: 0.0073 lr:0.000100 network_time: 0.0516
741
+ [ Thu Sep 15 20:09:12 2022 ] Eval epoch: 119
742
+ [ Thu Sep 15 20:09:34 2022 ] Mean test loss of 258 batches: 1.851119875907898.
743
+ [ Thu Sep 15 20:09:34 2022 ] Top1: 63.08%
744
+ [ Thu Sep 15 20:09:34 2022 ] Top5: 89.56%
745
+ [ Thu Sep 15 20:09:34 2022 ] Training epoch: 120
746
+ [ Thu Sep 15 20:10:01 2022 ] Batch(62/123) done. Loss: 0.0060 lr:0.000100 network_time: 0.0499
747
+ [ Thu Sep 15 20:10:24 2022 ] Eval epoch: 120
748
+ [ Thu Sep 15 20:10:46 2022 ] Mean test loss of 258 batches: 1.8299061059951782.
749
+ [ Thu Sep 15 20:10:46 2022 ] Top1: 63.41%
750
+ [ Thu Sep 15 20:10:46 2022 ] Top5: 89.90%
751
+ [ Thu Sep 15 20:10:46 2022 ] Training epoch: 121
752
+ [ Thu Sep 15 20:11:04 2022 ] Batch(39/123) done. Loss: 0.0065 lr:0.000100 network_time: 0.0485
753
+ [ Thu Sep 15 20:11:35 2022 ] Eval epoch: 121
754
+ [ Thu Sep 15 20:11:57 2022 ] Mean test loss of 258 batches: 1.870434284210205.
755
+ [ Thu Sep 15 20:11:57 2022 ] Top1: 62.78%
756
+ [ Thu Sep 15 20:11:57 2022 ] Top5: 89.54%
757
+ [ Thu Sep 15 20:11:57 2022 ] Training epoch: 122
758
+ [ Thu Sep 15 20:12:07 2022 ] Batch(16/123) done. Loss: 0.0136 lr:0.000100 network_time: 0.0467
759
+ [ Thu Sep 15 20:12:44 2022 ] Batch(116/123) done. Loss: 0.0066 lr:0.000100 network_time: 0.0548
760
+ [ Thu Sep 15 20:12:46 2022 ] Eval epoch: 122
761
+ [ Thu Sep 15 20:13:08 2022 ] Mean test loss of 258 batches: 1.8513699769973755.
762
+ [ Thu Sep 15 20:13:08 2022 ] Top1: 63.33%
763
+ [ Thu Sep 15 20:13:08 2022 ] Top5: 89.77%
764
+ [ Thu Sep 15 20:13:08 2022 ] Training epoch: 123
765
+ [ Thu Sep 15 20:13:47 2022 ] Batch(93/123) done. Loss: 0.0063 lr:0.000100 network_time: 0.0520
766
+ [ Thu Sep 15 20:13:58 2022 ] Eval epoch: 123
767
+ [ Thu Sep 15 20:14:20 2022 ] Mean test loss of 258 batches: 1.815142035484314.
768
+ [ Thu Sep 15 20:14:20 2022 ] Top1: 63.26%
769
+ [ Thu Sep 15 20:14:20 2022 ] Top5: 89.87%
770
+ [ Thu Sep 15 20:14:20 2022 ] Training epoch: 124
771
+ [ Thu Sep 15 20:14:50 2022 ] Batch(70/123) done. Loss: 0.0084 lr:0.000100 network_time: 0.0505
772
+ [ Thu Sep 15 20:15:09 2022 ] Eval epoch: 124
773
+ [ Thu Sep 15 20:15:32 2022 ] Mean test loss of 258 batches: 1.8830796480178833.
774
+ [ Thu Sep 15 20:15:32 2022 ] Top1: 62.94%
775
+ [ Thu Sep 15 20:15:32 2022 ] Top5: 89.45%
776
+ [ Thu Sep 15 20:15:32 2022 ] Training epoch: 125
777
+ [ Thu Sep 15 20:15:53 2022 ] Batch(47/123) done. Loss: 0.0085 lr:0.000100 network_time: 0.0505
778
+ [ Thu Sep 15 20:16:21 2022 ] Eval epoch: 125
779
+ [ Thu Sep 15 20:16:44 2022 ] Mean test loss of 258 batches: 1.7838399410247803.
780
+ [ Thu Sep 15 20:16:44 2022 ] Top1: 64.08%
781
+ [ Thu Sep 15 20:16:44 2022 ] Top5: 90.30%
782
+ [ Thu Sep 15 20:16:44 2022 ] Training epoch: 126
783
+ [ Thu Sep 15 20:16:56 2022 ] Batch(24/123) done. Loss: 0.0177 lr:0.000100 network_time: 0.0495
784
+ [ Thu Sep 15 20:17:33 2022 ] Eval epoch: 126
785
+ [ Thu Sep 15 20:17:55 2022 ] Mean test loss of 258 batches: 1.8182704448699951.
786
+ [ Thu Sep 15 20:17:55 2022 ] Top1: 63.30%
787
+ [ Thu Sep 15 20:17:55 2022 ] Top5: 90.08%
788
+ [ Thu Sep 15 20:17:55 2022 ] Training epoch: 127
789
+ [ Thu Sep 15 20:17:59 2022 ] Batch(1/123) done. Loss: 0.0223 lr:0.000100 network_time: 0.0490
790
+ [ Thu Sep 15 20:18:36 2022 ] Batch(101/123) done. Loss: 0.0044 lr:0.000100 network_time: 0.0500
791
+ [ Thu Sep 15 20:18:44 2022 ] Eval epoch: 127
792
+ [ Thu Sep 15 20:19:06 2022 ] Mean test loss of 258 batches: 1.8703221082687378.
793
+ [ Thu Sep 15 20:19:06 2022 ] Top1: 62.72%
794
+ [ Thu Sep 15 20:19:06 2022 ] Top5: 89.54%
795
+ [ Thu Sep 15 20:19:06 2022 ] Training epoch: 128
796
+ [ Thu Sep 15 20:19:39 2022 ] Batch(78/123) done. Loss: 0.0040 lr:0.000100 network_time: 0.0493
797
+ [ Thu Sep 15 20:19:55 2022 ] Eval epoch: 128
798
+ [ Thu Sep 15 20:20:17 2022 ] Mean test loss of 258 batches: 1.8690118789672852.
799
+ [ Thu Sep 15 20:20:18 2022 ] Top1: 62.58%
800
+ [ Thu Sep 15 20:20:18 2022 ] Top5: 89.51%
801
+ [ Thu Sep 15 20:20:18 2022 ] Training epoch: 129
802
+ [ Thu Sep 15 20:20:42 2022 ] Batch(55/123) done. Loss: 0.0102 lr:0.000100 network_time: 0.0490
803
+ [ Thu Sep 15 20:21:07 2022 ] Eval epoch: 129
804
+ [ Thu Sep 15 20:21:29 2022 ] Mean test loss of 258 batches: 1.8351080417633057.
805
+ [ Thu Sep 15 20:21:29 2022 ] Top1: 63.52%
806
+ [ Thu Sep 15 20:21:29 2022 ] Top5: 89.91%
807
+ [ Thu Sep 15 20:21:29 2022 ] Training epoch: 130
808
+ [ Thu Sep 15 20:21:45 2022 ] Batch(32/123) done. Loss: 0.0063 lr:0.000100 network_time: 0.0524
809
+ [ Thu Sep 15 20:22:18 2022 ] Eval epoch: 130
810
+ [ Thu Sep 15 20:22:40 2022 ] Mean test loss of 258 batches: 1.9370732307434082.
811
+ [ Thu Sep 15 20:22:41 2022 ] Top1: 62.17%
812
+ [ Thu Sep 15 20:22:41 2022 ] Top5: 89.35%
813
+ [ Thu Sep 15 20:22:41 2022 ] Training epoch: 131
814
+ [ Thu Sep 15 20:22:48 2022 ] Batch(9/123) done. Loss: 0.0051 lr:0.000100 network_time: 0.0560
815
+ [ Thu Sep 15 20:23:25 2022 ] Batch(109/123) done. Loss: 0.0128 lr:0.000100 network_time: 0.0560
816
+ [ Thu Sep 15 20:23:30 2022 ] Eval epoch: 131
817
+ [ Thu Sep 15 20:23:53 2022 ] Mean test loss of 258 batches: 1.851426124572754.
818
+ [ Thu Sep 15 20:23:53 2022 ] Top1: 63.35%
819
+ [ Thu Sep 15 20:23:53 2022 ] Top5: 89.80%
820
+ [ Thu Sep 15 20:23:53 2022 ] Training epoch: 132
821
+ [ Thu Sep 15 20:24:28 2022 ] Batch(86/123) done. Loss: 0.0055 lr:0.000100 network_time: 0.0470
822
+ [ Thu Sep 15 20:24:42 2022 ] Eval epoch: 132
823
+ [ Thu Sep 15 20:25:04 2022 ] Mean test loss of 258 batches: 1.8273863792419434.
824
+ [ Thu Sep 15 20:25:04 2022 ] Top1: 63.27%
825
+ [ Thu Sep 15 20:25:04 2022 ] Top5: 89.86%
826
+ [ Thu Sep 15 20:25:05 2022 ] Training epoch: 133
827
+ [ Thu Sep 15 20:25:32 2022 ] Batch(63/123) done. Loss: 0.0160 lr:0.000100 network_time: 0.0491
828
+ [ Thu Sep 15 20:25:53 2022 ] Eval epoch: 133
829
+ [ Thu Sep 15 20:26:16 2022 ] Mean test loss of 258 batches: 1.9667322635650635.
830
+ [ Thu Sep 15 20:26:16 2022 ] Top1: 61.92%
831
+ [ Thu Sep 15 20:26:16 2022 ] Top5: 88.80%
832
+ [ Thu Sep 15 20:26:16 2022 ] Training epoch: 134
833
+ [ Thu Sep 15 20:26:35 2022 ] Batch(40/123) done. Loss: 0.0103 lr:0.000100 network_time: 0.0457
834
+ [ Thu Sep 15 20:27:06 2022 ] Eval epoch: 134
835
+ [ Thu Sep 15 20:27:28 2022 ] Mean test loss of 258 batches: 1.7689836025238037.
836
+ [ Thu Sep 15 20:27:28 2022 ] Top1: 63.86%
837
+ [ Thu Sep 15 20:27:28 2022 ] Top5: 90.31%
838
+ [ Thu Sep 15 20:27:28 2022 ] Training epoch: 135
839
+ [ Thu Sep 15 20:27:38 2022 ] Batch(17/123) done. Loss: 0.0054 lr:0.000100 network_time: 0.0543
840
+ [ Thu Sep 15 20:28:15 2022 ] Batch(117/123) done. Loss: 0.0125 lr:0.000100 network_time: 0.0534
841
+ [ Thu Sep 15 20:28:17 2022 ] Eval epoch: 135
842
+ [ Thu Sep 15 20:28:39 2022 ] Mean test loss of 258 batches: 1.8875691890716553.
843
+ [ Thu Sep 15 20:28:39 2022 ] Top1: 62.49%
844
+ [ Thu Sep 15 20:28:39 2022 ] Top5: 89.44%
845
+ [ Thu Sep 15 20:28:39 2022 ] Training epoch: 136
846
+ [ Thu Sep 15 20:29:18 2022 ] Batch(94/123) done. Loss: 0.0068 lr:0.000100 network_time: 0.0499
847
+ [ Thu Sep 15 20:29:29 2022 ] Eval epoch: 136
848
+ [ Thu Sep 15 20:29:51 2022 ] Mean test loss of 258 batches: 2.03627610206604.
849
+ [ Thu Sep 15 20:29:51 2022 ] Top1: 60.84%
850
+ [ Thu Sep 15 20:29:51 2022 ] Top5: 88.41%
851
+ [ Thu Sep 15 20:29:51 2022 ] Training epoch: 137
852
+ [ Thu Sep 15 20:30:21 2022 ] Batch(71/123) done. Loss: 0.0097 lr:0.000100 network_time: 0.0516
853
+ [ Thu Sep 15 20:30:41 2022 ] Eval epoch: 137
854
+ [ Thu Sep 15 20:31:03 2022 ] Mean test loss of 258 batches: 1.8615801334381104.
855
+ [ Thu Sep 15 20:31:03 2022 ] Top1: 62.60%
856
+ [ Thu Sep 15 20:31:03 2022 ] Top5: 89.79%
857
+ [ Thu Sep 15 20:31:03 2022 ] Training epoch: 138
858
+ [ Thu Sep 15 20:31:24 2022 ] Batch(48/123) done. Loss: 0.0286 lr:0.000100 network_time: 0.0510
859
+ [ Thu Sep 15 20:31:52 2022 ] Eval epoch: 138
860
+ [ Thu Sep 15 20:32:14 2022 ] Mean test loss of 258 batches: 1.9517872333526611.
861
+ [ Thu Sep 15 20:32:15 2022 ] Top1: 61.99%
862
+ [ Thu Sep 15 20:32:15 2022 ] Top5: 89.02%
863
+ [ Thu Sep 15 20:32:15 2022 ] Training epoch: 139
864
+ [ Thu Sep 15 20:32:28 2022 ] Batch(25/123) done. Loss: 0.0040 lr:0.000100 network_time: 0.0492
865
+ [ Thu Sep 15 20:33:04 2022 ] Eval epoch: 139
866
+ [ Thu Sep 15 20:33:26 2022 ] Mean test loss of 258 batches: 1.9645498991012573.
867
+ [ Thu Sep 15 20:33:26 2022 ] Top1: 62.20%
868
+ [ Thu Sep 15 20:33:26 2022 ] Top5: 88.99%
869
+ [ Thu Sep 15 20:33:26 2022 ] Training epoch: 140
870
+ [ Thu Sep 15 20:33:31 2022 ] Batch(2/123) done. Loss: 0.0057 lr:0.000100 network_time: 0.0496
871
+ [ Thu Sep 15 20:34:07 2022 ] Batch(102/123) done. Loss: 0.0084 lr:0.000100 network_time: 0.0479
872
+ [ Thu Sep 15 20:34:15 2022 ] Eval epoch: 140
873
+ [ Thu Sep 15 20:34:37 2022 ] Mean test loss of 258 batches: 1.9595528841018677.
874
+ [ Thu Sep 15 20:34:37 2022 ] Top1: 61.40%
875
+ [ Thu Sep 15 20:34:37 2022 ] Top5: 88.99%
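
Each evaluation in the log above emits a fixed pair of accuracy lines (Top1/Top5 percentages) immediately after an "Eval epoch: N" line, so the best checkpoint can be recovered by scanning them; in the lines shown above the highest Top1 is 64.22% (epoch 108). A minimal sketch, assuming the log has been saved locally as log.txt (hypothetical path):

    import re

    # Minimal sketch (not part of the upload): scan a saved copy of a log like the one
    # above and report the epoch with the best Top1 accuracy. Relies only on the
    # "Eval epoch: N" and "Top1: xx.xx%" lines.
    best_top1, best_epoch, epoch = 0.0, None, None
    with open("log.txt") as f:          # hypothetical local path
        for line in f:
            m = re.search(r"Eval epoch: (\d+)", line)
            if m:
                epoch = int(m.group(1))
                continue
            m = re.search(r"Top1: ([0-9.]+)%", line)
            if m and epoch is not None and float(m.group(1)) > best_top1:
                best_top1, best_epoch = float(m.group(1)), epoch
    print(f"best Top1: {best_top1:.2f}% at epoch {best_epoch}")
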
ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_bone_xsub/shift_gcn.py ADDED
@@ -0,0 +1,216 @@
1
+ import torch
2
+ import torch.nn as nn
3
+ import torch.nn.functional as F
4
+ from torch.autograd import Variable
5
+ import numpy as np
6
+ import math
7
+
8
+ import sys
9
+ sys.path.append("./model/Temporal_shift/")
10
+
11
+ from cuda.shift import Shift
12
+
13
+
14
+ def import_class(name):
15
+ components = name.split('.')
16
+ mod = __import__(components[0])
17
+ for comp in components[1:]:
18
+ mod = getattr(mod, comp)
19
+ return mod
20
+
21
+ def conv_init(conv):
22
+ nn.init.kaiming_normal(conv.weight, mode='fan_out')
23
+ nn.init.constant(conv.bias, 0)
24
+
25
+
26
+ def bn_init(bn, scale):
27
+ nn.init.constant(bn.weight, scale)
28
+ nn.init.constant(bn.bias, 0)
29
+
30
+
31
+ class tcn(nn.Module):
32
+ def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
33
+ super(tcn, self).__init__()
34
+ pad = int((kernel_size - 1) / 2)
35
+ self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
36
+ stride=(stride, 1))
37
+
38
+ self.bn = nn.BatchNorm2d(out_channels)
39
+ self.relu = nn.ReLU()
40
+ conv_init(self.conv)
41
+ bn_init(self.bn, 1)
42
+
43
+ def forward(self, x):
44
+ x = self.bn(self.conv(x))
45
+ return x
46
+
47
+
48
+ class Shift_tcn(nn.Module):
49
+ def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
50
+ super(Shift_tcn, self).__init__()
51
+
52
+ self.in_channels = in_channels
53
+ self.out_channels = out_channels
54
+
55
+ self.bn = nn.BatchNorm2d(in_channels)
56
+ self.bn2 = nn.BatchNorm2d(in_channels)
57
+ bn_init(self.bn2, 1)
58
+ self.relu = nn.ReLU(inplace=True)
59
+ self.shift_in = Shift(channel=in_channels, stride=1, init_scale=1)
60
+ self.shift_out = Shift(channel=out_channels, stride=stride, init_scale=1)
61
+
62
+ self.temporal_linear = nn.Conv2d(in_channels, out_channels, 1)
63
+ nn.init.kaiming_normal(self.temporal_linear.weight, mode='fan_out')
64
+
65
+ def forward(self, x):
66
+ x = self.bn(x)
67
+ # shift1
68
+ x = self.shift_in(x)
69
+ x = self.temporal_linear(x)
70
+ x = self.relu(x)
71
+ # shift2
72
+ x = self.shift_out(x)
73
+ x = self.bn2(x)
74
+ return x
75
+
76
+
77
+ class Shift_gcn(nn.Module):
78
+ def __init__(self, in_channels, out_channels, A, coff_embedding=4, num_subset=3):
79
+ super(Shift_gcn, self).__init__()
80
+ self.in_channels = in_channels
81
+ self.out_channels = out_channels
82
+ if in_channels != out_channels:
83
+ self.down = nn.Sequential(
84
+ nn.Conv2d(in_channels, out_channels, 1),
85
+ nn.BatchNorm2d(out_channels)
86
+ )
87
+ else:
88
+ self.down = lambda x: x
89
+
90
+ self.Linear_weight = nn.Parameter(torch.zeros(in_channels, out_channels, requires_grad=True, device='cuda'), requires_grad=True)
91
+ nn.init.normal_(self.Linear_weight, 0,math.sqrt(1.0/out_channels))
92
+
93
+ self.Linear_bias = nn.Parameter(torch.zeros(1,1,out_channels,requires_grad=True,device='cuda'),requires_grad=True)
94
+ nn.init.constant(self.Linear_bias, 0)
95
+
96
+ self.Feature_Mask = nn.Parameter(torch.ones(1,25,in_channels, requires_grad=True,device='cuda'),requires_grad=True)
97
+ nn.init.constant(self.Feature_Mask, 0)
98
+
99
+ self.bn = nn.BatchNorm1d(25*out_channels)
100
+ self.relu = nn.ReLU()
101
+
102
+ for m in self.modules():
103
+ if isinstance(m, nn.Conv2d):
104
+ conv_init(m)
105
+ elif isinstance(m, nn.BatchNorm2d):
106
+ bn_init(m, 1)
107
+
108
+ index_array = np.empty(25*in_channels).astype(np.int)
109
+ for i in range(25):
110
+ for j in range(in_channels):
111
+ index_array[i*in_channels + j] = (i*in_channels + j + j*in_channels)%(in_channels*25)
112
+ self.shift_in = nn.Parameter(torch.from_numpy(index_array),requires_grad=False)
113
+
114
+ index_array = np.empty(25*out_channels).astype(np.int)
115
+ for i in range(25):
116
+ for j in range(out_channels):
117
+ index_array[i*out_channels + j] = (i*out_channels + j - j*out_channels)%(out_channels*25)
118
+ self.shift_out = nn.Parameter(torch.from_numpy(index_array),requires_grad=False)
119
+
120
+
121
+ def forward(self, x0):
122
+ n, c, t, v = x0.size()
123
+ x = x0.permute(0,2,3,1).contiguous()
124
+
125
+ # shift1
126
+ x = x.view(n*t,v*c)
127
+ x = torch.index_select(x, 1, self.shift_in)
128
+ x = x.view(n*t,v,c)
129
+ x = x * (torch.tanh(self.Feature_Mask)+1)
130
+
131
+ x = torch.einsum('nwc,cd->nwd', (x, self.Linear_weight)).contiguous() # nt,v,c
132
+ x = x + self.Linear_bias
133
+
134
+ # shift2
135
+ x = x.view(n*t,-1)
136
+ x = torch.index_select(x, 1, self.shift_out)
137
+ x = self.bn(x)
138
+ x = x.view(n,t,v,self.out_channels).permute(0,3,1,2) # n,c,t,v
139
+
140
+ x = x + self.down(x0)
141
+ x = self.relu(x)
142
+ return x
143
+
144
+
145
+ class TCN_GCN_unit(nn.Module):
146
+ def __init__(self, in_channels, out_channels, A, stride=1, residual=True):
147
+ super(TCN_GCN_unit, self).__init__()
148
+ self.gcn1 = Shift_gcn(in_channels, out_channels, A)
149
+ self.tcn1 = Shift_tcn(out_channels, out_channels, stride=stride)
150
+ self.relu = nn.ReLU()
151
+
152
+ if not residual:
153
+ self.residual = lambda x: 0
154
+
155
+ elif (in_channels == out_channels) and (stride == 1):
156
+ self.residual = lambda x: x
157
+ else:
158
+ self.residual = tcn(in_channels, out_channels, kernel_size=1, stride=stride)
159
+
160
+ def forward(self, x):
161
+ x = self.tcn1(self.gcn1(x)) + self.residual(x)
162
+ return self.relu(x)
163
+
164
+
165
+ class Model(nn.Module):
166
+ def __init__(self, num_class=60, num_point=25, num_person=2, graph=None, graph_args=dict(), in_channels=3):
167
+ super(Model, self).__init__()
168
+
169
+ if graph is None:
170
+ raise ValueError('Model requires a graph class path, e.g. graph.ntu_rgb_d.Graph')
171
+ else:
172
+ Graph = import_class(graph)
173
+ self.graph = Graph(**graph_args)
174
+
175
+ A = self.graph.A
176
+ self.data_bn = nn.BatchNorm1d(num_person * in_channels * num_point)
177
+
178
+ self.l1 = TCN_GCN_unit(in_channels, 64, A, residual=False)
179
+ self.l2 = TCN_GCN_unit(64, 64, A)
180
+ self.l3 = TCN_GCN_unit(64, 64, A)
181
+ self.l4 = TCN_GCN_unit(64, 64, A)
182
+ self.l5 = TCN_GCN_unit(64, 128, A, stride=2)
183
+ self.l6 = TCN_GCN_unit(128, 128, A)
184
+ self.l7 = TCN_GCN_unit(128, 128, A)
185
+ self.l8 = TCN_GCN_unit(128, 256, A, stride=2)
186
+ self.l9 = TCN_GCN_unit(256, 256, A)
187
+ self.l10 = TCN_GCN_unit(256, 256, A)
188
+
189
+ self.fc = nn.Linear(256, num_class)
190
+ nn.init.normal_(self.fc.weight, 0, math.sqrt(2. / num_class))
191
+ bn_init(self.data_bn, 1)
192
+
193
+ def forward(self, x):
194
+ N, C, T, V, M = x.size()
195
+
196
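+ # Fold persons, joints and coordinates into one axis so the per-channel BatchNorm1d normalizes each (body, joint, coordinate) slot over batch and time.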
+ x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
197
+ x = self.data_bn(x)
198
+ x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
199
+
200
+ x = self.l1(x)
201
+ x = self.l2(x)
202
+ x = self.l3(x)
203
+ x = self.l4(x)
204
+ x = self.l5(x)
205
+ x = self.l6(x)
206
+ x = self.l7(x)
207
+ x = self.l8(x)
208
+ x = self.l9(x)
209
+ x = self.l10(x)
210
+
211
+ # N*M,C,T,V
212
+ c_new = x.size(1)
213
+ x = x.view(N, M, c_new, -1)
214
+ x = x.mean(3).mean(1)
215
+
216
+ return self.fc(x)
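A minimal usage sketch of the Model defined above, assuming the Shift-GCN repository modules (model.shift_gcn, graph.ntu_rgb_d, and the compiled cuda.shift temporal-shift extension imported at the top of this file) are on the Python path and a CUDA device is available, since the layer parameters are created with device='cuda':

import torch
from model.shift_gcn import Model  # the class defined above

# Arguments mirror model_args in the configs of this folder.
net = Model(num_class=60, num_point=25, num_person=2,
            graph='graph.ntu_rgb_d.Graph',
            graph_args={'labeling_mode': 'spatial'}).cuda()

# Dummy NTU-style clip: N=2 samples, C=3 coordinates, T=64 frames, V=25 joints, M=2 bodies.
x = torch.randn(2, 3, 64, 25, 2).cuda()
logits = net(x)      # forward() pools over frames, joints and bodies
print(logits.shape)  # torch.Size([2, 60])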
ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_joint_motion_xsub/config.yaml ADDED
@@ -0,0 +1,56 @@
1
+ Experiment_name: ntu_ShiftGCN_joint_motion_xsub
2
+ base_lr: 0.1
3
+ batch_size: 64
4
+ config: ./config/nturgbd-cross-subject/train_joint_motion.yaml
5
+ device:
6
+ - 6
7
+ - 7
8
+ eval_interval: 5
9
+ feeder: feeders.feeder.Feeder
10
+ ignore_weights: []
11
+ log_interval: 100
12
+ model: model.shift_gcn.Model
13
+ model_args:
14
+ graph: graph.ntu_rgb_d.Graph
15
+ graph_args:
16
+ labeling_mode: spatial
17
+ num_class: 60
18
+ num_person: 2
19
+ num_point: 25
20
+ model_saved_name: ./save_models/ntu_ShiftGCN_joint_motion_xsub
21
+ nesterov: true
22
+ num_epoch: 140
23
+ num_worker: 32
24
+ only_train_epoch: 1
25
+ only_train_part: true
26
+ optimizer: SGD
27
+ phase: train
28
+ print_log: true
29
+ save_interval: 2
30
+ save_score: false
31
+ seed: 1
32
+ show_topk:
33
+ - 1
34
+ - 5
35
+ start_epoch: 0
36
+ step:
37
+ - 60
38
+ - 80
39
+ - 100
40
+ test_batch_size: 64
41
+ test_feeder_args:
42
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_joint_motion.npy
43
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl
44
+ train_feeder_args:
45
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_joint_motion.npy
46
+ debug: false
47
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl
48
+ normalization: false
49
+ random_choose: false
50
+ random_move: false
51
+ random_shift: false
52
+ window_size: -1
53
+ warm_up_epoch: 0
54
+ weight_decay: 0.0001
55
+ weights: null
56
+ work_dir: ./work_dir/ntu_ShiftGCN_joint_motion_xsub
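The fields above map directly onto the training recipe; a short sketch of consuming such a config outside the repo's own entry point, assuming PyYAML is installed (momentum=0.9 is an assumption, it is not stored in the YAML):

import yaml
import torch.optim as optim
from model.shift_gcn import Model

# Path taken from the 'config' field above.
with open('./config/nturgbd-cross-subject/train_joint_motion.yaml') as f:
    cfg = yaml.safe_load(f)

# model_args unpacks one-to-one onto the Model constructor.
net = Model(**cfg['model_args']).cuda()

# SGD with Nesterov momentum and weight decay, per the config.
opt = optim.SGD(net.parameters(), lr=cfg['base_lr'], momentum=0.9,
                nesterov=cfg['nesterov'], weight_decay=cfg['weight_decay'])

# lr is divided by 10 at epochs 60/80/100, matching the 0.1 -> 0.01 -> 0.001 -> 0.0001
# progression visible in the accompanying log.txt files.
scheduler = optim.lr_scheduler.MultiStepLR(opt, milestones=cfg['step'], gamma=0.1)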
ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_joint_motion_xsub/eval_results/best_acc.pkl ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b69d62a09c755f915c0f11a163f96d8e0008156465c096689f02d91214028ee0
3
+ size 4979902
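The best_acc.pkl entries under eval_results/ are Git LFS pointers (version, oid, size), not the pickles themselves. A sketch of inspecting one after fetching the LFS objects; the structure of the stored object is an assumption, it is not documented in this upload:

# Fetch the real payloads first, e.g.:
#   git lfs pull --include "ckpt/Others/Shift-GCN/*"
import pickle

path = ('ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_joint_motion_xsub/'
        'eval_results/best_acc.pkl')
with open(path, 'rb') as f:
    obj = pickle.load(f)  # contents (e.g. saved evaluation scores) are an assumption
print(type(obj))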
ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_joint_motion_xsub/log.txt ADDED
@@ -0,0 +1,875 @@
1
+ [ Wed Sep 14 13:30:21 2022 ] Parameters:
2
+ {'work_dir': './work_dir/ntu_ShiftGCN_joint_motion_xsub', 'model_saved_name': './save_models/ntu_ShiftGCN_joint_motion_xsub', 'Experiment_name': 'ntu_ShiftGCN_joint_motion_xsub', 'config': './config/nturgbd-cross-subject/train_joint_motion.yaml', 'phase': 'train', 'save_score': False, 'seed': 1, 'log_interval': 100, 'save_interval': 2, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_joint_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_joint_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl'}, 'model': 'model.shift_gcn.Model', 'model_args': {'num_class': 60, 'num_point': 25, 'num_person': 2, 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.1, 'step': [60, 80, 100], 'device': [6, 7], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 64, 'test_batch_size': 64, 'start_epoch': 0, 'num_epoch': 140, 'weight_decay': 0.0001, 'only_train_part': True, 'only_train_epoch': 1, 'warm_up_epoch': 0}
3
+
4
+ [ Wed Sep 14 13:30:21 2022 ] Training epoch: 1
5
+ [ Wed Sep 14 13:31:39 2022 ] Batch(99/123) done. Loss: 1.8322 lr:0.100000 network_time: 0.0273
6
+ [ Wed Sep 14 13:31:56 2022 ] Eval epoch: 1
7
+ [ Wed Sep 14 13:32:29 2022 ] Mean test loss of 258 batches: 5.421380996704102.
8
+ [ Wed Sep 14 13:32:29 2022 ] Top1: 16.31%
9
+ [ Wed Sep 14 13:32:29 2022 ] Top5: 41.85%
10
+ [ Wed Sep 14 13:32:29 2022 ] Training epoch: 2
11
+ [ Wed Sep 14 13:33:28 2022 ] Batch(76/123) done. Loss: 1.7521 lr:0.100000 network_time: 0.0322
12
+ [ Wed Sep 14 13:34:02 2022 ] Eval epoch: 2
13
+ [ Wed Sep 14 13:34:35 2022 ] Mean test loss of 258 batches: 3.908324956893921.
14
+ [ Wed Sep 14 13:34:35 2022 ] Top1: 26.00%
15
+ [ Wed Sep 14 13:34:35 2022 ] Top5: 55.09%
16
+ [ Wed Sep 14 13:34:35 2022 ] Training epoch: 3
17
+ [ Wed Sep 14 13:35:17 2022 ] Batch(53/123) done. Loss: 1.3974 lr:0.100000 network_time: 0.0291
18
+ [ Wed Sep 14 13:36:08 2022 ] Eval epoch: 3
19
+ [ Wed Sep 14 13:36:41 2022 ] Mean test loss of 258 batches: 3.634312391281128.
20
+ [ Wed Sep 14 13:36:41 2022 ] Top1: 31.05%
21
+ [ Wed Sep 14 13:36:41 2022 ] Top5: 64.15%
22
+ [ Wed Sep 14 13:36:41 2022 ] Training epoch: 4
23
+ [ Wed Sep 14 13:37:07 2022 ] Batch(30/123) done. Loss: 1.4544 lr:0.100000 network_time: 0.0273
24
+ [ Wed Sep 14 13:38:14 2022 ] Eval epoch: 4
25
+ [ Wed Sep 14 13:38:47 2022 ] Mean test loss of 258 batches: 3.0245800018310547.
26
+ [ Wed Sep 14 13:38:47 2022 ] Top1: 37.07%
27
+ [ Wed Sep 14 13:38:47 2022 ] Top5: 71.13%
28
+ [ Wed Sep 14 13:38:47 2022 ] Training epoch: 5
29
+ [ Wed Sep 14 13:38:56 2022 ] Batch(7/123) done. Loss: 1.3769 lr:0.100000 network_time: 0.0284
30
+ [ Wed Sep 14 13:40:09 2022 ] Batch(107/123) done. Loss: 0.9981 lr:0.100000 network_time: 0.0323
31
+ [ Wed Sep 14 13:40:20 2022 ] Eval epoch: 5
32
+ [ Wed Sep 14 13:40:53 2022 ] Mean test loss of 258 batches: 3.6688332557678223.
33
+ [ Wed Sep 14 13:40:53 2022 ] Top1: 37.91%
34
+ [ Wed Sep 14 13:40:53 2022 ] Top5: 72.09%
35
+ [ Wed Sep 14 13:40:53 2022 ] Training epoch: 6
36
+ [ Wed Sep 14 13:41:58 2022 ] Batch(84/123) done. Loss: 1.0236 lr:0.100000 network_time: 0.0286
37
+ [ Wed Sep 14 13:42:26 2022 ] Eval epoch: 6
38
+ [ Wed Sep 14 13:42:59 2022 ] Mean test loss of 258 batches: 3.80401349067688.
39
+ [ Wed Sep 14 13:42:59 2022 ] Top1: 35.28%
40
+ [ Wed Sep 14 13:42:59 2022 ] Top5: 69.93%
41
+ [ Wed Sep 14 13:42:59 2022 ] Training epoch: 7
42
+ [ Wed Sep 14 13:43:48 2022 ] Batch(61/123) done. Loss: 0.8047 lr:0.100000 network_time: 0.0289
43
+ [ Wed Sep 14 13:44:32 2022 ] Eval epoch: 7
44
+ [ Wed Sep 14 13:45:06 2022 ] Mean test loss of 258 batches: 2.357377290725708.
45
+ [ Wed Sep 14 13:45:06 2022 ] Top1: 47.07%
46
+ [ Wed Sep 14 13:45:06 2022 ] Top5: 79.77%
47
+ [ Wed Sep 14 13:45:06 2022 ] Training epoch: 8
48
+ [ Wed Sep 14 13:45:37 2022 ] Batch(38/123) done. Loss: 0.9309 lr:0.100000 network_time: 0.0315
49
+ [ Wed Sep 14 13:46:39 2022 ] Eval epoch: 8
50
+ [ Wed Sep 14 13:47:12 2022 ] Mean test loss of 258 batches: 2.9091107845306396.
51
+ [ Wed Sep 14 13:47:12 2022 ] Top1: 44.64%
52
+ [ Wed Sep 14 13:47:12 2022 ] Top5: 77.13%
53
+ [ Wed Sep 14 13:47:13 2022 ] Training epoch: 9
54
+ [ Wed Sep 14 13:47:28 2022 ] Batch(15/123) done. Loss: 0.7382 lr:0.100000 network_time: 0.0265
55
+ [ Wed Sep 14 13:48:40 2022 ] Batch(115/123) done. Loss: 0.6233 lr:0.100000 network_time: 0.0314
56
+ [ Wed Sep 14 13:48:46 2022 ] Eval epoch: 9
57
+ [ Wed Sep 14 13:49:19 2022 ] Mean test loss of 258 batches: 2.300452947616577.
58
+ [ Wed Sep 14 13:49:19 2022 ] Top1: 49.20%
59
+ [ Wed Sep 14 13:49:19 2022 ] Top5: 83.79%
60
+ [ Wed Sep 14 13:49:19 2022 ] Training epoch: 10
61
+ [ Wed Sep 14 13:50:30 2022 ] Batch(92/123) done. Loss: 0.8082 lr:0.100000 network_time: 0.0325
62
+ [ Wed Sep 14 13:50:52 2022 ] Eval epoch: 10
63
+ [ Wed Sep 14 13:51:25 2022 ] Mean test loss of 258 batches: 2.6167848110198975.
64
+ [ Wed Sep 14 13:51:25 2022 ] Top1: 40.61%
65
+ [ Wed Sep 14 13:51:25 2022 ] Top5: 75.19%
66
+ [ Wed Sep 14 13:51:25 2022 ] Training epoch: 11
67
+ [ Wed Sep 14 13:52:19 2022 ] Batch(69/123) done. Loss: 0.7017 lr:0.100000 network_time: 0.0365
68
+ [ Wed Sep 14 13:52:58 2022 ] Eval epoch: 11
69
+ [ Wed Sep 14 13:53:31 2022 ] Mean test loss of 258 batches: 2.6783909797668457.
70
+ [ Wed Sep 14 13:53:31 2022 ] Top1: 44.78%
71
+ [ Wed Sep 14 13:53:31 2022 ] Top5: 79.88%
72
+ [ Wed Sep 14 13:53:31 2022 ] Training epoch: 12
73
+ [ Wed Sep 14 13:54:09 2022 ] Batch(46/123) done. Loss: 0.6095 lr:0.100000 network_time: 0.0268
74
+ [ Wed Sep 14 13:55:04 2022 ] Eval epoch: 12
75
+ [ Wed Sep 14 13:55:38 2022 ] Mean test loss of 258 batches: 3.100949764251709.
76
+ [ Wed Sep 14 13:55:38 2022 ] Top1: 46.73%
77
+ [ Wed Sep 14 13:55:38 2022 ] Top5: 81.48%
78
+ [ Wed Sep 14 13:55:38 2022 ] Training epoch: 13
79
+ [ Wed Sep 14 13:55:59 2022 ] Batch(23/123) done. Loss: 0.3895 lr:0.100000 network_time: 0.0285
80
+ [ Wed Sep 14 13:57:11 2022 ] Eval epoch: 13
81
+ [ Wed Sep 14 13:57:44 2022 ] Mean test loss of 258 batches: 2.872067451477051.
82
+ [ Wed Sep 14 13:57:44 2022 ] Top1: 42.58%
83
+ [ Wed Sep 14 13:57:44 2022 ] Top5: 77.13%
84
+ [ Wed Sep 14 13:57:44 2022 ] Training epoch: 14
85
+ [ Wed Sep 14 13:57:48 2022 ] Batch(0/123) done. Loss: 0.3612 lr:0.100000 network_time: 0.0457
86
+ [ Wed Sep 14 13:59:01 2022 ] Batch(100/123) done. Loss: 0.4800 lr:0.100000 network_time: 0.0278
87
+ [ Wed Sep 14 13:59:17 2022 ] Eval epoch: 14
88
+ [ Wed Sep 14 13:59:49 2022 ] Mean test loss of 258 batches: 3.2104737758636475.
89
+ [ Wed Sep 14 13:59:50 2022 ] Top1: 52.74%
90
+ [ Wed Sep 14 13:59:50 2022 ] Top5: 83.59%
91
+ [ Wed Sep 14 13:59:50 2022 ] Training epoch: 15
92
+ [ Wed Sep 14 14:00:49 2022 ] Batch(77/123) done. Loss: 0.5065 lr:0.100000 network_time: 0.0308
93
+ [ Wed Sep 14 14:01:22 2022 ] Eval epoch: 15
94
+ [ Wed Sep 14 14:01:55 2022 ] Mean test loss of 258 batches: 2.190570116043091.
95
+ [ Wed Sep 14 14:01:55 2022 ] Top1: 53.07%
96
+ [ Wed Sep 14 14:01:55 2022 ] Top5: 85.18%
97
+ [ Wed Sep 14 14:01:55 2022 ] Training epoch: 16
98
+ [ Wed Sep 14 14:02:39 2022 ] Batch(54/123) done. Loss: 0.4220 lr:0.100000 network_time: 0.0281
99
+ [ Wed Sep 14 14:03:28 2022 ] Eval epoch: 16
100
+ [ Wed Sep 14 14:04:02 2022 ] Mean test loss of 258 batches: 2.1734108924865723.
101
+ [ Wed Sep 14 14:04:02 2022 ] Top1: 47.49%
102
+ [ Wed Sep 14 14:04:02 2022 ] Top5: 81.16%
103
+ [ Wed Sep 14 14:04:02 2022 ] Training epoch: 17
104
+ [ Wed Sep 14 14:04:28 2022 ] Batch(31/123) done. Loss: 0.5759 lr:0.100000 network_time: 0.0318
105
+ [ Wed Sep 14 14:05:35 2022 ] Eval epoch: 17
106
+ [ Wed Sep 14 14:06:08 2022 ] Mean test loss of 258 batches: 3.361882209777832.
107
+ [ Wed Sep 14 14:06:08 2022 ] Top1: 44.20%
108
+ [ Wed Sep 14 14:06:08 2022 ] Top5: 79.01%
109
+ [ Wed Sep 14 14:06:08 2022 ] Training epoch: 18
110
+ [ Wed Sep 14 14:06:18 2022 ] Batch(8/123) done. Loss: 0.2408 lr:0.100000 network_time: 0.0305
111
+ [ Wed Sep 14 14:07:30 2022 ] Batch(108/123) done. Loss: 0.3474 lr:0.100000 network_time: 0.0312
112
+ [ Wed Sep 14 14:07:41 2022 ] Eval epoch: 18
113
+ [ Wed Sep 14 14:08:14 2022 ] Mean test loss of 258 batches: 1.9666603803634644.
114
+ [ Wed Sep 14 14:08:14 2022 ] Top1: 53.97%
115
+ [ Wed Sep 14 14:08:14 2022 ] Top5: 85.68%
116
+ [ Wed Sep 14 14:08:14 2022 ] Training epoch: 19
117
+ [ Wed Sep 14 14:09:20 2022 ] Batch(85/123) done. Loss: 0.3814 lr:0.100000 network_time: 0.0276
118
+ [ Wed Sep 14 14:09:47 2022 ] Eval epoch: 19
119
+ [ Wed Sep 14 14:10:20 2022 ] Mean test loss of 258 batches: 2.295517921447754.
120
+ [ Wed Sep 14 14:10:20 2022 ] Top1: 53.63%
121
+ [ Wed Sep 14 14:10:20 2022 ] Top5: 84.36%
122
+ [ Wed Sep 14 14:10:20 2022 ] Training epoch: 20
123
+ [ Wed Sep 14 14:11:09 2022 ] Batch(62/123) done. Loss: 0.2456 lr:0.100000 network_time: 0.0278
124
+ [ Wed Sep 14 14:11:52 2022 ] Eval epoch: 20
125
+ [ Wed Sep 14 14:12:25 2022 ] Mean test loss of 258 batches: 2.4664697647094727.
126
+ [ Wed Sep 14 14:12:25 2022 ] Top1: 47.50%
127
+ [ Wed Sep 14 14:12:25 2022 ] Top5: 82.88%
128
+ [ Wed Sep 14 14:12:25 2022 ] Training epoch: 21
129
+ [ Wed Sep 14 14:12:58 2022 ] Batch(39/123) done. Loss: 0.4303 lr:0.100000 network_time: 0.0276
130
+ [ Wed Sep 14 14:13:58 2022 ] Eval epoch: 21
131
+ [ Wed Sep 14 14:14:31 2022 ] Mean test loss of 258 batches: 2.3439109325408936.
132
+ [ Wed Sep 14 14:14:31 2022 ] Top1: 52.30%
133
+ [ Wed Sep 14 14:14:32 2022 ] Top5: 86.02%
134
+ [ Wed Sep 14 14:14:32 2022 ] Training epoch: 22
135
+ [ Wed Sep 14 14:14:47 2022 ] Batch(16/123) done. Loss: 0.2752 lr:0.100000 network_time: 0.0420
136
+ [ Wed Sep 14 14:16:00 2022 ] Batch(116/123) done. Loss: 0.2960 lr:0.100000 network_time: 0.0303
137
+ [ Wed Sep 14 14:16:04 2022 ] Eval epoch: 22
138
+ [ Wed Sep 14 14:16:37 2022 ] Mean test loss of 258 batches: 2.3402199745178223.
139
+ [ Wed Sep 14 14:16:38 2022 ] Top1: 49.31%
140
+ [ Wed Sep 14 14:16:38 2022 ] Top5: 82.39%
141
+ [ Wed Sep 14 14:16:38 2022 ] Training epoch: 23
142
+ [ Wed Sep 14 14:17:49 2022 ] Batch(93/123) done. Loss: 0.5902 lr:0.100000 network_time: 0.0383
143
+ [ Wed Sep 14 14:18:11 2022 ] Eval epoch: 23
144
+ [ Wed Sep 14 14:18:44 2022 ] Mean test loss of 258 batches: 3.4021964073181152.
145
+ [ Wed Sep 14 14:18:44 2022 ] Top1: 44.00%
146
+ [ Wed Sep 14 14:18:44 2022 ] Top5: 78.38%
147
+ [ Wed Sep 14 14:18:44 2022 ] Training epoch: 24
148
+ [ Wed Sep 14 14:19:39 2022 ] Batch(70/123) done. Loss: 0.2657 lr:0.100000 network_time: 0.0348
149
+ [ Wed Sep 14 14:20:17 2022 ] Eval epoch: 24
150
+ [ Wed Sep 14 14:20:50 2022 ] Mean test loss of 258 batches: 2.8668456077575684.
151
+ [ Wed Sep 14 14:20:50 2022 ] Top1: 48.26%
152
+ [ Wed Sep 14 14:20:50 2022 ] Top5: 82.93%
153
+ [ Wed Sep 14 14:20:50 2022 ] Training epoch: 25
154
+ [ Wed Sep 14 14:21:28 2022 ] Batch(47/123) done. Loss: 0.3772 lr:0.100000 network_time: 0.0304
155
+ [ Wed Sep 14 14:22:23 2022 ] Eval epoch: 25
156
+ [ Wed Sep 14 14:22:56 2022 ] Mean test loss of 258 batches: 3.23077130317688.
157
+ [ Wed Sep 14 14:22:56 2022 ] Top1: 46.24%
158
+ [ Wed Sep 14 14:22:56 2022 ] Top5: 78.16%
159
+ [ Wed Sep 14 14:22:56 2022 ] Training epoch: 26
160
+ [ Wed Sep 14 14:23:17 2022 ] Batch(24/123) done. Loss: 0.3657 lr:0.100000 network_time: 0.0288
161
+ [ Wed Sep 14 14:24:29 2022 ] Eval epoch: 26
162
+ [ Wed Sep 14 14:25:02 2022 ] Mean test loss of 258 batches: 2.7416818141937256.
163
+ [ Wed Sep 14 14:25:02 2022 ] Top1: 51.59%
164
+ [ Wed Sep 14 14:25:03 2022 ] Top5: 82.94%
165
+ [ Wed Sep 14 14:25:03 2022 ] Training epoch: 27
166
+ [ Wed Sep 14 14:25:07 2022 ] Batch(1/123) done. Loss: 0.1882 lr:0.100000 network_time: 0.0299
167
+ [ Wed Sep 14 14:26:20 2022 ] Batch(101/123) done. Loss: 0.2472 lr:0.100000 network_time: 0.0290
168
+ [ Wed Sep 14 14:26:36 2022 ] Eval epoch: 27
169
+ [ Wed Sep 14 14:27:08 2022 ] Mean test loss of 258 batches: 2.7824652194976807.
170
+ [ Wed Sep 14 14:27:09 2022 ] Top1: 46.99%
171
+ [ Wed Sep 14 14:27:09 2022 ] Top5: 80.48%
172
+ [ Wed Sep 14 14:27:09 2022 ] Training epoch: 28
173
+ [ Wed Sep 14 14:28:09 2022 ] Batch(78/123) done. Loss: 0.4206 lr:0.100000 network_time: 0.0333
174
+ [ Wed Sep 14 14:28:41 2022 ] Eval epoch: 28
175
+ [ Wed Sep 14 14:29:14 2022 ] Mean test loss of 258 batches: 2.5910394191741943.
176
+ [ Wed Sep 14 14:29:14 2022 ] Top1: 52.99%
177
+ [ Wed Sep 14 14:29:14 2022 ] Top5: 84.35%
178
+ [ Wed Sep 14 14:29:14 2022 ] Training epoch: 29
179
+ [ Wed Sep 14 14:29:58 2022 ] Batch(55/123) done. Loss: 0.4513 lr:0.100000 network_time: 0.0272
180
+ [ Wed Sep 14 14:30:47 2022 ] Eval epoch: 29
181
+ [ Wed Sep 14 14:31:19 2022 ] Mean test loss of 258 batches: 3.8707644939422607.
182
+ [ Wed Sep 14 14:31:19 2022 ] Top1: 42.49%
183
+ [ Wed Sep 14 14:31:19 2022 ] Top5: 75.29%
184
+ [ Wed Sep 14 14:31:19 2022 ] Training epoch: 30
185
+ [ Wed Sep 14 14:31:46 2022 ] Batch(32/123) done. Loss: 0.2088 lr:0.100000 network_time: 0.0335
186
+ [ Wed Sep 14 14:32:52 2022 ] Eval epoch: 30
187
+ [ Wed Sep 14 14:33:25 2022 ] Mean test loss of 258 batches: 2.0434672832489014.
188
+ [ Wed Sep 14 14:33:25 2022 ] Top1: 53.36%
189
+ [ Wed Sep 14 14:33:25 2022 ] Top5: 85.15%
190
+ [ Wed Sep 14 14:33:25 2022 ] Training epoch: 31
191
+ [ Wed Sep 14 14:33:36 2022 ] Batch(9/123) done. Loss: 0.1743 lr:0.100000 network_time: 0.0293
192
+ [ Wed Sep 14 14:34:48 2022 ] Batch(109/123) done. Loss: 0.1601 lr:0.100000 network_time: 0.0328
193
+ [ Wed Sep 14 14:34:58 2022 ] Eval epoch: 31
194
+ [ Wed Sep 14 14:35:30 2022 ] Mean test loss of 258 batches: 2.6095051765441895.
195
+ [ Wed Sep 14 14:35:31 2022 ] Top1: 52.76%
196
+ [ Wed Sep 14 14:35:31 2022 ] Top5: 83.31%
197
+ [ Wed Sep 14 14:35:31 2022 ] Training epoch: 32
198
+ [ Wed Sep 14 14:36:37 2022 ] Batch(86/123) done. Loss: 0.2414 lr:0.100000 network_time: 0.0278
199
+ [ Wed Sep 14 14:37:03 2022 ] Eval epoch: 32
200
+ [ Wed Sep 14 14:37:36 2022 ] Mean test loss of 258 batches: 2.538578510284424.
201
+ [ Wed Sep 14 14:37:36 2022 ] Top1: 53.28%
202
+ [ Wed Sep 14 14:37:36 2022 ] Top5: 85.77%
203
+ [ Wed Sep 14 14:37:36 2022 ] Training epoch: 33
204
+ [ Wed Sep 14 14:38:26 2022 ] Batch(63/123) done. Loss: 0.3211 lr:0.100000 network_time: 0.0277
205
+ [ Wed Sep 14 14:39:09 2022 ] Eval epoch: 33
206
+ [ Wed Sep 14 14:39:41 2022 ] Mean test loss of 258 batches: 2.6285548210144043.
207
+ [ Wed Sep 14 14:39:41 2022 ] Top1: 52.92%
208
+ [ Wed Sep 14 14:39:42 2022 ] Top5: 84.96%
209
+ [ Wed Sep 14 14:39:42 2022 ] Training epoch: 34
210
+ [ Wed Sep 14 14:40:15 2022 ] Batch(40/123) done. Loss: 0.1880 lr:0.100000 network_time: 0.0280
211
+ [ Wed Sep 14 14:41:14 2022 ] Eval epoch: 34
212
+ [ Wed Sep 14 14:41:47 2022 ] Mean test loss of 258 batches: 2.0833334922790527.
213
+ [ Wed Sep 14 14:41:47 2022 ] Top1: 53.60%
214
+ [ Wed Sep 14 14:41:47 2022 ] Top5: 86.80%
215
+ [ Wed Sep 14 14:41:47 2022 ] Training epoch: 35
216
+ [ Wed Sep 14 14:42:03 2022 ] Batch(17/123) done. Loss: 0.1468 lr:0.100000 network_time: 0.0350
217
+ [ Wed Sep 14 14:43:16 2022 ] Batch(117/123) done. Loss: 0.2022 lr:0.100000 network_time: 0.0279
218
+ [ Wed Sep 14 14:43:20 2022 ] Eval epoch: 35
219
+ [ Wed Sep 14 14:43:52 2022 ] Mean test loss of 258 batches: 2.6050968170166016.
220
+ [ Wed Sep 14 14:43:52 2022 ] Top1: 49.00%
221
+ [ Wed Sep 14 14:43:53 2022 ] Top5: 81.38%
222
+ [ Wed Sep 14 14:43:53 2022 ] Training epoch: 36
223
+ [ Wed Sep 14 14:45:05 2022 ] Batch(94/123) done. Loss: 0.3228 lr:0.100000 network_time: 0.0292
224
+ [ Wed Sep 14 14:45:25 2022 ] Eval epoch: 36
225
+ [ Wed Sep 14 14:45:58 2022 ] Mean test loss of 258 batches: 2.1508259773254395.
226
+ [ Wed Sep 14 14:45:58 2022 ] Top1: 56.06%
227
+ [ Wed Sep 14 14:45:58 2022 ] Top5: 84.73%
228
+ [ Wed Sep 14 14:45:58 2022 ] Training epoch: 37
229
+ [ Wed Sep 14 14:46:54 2022 ] Batch(71/123) done. Loss: 0.5055 lr:0.100000 network_time: 0.0277
230
+ [ Wed Sep 14 14:47:31 2022 ] Eval epoch: 37
231
+ [ Wed Sep 14 14:48:04 2022 ] Mean test loss of 258 batches: 2.3189916610717773.
232
+ [ Wed Sep 14 14:48:04 2022 ] Top1: 55.84%
233
+ [ Wed Sep 14 14:48:04 2022 ] Top5: 84.73%
234
+ [ Wed Sep 14 14:48:04 2022 ] Training epoch: 38
235
+ [ Wed Sep 14 14:48:43 2022 ] Batch(48/123) done. Loss: 0.2248 lr:0.100000 network_time: 0.0281
236
+ [ Wed Sep 14 14:49:37 2022 ] Eval epoch: 38
237
+ [ Wed Sep 14 14:50:10 2022 ] Mean test loss of 258 batches: 3.328526496887207.
238
+ [ Wed Sep 14 14:50:10 2022 ] Top1: 40.93%
239
+ [ Wed Sep 14 14:50:10 2022 ] Top5: 77.16%
240
+ [ Wed Sep 14 14:50:10 2022 ] Training epoch: 39
241
+ [ Wed Sep 14 14:50:32 2022 ] Batch(25/123) done. Loss: 0.1325 lr:0.100000 network_time: 0.0321
242
+ [ Wed Sep 14 14:51:42 2022 ] Eval epoch: 39
243
+ [ Wed Sep 14 14:52:15 2022 ] Mean test loss of 258 batches: 3.4714365005493164.
244
+ [ Wed Sep 14 14:52:15 2022 ] Top1: 44.80%
245
+ [ Wed Sep 14 14:52:15 2022 ] Top5: 79.50%
246
+ [ Wed Sep 14 14:52:15 2022 ] Training epoch: 40
247
+ [ Wed Sep 14 14:52:20 2022 ] Batch(2/123) done. Loss: 0.2373 lr:0.100000 network_time: 0.0266
248
+ [ Wed Sep 14 14:53:33 2022 ] Batch(102/123) done. Loss: 0.2601 lr:0.100000 network_time: 0.0275
249
+ [ Wed Sep 14 14:53:48 2022 ] Eval epoch: 40
250
+ [ Wed Sep 14 14:54:20 2022 ] Mean test loss of 258 batches: 2.6973369121551514.
251
+ [ Wed Sep 14 14:54:20 2022 ] Top1: 50.18%
252
+ [ Wed Sep 14 14:54:21 2022 ] Top5: 82.68%
253
+ [ Wed Sep 14 14:54:21 2022 ] Training epoch: 41
254
+ [ Wed Sep 14 14:55:22 2022 ] Batch(79/123) done. Loss: 0.3010 lr:0.100000 network_time: 0.0268
255
+ [ Wed Sep 14 14:55:53 2022 ] Eval epoch: 41
256
+ [ Wed Sep 14 14:56:26 2022 ] Mean test loss of 258 batches: 2.5908446311950684.
257
+ [ Wed Sep 14 14:56:26 2022 ] Top1: 51.38%
258
+ [ Wed Sep 14 14:56:26 2022 ] Top5: 84.08%
259
+ [ Wed Sep 14 14:56:26 2022 ] Training epoch: 42
260
+ [ Wed Sep 14 14:57:11 2022 ] Batch(56/123) done. Loss: 0.3432 lr:0.100000 network_time: 0.0267
261
+ [ Wed Sep 14 14:57:59 2022 ] Eval epoch: 42
262
+ [ Wed Sep 14 14:58:32 2022 ] Mean test loss of 258 batches: 2.8294875621795654.
263
+ [ Wed Sep 14 14:58:32 2022 ] Top1: 52.04%
264
+ [ Wed Sep 14 14:58:32 2022 ] Top5: 83.27%
265
+ [ Wed Sep 14 14:58:32 2022 ] Training epoch: 43
266
+ [ Wed Sep 14 14:59:00 2022 ] Batch(33/123) done. Loss: 0.0792 lr:0.100000 network_time: 0.0264
267
+ [ Wed Sep 14 15:00:04 2022 ] Eval epoch: 43
268
+ [ Wed Sep 14 15:00:37 2022 ] Mean test loss of 258 batches: 2.162405252456665.
269
+ [ Wed Sep 14 15:00:37 2022 ] Top1: 55.95%
270
+ [ Wed Sep 14 15:00:37 2022 ] Top5: 86.15%
271
+ [ Wed Sep 14 15:00:37 2022 ] Training epoch: 44
272
+ [ Wed Sep 14 15:00:48 2022 ] Batch(10/123) done. Loss: 0.1217 lr:0.100000 network_time: 0.0304
273
+ [ Wed Sep 14 15:02:01 2022 ] Batch(110/123) done. Loss: 0.1810 lr:0.100000 network_time: 0.0331
274
+ [ Wed Sep 14 15:02:10 2022 ] Eval epoch: 44
275
+ [ Wed Sep 14 15:02:42 2022 ] Mean test loss of 258 batches: 2.7251181602478027.
276
+ [ Wed Sep 14 15:02:43 2022 ] Top1: 51.24%
277
+ [ Wed Sep 14 15:02:43 2022 ] Top5: 83.48%
278
+ [ Wed Sep 14 15:02:43 2022 ] Training epoch: 45
279
+ [ Wed Sep 14 15:03:49 2022 ] Batch(87/123) done. Loss: 0.1741 lr:0.100000 network_time: 0.0263
280
+ [ Wed Sep 14 15:04:15 2022 ] Eval epoch: 45
281
+ [ Wed Sep 14 15:04:48 2022 ] Mean test loss of 258 batches: 2.7098324298858643.
282
+ [ Wed Sep 14 15:04:48 2022 ] Top1: 51.33%
283
+ [ Wed Sep 14 15:04:48 2022 ] Top5: 82.89%
284
+ [ Wed Sep 14 15:04:48 2022 ] Training epoch: 46
285
+ [ Wed Sep 14 15:05:38 2022 ] Batch(64/123) done. Loss: 0.1355 lr:0.100000 network_time: 0.0331
286
+ [ Wed Sep 14 15:06:20 2022 ] Eval epoch: 46
287
+ [ Wed Sep 14 15:06:53 2022 ] Mean test loss of 258 batches: 2.519627332687378.
288
+ [ Wed Sep 14 15:06:53 2022 ] Top1: 54.52%
289
+ [ Wed Sep 14 15:06:53 2022 ] Top5: 84.55%
290
+ [ Wed Sep 14 15:06:53 2022 ] Training epoch: 47
291
+ [ Wed Sep 14 15:07:27 2022 ] Batch(41/123) done. Loss: 0.3258 lr:0.100000 network_time: 0.0264
292
+ [ Wed Sep 14 15:08:26 2022 ] Eval epoch: 47
293
+ [ Wed Sep 14 15:08:59 2022 ] Mean test loss of 258 batches: 2.6793875694274902.
294
+ [ Wed Sep 14 15:08:59 2022 ] Top1: 49.95%
295
+ [ Wed Sep 14 15:08:59 2022 ] Top5: 81.57%
296
+ [ Wed Sep 14 15:08:59 2022 ] Training epoch: 48
297
+ [ Wed Sep 14 15:09:16 2022 ] Batch(18/123) done. Loss: 0.1776 lr:0.100000 network_time: 0.0327
298
+ [ Wed Sep 14 15:10:28 2022 ] Batch(118/123) done. Loss: 0.1763 lr:0.100000 network_time: 0.0273
299
+ [ Wed Sep 14 15:10:31 2022 ] Eval epoch: 48
300
+ [ Wed Sep 14 15:11:04 2022 ] Mean test loss of 258 batches: 2.500056505203247.
301
+ [ Wed Sep 14 15:11:04 2022 ] Top1: 53.82%
302
+ [ Wed Sep 14 15:11:04 2022 ] Top5: 79.20%
303
+ [ Wed Sep 14 15:11:05 2022 ] Training epoch: 49
304
+ [ Wed Sep 14 15:12:17 2022 ] Batch(95/123) done. Loss: 0.1675 lr:0.100000 network_time: 0.0305
305
+ [ Wed Sep 14 15:12:37 2022 ] Eval epoch: 49
306
+ [ Wed Sep 14 15:13:10 2022 ] Mean test loss of 258 batches: 2.3841023445129395.
307
+ [ Wed Sep 14 15:13:10 2022 ] Top1: 55.69%
308
+ [ Wed Sep 14 15:13:10 2022 ] Top5: 85.04%
309
+ [ Wed Sep 14 15:13:10 2022 ] Training epoch: 50
310
+ [ Wed Sep 14 15:14:06 2022 ] Batch(72/123) done. Loss: 0.2073 lr:0.100000 network_time: 0.0268
311
+ [ Wed Sep 14 15:14:43 2022 ] Eval epoch: 50
312
+ [ Wed Sep 14 15:15:16 2022 ] Mean test loss of 258 batches: 2.200166702270508.
313
+ [ Wed Sep 14 15:15:16 2022 ] Top1: 53.15%
314
+ [ Wed Sep 14 15:15:16 2022 ] Top5: 85.08%
315
+ [ Wed Sep 14 15:15:16 2022 ] Training epoch: 51
316
+ [ Wed Sep 14 15:15:55 2022 ] Batch(49/123) done. Loss: 0.2201 lr:0.100000 network_time: 0.0314
317
+ [ Wed Sep 14 15:16:48 2022 ] Eval epoch: 51
318
+ [ Wed Sep 14 15:17:21 2022 ] Mean test loss of 258 batches: 2.511787176132202.
319
+ [ Wed Sep 14 15:17:21 2022 ] Top1: 54.19%
320
+ [ Wed Sep 14 15:17:21 2022 ] Top5: 86.07%
321
+ [ Wed Sep 14 15:17:21 2022 ] Training epoch: 52
322
+ [ Wed Sep 14 15:17:44 2022 ] Batch(26/123) done. Loss: 0.2281 lr:0.100000 network_time: 0.0261
323
+ [ Wed Sep 14 15:18:54 2022 ] Eval epoch: 52
324
+ [ Wed Sep 14 15:19:26 2022 ] Mean test loss of 258 batches: 2.6677470207214355.
325
+ [ Wed Sep 14 15:19:26 2022 ] Top1: 52.61%
326
+ [ Wed Sep 14 15:19:26 2022 ] Top5: 83.11%
327
+ [ Wed Sep 14 15:19:26 2022 ] Training epoch: 53
328
+ [ Wed Sep 14 15:19:32 2022 ] Batch(3/123) done. Loss: 0.0533 lr:0.100000 network_time: 0.0331
329
+ [ Wed Sep 14 15:20:45 2022 ] Batch(103/123) done. Loss: 0.1253 lr:0.100000 network_time: 0.0315
330
+ [ Wed Sep 14 15:20:59 2022 ] Eval epoch: 53
331
+ [ Wed Sep 14 15:21:31 2022 ] Mean test loss of 258 batches: 2.311555862426758.
332
+ [ Wed Sep 14 15:21:31 2022 ] Top1: 51.09%
333
+ [ Wed Sep 14 15:21:31 2022 ] Top5: 84.42%
334
+ [ Wed Sep 14 15:21:32 2022 ] Training epoch: 54
335
+ [ Wed Sep 14 15:22:33 2022 ] Batch(80/123) done. Loss: 0.3264 lr:0.100000 network_time: 0.0266
336
+ [ Wed Sep 14 15:23:04 2022 ] Eval epoch: 54
337
+ [ Wed Sep 14 15:23:37 2022 ] Mean test loss of 258 batches: 1.9971171617507935.
338
+ [ Wed Sep 14 15:23:37 2022 ] Top1: 58.15%
339
+ [ Wed Sep 14 15:23:37 2022 ] Top5: 87.86%
340
+ [ Wed Sep 14 15:23:37 2022 ] Training epoch: 55
341
+ [ Wed Sep 14 15:24:23 2022 ] Batch(57/123) done. Loss: 0.1479 lr:0.100000 network_time: 0.0307
342
+ [ Wed Sep 14 15:25:10 2022 ] Eval epoch: 55
343
+ [ Wed Sep 14 15:25:43 2022 ] Mean test loss of 258 batches: 2.5352351665496826.
344
+ [ Wed Sep 14 15:25:43 2022 ] Top1: 51.66%
345
+ [ Wed Sep 14 15:25:43 2022 ] Top5: 83.76%
346
+ [ Wed Sep 14 15:25:43 2022 ] Training epoch: 56
347
+ [ Wed Sep 14 15:26:12 2022 ] Batch(34/123) done. Loss: 0.0831 lr:0.100000 network_time: 0.0305
348
+ [ Wed Sep 14 15:27:16 2022 ] Eval epoch: 56
349
+ [ Wed Sep 14 15:27:48 2022 ] Mean test loss of 258 batches: 3.105311632156372.
350
+ [ Wed Sep 14 15:27:48 2022 ] Top1: 46.59%
351
+ [ Wed Sep 14 15:27:48 2022 ] Top5: 78.11%
352
+ [ Wed Sep 14 15:27:48 2022 ] Training epoch: 57
353
+ [ Wed Sep 14 15:28:00 2022 ] Batch(11/123) done. Loss: 0.2058 lr:0.100000 network_time: 0.0274
354
+ [ Wed Sep 14 15:29:13 2022 ] Batch(111/123) done. Loss: 0.0805 lr:0.100000 network_time: 0.0269
355
+ [ Wed Sep 14 15:29:21 2022 ] Eval epoch: 57
356
+ [ Wed Sep 14 15:29:53 2022 ] Mean test loss of 258 batches: 2.3883368968963623.
357
+ [ Wed Sep 14 15:29:53 2022 ] Top1: 55.04%
358
+ [ Wed Sep 14 15:29:53 2022 ] Top5: 84.45%
359
+ [ Wed Sep 14 15:29:53 2022 ] Training epoch: 58
360
+ [ Wed Sep 14 15:31:01 2022 ] Batch(88/123) done. Loss: 0.1361 lr:0.100000 network_time: 0.0269
361
+ [ Wed Sep 14 15:31:26 2022 ] Eval epoch: 58
362
+ [ Wed Sep 14 15:31:59 2022 ] Mean test loss of 258 batches: 2.499546527862549.
363
+ [ Wed Sep 14 15:31:59 2022 ] Top1: 50.06%
364
+ [ Wed Sep 14 15:31:59 2022 ] Top5: 81.28%
365
+ [ Wed Sep 14 15:31:59 2022 ] Training epoch: 59
366
+ [ Wed Sep 14 15:32:50 2022 ] Batch(65/123) done. Loss: 0.1671 lr:0.100000 network_time: 0.0313
367
+ [ Wed Sep 14 15:33:32 2022 ] Eval epoch: 59
368
+ [ Wed Sep 14 15:34:04 2022 ] Mean test loss of 258 batches: 2.3240156173706055.
369
+ [ Wed Sep 14 15:34:04 2022 ] Top1: 55.70%
370
+ [ Wed Sep 14 15:34:05 2022 ] Top5: 86.14%
371
+ [ Wed Sep 14 15:34:05 2022 ] Training epoch: 60
372
+ [ Wed Sep 14 15:34:39 2022 ] Batch(42/123) done. Loss: 0.1858 lr:0.100000 network_time: 0.0314
373
+ [ Wed Sep 14 15:35:37 2022 ] Eval epoch: 60
374
+ [ Wed Sep 14 15:36:10 2022 ] Mean test loss of 258 batches: 3.158168315887451.
375
+ [ Wed Sep 14 15:36:10 2022 ] Top1: 45.92%
376
+ [ Wed Sep 14 15:36:10 2022 ] Top5: 78.06%
377
+ [ Wed Sep 14 15:36:10 2022 ] Training epoch: 61
378
+ [ Wed Sep 14 15:36:28 2022 ] Batch(19/123) done. Loss: 0.0107 lr:0.010000 network_time: 0.0306
379
+ [ Wed Sep 14 15:37:40 2022 ] Batch(119/123) done. Loss: 0.0232 lr:0.010000 network_time: 0.0257
380
+ [ Wed Sep 14 15:37:43 2022 ] Eval epoch: 61
381
+ [ Wed Sep 14 15:38:15 2022 ] Mean test loss of 258 batches: 1.911889910697937.
382
+ [ Wed Sep 14 15:38:15 2022 ] Top1: 62.90%
383
+ [ Wed Sep 14 15:38:15 2022 ] Top5: 89.40%
384
+ [ Wed Sep 14 15:38:15 2022 ] Training epoch: 62
385
+ [ Wed Sep 14 15:39:29 2022 ] Batch(96/123) done. Loss: 0.0245 lr:0.010000 network_time: 0.0267
386
+ [ Wed Sep 14 15:39:48 2022 ] Eval epoch: 62
387
+ [ Wed Sep 14 15:40:20 2022 ] Mean test loss of 258 batches: 2.0928752422332764.
388
+ [ Wed Sep 14 15:40:20 2022 ] Top1: 62.90%
389
+ [ Wed Sep 14 15:40:21 2022 ] Top5: 89.33%
390
+ [ Wed Sep 14 15:40:21 2022 ] Training epoch: 63
391
+ [ Wed Sep 14 15:41:17 2022 ] Batch(73/123) done. Loss: 0.0077 lr:0.010000 network_time: 0.0316
392
+ [ Wed Sep 14 15:41:53 2022 ] Eval epoch: 63
393
+ [ Wed Sep 14 15:42:27 2022 ] Mean test loss of 258 batches: 2.0350732803344727.
394
+ [ Wed Sep 14 15:42:27 2022 ] Top1: 63.79%
395
+ [ Wed Sep 14 15:42:27 2022 ] Top5: 89.59%
396
+ [ Wed Sep 14 15:42:27 2022 ] Training epoch: 64
397
+ [ Wed Sep 14 15:43:07 2022 ] Batch(50/123) done. Loss: 0.0201 lr:0.010000 network_time: 0.0264
398
+ [ Wed Sep 14 15:43:59 2022 ] Eval epoch: 64
399
+ [ Wed Sep 14 15:44:32 2022 ] Mean test loss of 258 batches: 1.8492361307144165.
400
+ [ Wed Sep 14 15:44:32 2022 ] Top1: 64.04%
401
+ [ Wed Sep 14 15:44:32 2022 ] Top5: 90.05%
402
+ [ Wed Sep 14 15:44:32 2022 ] Training epoch: 65
403
+ [ Wed Sep 14 15:44:55 2022 ] Batch(27/123) done. Loss: 0.0059 lr:0.010000 network_time: 0.0258
404
+ [ Wed Sep 14 15:46:04 2022 ] Eval epoch: 65
405
+ [ Wed Sep 14 15:46:37 2022 ] Mean test loss of 258 batches: 1.795392632484436.
406
+ [ Wed Sep 14 15:46:37 2022 ] Top1: 63.41%
407
+ [ Wed Sep 14 15:46:37 2022 ] Top5: 90.03%
408
+ [ Wed Sep 14 15:46:37 2022 ] Training epoch: 66
409
+ [ Wed Sep 14 15:46:44 2022 ] Batch(4/123) done. Loss: 0.0055 lr:0.010000 network_time: 0.0255
410
+ [ Wed Sep 14 15:47:56 2022 ] Batch(104/123) done. Loss: 0.0576 lr:0.010000 network_time: 0.0271
411
+ [ Wed Sep 14 15:48:10 2022 ] Eval epoch: 66
412
+ [ Wed Sep 14 15:48:42 2022 ] Mean test loss of 258 batches: 1.9341166019439697.
413
+ [ Wed Sep 14 15:48:42 2022 ] Top1: 64.14%
414
+ [ Wed Sep 14 15:48:42 2022 ] Top5: 89.90%
415
+ [ Wed Sep 14 15:48:42 2022 ] Training epoch: 67
416
+ [ Wed Sep 14 15:49:45 2022 ] Batch(81/123) done. Loss: 0.0215 lr:0.010000 network_time: 0.0269
417
+ [ Wed Sep 14 15:50:15 2022 ] Eval epoch: 67
418
+ [ Wed Sep 14 15:50:48 2022 ] Mean test loss of 258 batches: 2.1112568378448486.
419
+ [ Wed Sep 14 15:50:48 2022 ] Top1: 61.59%
420
+ [ Wed Sep 14 15:50:48 2022 ] Top5: 88.41%
421
+ [ Wed Sep 14 15:50:48 2022 ] Training epoch: 68
422
+ [ Wed Sep 14 15:51:34 2022 ] Batch(58/123) done. Loss: 0.0064 lr:0.010000 network_time: 0.0294
423
+ [ Wed Sep 14 15:52:20 2022 ] Eval epoch: 68
424
+ [ Wed Sep 14 15:52:53 2022 ] Mean test loss of 258 batches: 1.7816262245178223.
425
+ [ Wed Sep 14 15:52:53 2022 ] Top1: 64.25%
426
+ [ Wed Sep 14 15:52:53 2022 ] Top5: 90.30%
427
+ [ Wed Sep 14 15:52:53 2022 ] Training epoch: 69
428
+ [ Wed Sep 14 15:53:22 2022 ] Batch(35/123) done. Loss: 0.0043 lr:0.010000 network_time: 0.0313
429
+ [ Wed Sep 14 15:54:26 2022 ] Eval epoch: 69
430
+ [ Wed Sep 14 15:54:58 2022 ] Mean test loss of 258 batches: 1.813872218132019.
431
+ [ Wed Sep 14 15:54:58 2022 ] Top1: 62.75%
432
+ [ Wed Sep 14 15:54:58 2022 ] Top5: 89.63%
433
+ [ Wed Sep 14 15:54:59 2022 ] Training epoch: 70
434
+ [ Wed Sep 14 15:55:11 2022 ] Batch(12/123) done. Loss: 0.0021 lr:0.010000 network_time: 0.0276
435
+ [ Wed Sep 14 15:56:24 2022 ] Batch(112/123) done. Loss: 0.0146 lr:0.010000 network_time: 0.0307
436
+ [ Wed Sep 14 15:56:31 2022 ] Eval epoch: 70
437
+ [ Wed Sep 14 15:57:04 2022 ] Mean test loss of 258 batches: 1.836743712425232.
438
+ [ Wed Sep 14 15:57:04 2022 ] Top1: 64.22%
439
+ [ Wed Sep 14 15:57:04 2022 ] Top5: 90.13%
440
+ [ Wed Sep 14 15:57:04 2022 ] Training epoch: 71
441
+ [ Wed Sep 14 15:58:12 2022 ] Batch(89/123) done. Loss: 0.0099 lr:0.010000 network_time: 0.0314
442
+ [ Wed Sep 14 15:58:37 2022 ] Eval epoch: 71
443
+ [ Wed Sep 14 15:59:09 2022 ] Mean test loss of 258 batches: 1.9741806983947754.
444
+ [ Wed Sep 14 15:59:09 2022 ] Top1: 64.31%
445
+ [ Wed Sep 14 15:59:10 2022 ] Top5: 89.92%
446
+ [ Wed Sep 14 15:59:10 2022 ] Training epoch: 72
447
+ [ Wed Sep 14 16:00:01 2022 ] Batch(66/123) done. Loss: 0.0041 lr:0.010000 network_time: 0.0271
448
+ [ Wed Sep 14 16:00:42 2022 ] Eval epoch: 72
449
+ [ Wed Sep 14 16:01:15 2022 ] Mean test loss of 258 batches: 1.8784939050674438.
450
+ [ Wed Sep 14 16:01:15 2022 ] Top1: 62.79%
451
+ [ Wed Sep 14 16:01:15 2022 ] Top5: 89.43%
452
+ [ Wed Sep 14 16:01:15 2022 ] Training epoch: 73
453
+ [ Wed Sep 14 16:01:50 2022 ] Batch(43/123) done. Loss: 0.0081 lr:0.010000 network_time: 0.0291
454
+ [ Wed Sep 14 16:02:48 2022 ] Eval epoch: 73
455
+ [ Wed Sep 14 16:03:21 2022 ] Mean test loss of 258 batches: 1.9898936748504639.
456
+ [ Wed Sep 14 16:03:21 2022 ] Top1: 60.02%
457
+ [ Wed Sep 14 16:03:21 2022 ] Top5: 88.44%
458
+ [ Wed Sep 14 16:03:21 2022 ] Training epoch: 74
459
+ [ Wed Sep 14 16:03:39 2022 ] Batch(20/123) done. Loss: 0.0023 lr:0.010000 network_time: 0.0536
460
+ [ Wed Sep 14 16:04:52 2022 ] Batch(120/123) done. Loss: 0.0129 lr:0.010000 network_time: 0.0269
461
+ [ Wed Sep 14 16:04:54 2022 ] Eval epoch: 74
462
+ [ Wed Sep 14 16:05:26 2022 ] Mean test loss of 258 batches: 1.978050947189331.
463
+ [ Wed Sep 14 16:05:26 2022 ] Top1: 63.94%
464
+ [ Wed Sep 14 16:05:26 2022 ] Top5: 89.74%
465
+ [ Wed Sep 14 16:05:27 2022 ] Training epoch: 75
466
+ [ Wed Sep 14 16:06:41 2022 ] Batch(97/123) done. Loss: 0.0034 lr:0.010000 network_time: 0.0278
467
+ [ Wed Sep 14 16:06:59 2022 ] Eval epoch: 75
468
+ [ Wed Sep 14 16:07:32 2022 ] Mean test loss of 258 batches: 1.8710790872573853.
469
+ [ Wed Sep 14 16:07:32 2022 ] Top1: 64.35%
470
+ [ Wed Sep 14 16:07:32 2022 ] Top5: 90.26%
471
+ [ Wed Sep 14 16:07:32 2022 ] Training epoch: 76
472
+ [ Wed Sep 14 16:08:30 2022 ] Batch(74/123) done. Loss: 0.0054 lr:0.010000 network_time: 0.0272
473
+ [ Wed Sep 14 16:09:05 2022 ] Eval epoch: 76
474
+ [ Wed Sep 14 16:09:37 2022 ] Mean test loss of 258 batches: 1.8576815128326416.
475
+ [ Wed Sep 14 16:09:37 2022 ] Top1: 63.73%
476
+ [ Wed Sep 14 16:09:37 2022 ] Top5: 89.97%
477
+ [ Wed Sep 14 16:09:37 2022 ] Training epoch: 77
478
+ [ Wed Sep 14 16:10:18 2022 ] Batch(51/123) done. Loss: 0.0042 lr:0.010000 network_time: 0.0267
479
+ [ Wed Sep 14 16:11:09 2022 ] Eval epoch: 77
480
+ [ Wed Sep 14 16:11:42 2022 ] Mean test loss of 258 batches: 2.003507375717163.
481
+ [ Wed Sep 14 16:11:42 2022 ] Top1: 59.48%
482
+ [ Wed Sep 14 16:11:43 2022 ] Top5: 88.16%
483
+ [ Wed Sep 14 16:11:43 2022 ] Training epoch: 78
484
+ [ Wed Sep 14 16:12:07 2022 ] Batch(28/123) done. Loss: 0.0060 lr:0.010000 network_time: 0.0274
485
+ [ Wed Sep 14 16:13:15 2022 ] Eval epoch: 78
486
+ [ Wed Sep 14 16:13:48 2022 ] Mean test loss of 258 batches: 1.820636510848999.
487
+ [ Wed Sep 14 16:13:48 2022 ] Top1: 64.49%
488
+ [ Wed Sep 14 16:13:48 2022 ] Top5: 90.28%
489
+ [ Wed Sep 14 16:13:48 2022 ] Training epoch: 79
490
+ [ Wed Sep 14 16:13:56 2022 ] Batch(5/123) done. Loss: 0.0029 lr:0.010000 network_time: 0.0282
491
+ [ Wed Sep 14 16:15:08 2022 ] Batch(105/123) done. Loss: 0.0084 lr:0.010000 network_time: 0.0319
492
+ [ Wed Sep 14 16:15:21 2022 ] Eval epoch: 79
493
+ [ Wed Sep 14 16:15:54 2022 ] Mean test loss of 258 batches: 1.90288507938385.
494
+ [ Wed Sep 14 16:15:54 2022 ] Top1: 62.17%
495
+ [ Wed Sep 14 16:15:54 2022 ] Top5: 89.42%
496
+ [ Wed Sep 14 16:15:54 2022 ] Training epoch: 80
497
+ [ Wed Sep 14 16:16:57 2022 ] Batch(82/123) done. Loss: 0.0056 lr:0.010000 network_time: 0.0267
498
+ [ Wed Sep 14 16:17:26 2022 ] Eval epoch: 80
499
+ [ Wed Sep 14 16:17:59 2022 ] Mean test loss of 258 batches: 1.7706035375595093.
500
+ [ Wed Sep 14 16:17:59 2022 ] Top1: 64.63%
501
+ [ Wed Sep 14 16:17:59 2022 ] Top5: 90.46%
502
+ [ Wed Sep 14 16:17:59 2022 ] Training epoch: 81
503
+ [ Wed Sep 14 16:18:46 2022 ] Batch(59/123) done. Loss: 0.0040 lr:0.001000 network_time: 0.0296
504
+ [ Wed Sep 14 16:19:32 2022 ] Eval epoch: 81
505
+ [ Wed Sep 14 16:20:04 2022 ] Mean test loss of 258 batches: 1.8429886102676392.
506
+ [ Wed Sep 14 16:20:04 2022 ] Top1: 64.60%
507
+ [ Wed Sep 14 16:20:04 2022 ] Top5: 90.33%
508
+ [ Wed Sep 14 16:20:04 2022 ] Training epoch: 82
509
+ [ Wed Sep 14 16:20:34 2022 ] Batch(36/123) done. Loss: 0.0052 lr:0.001000 network_time: 0.0295
510
+ [ Wed Sep 14 16:21:37 2022 ] Eval epoch: 82
511
+ [ Wed Sep 14 16:22:10 2022 ] Mean test loss of 258 batches: 1.933942198753357.
512
+ [ Wed Sep 14 16:22:10 2022 ] Top1: 63.77%
513
+ [ Wed Sep 14 16:22:10 2022 ] Top5: 89.74%
514
+ [ Wed Sep 14 16:22:10 2022 ] Training epoch: 83
515
+ [ Wed Sep 14 16:22:23 2022 ] Batch(13/123) done. Loss: 0.0026 lr:0.001000 network_time: 0.0325
516
+ [ Wed Sep 14 16:23:36 2022 ] Batch(113/123) done. Loss: 0.0026 lr:0.001000 network_time: 0.0279
517
+ [ Wed Sep 14 16:23:43 2022 ] Eval epoch: 83
518
+ [ Wed Sep 14 16:24:15 2022 ] Mean test loss of 258 batches: 1.7652544975280762.
519
+ [ Wed Sep 14 16:24:15 2022 ] Top1: 64.11%
520
+ [ Wed Sep 14 16:24:15 2022 ] Top5: 90.29%
521
+ [ Wed Sep 14 16:24:16 2022 ] Training epoch: 84
522
+ [ Wed Sep 14 16:25:25 2022 ] Batch(90/123) done. Loss: 0.0038 lr:0.001000 network_time: 0.0314
523
+ [ Wed Sep 14 16:25:48 2022 ] Eval epoch: 84
524
+ [ Wed Sep 14 16:26:21 2022 ] Mean test loss of 258 batches: 1.8277084827423096.
525
+ [ Wed Sep 14 16:26:21 2022 ] Top1: 63.18%
526
+ [ Wed Sep 14 16:26:21 2022 ] Top5: 89.96%
527
+ [ Wed Sep 14 16:26:21 2022 ] Training epoch: 85
528
+ [ Wed Sep 14 16:27:13 2022 ] Batch(67/123) done. Loss: 0.0028 lr:0.001000 network_time: 0.0271
529
+ [ Wed Sep 14 16:27:54 2022 ] Eval epoch: 85
530
+ [ Wed Sep 14 16:28:26 2022 ] Mean test loss of 258 batches: 1.7809817790985107.
531
+ [ Wed Sep 14 16:28:26 2022 ] Top1: 64.47%
532
+ [ Wed Sep 14 16:28:27 2022 ] Top5: 90.45%
533
+ [ Wed Sep 14 16:28:27 2022 ] Training epoch: 86
534
+ [ Wed Sep 14 16:29:02 2022 ] Batch(44/123) done. Loss: 0.0091 lr:0.001000 network_time: 0.0278
535
+ [ Wed Sep 14 16:29:59 2022 ] Eval epoch: 86
536
+ [ Wed Sep 14 16:30:32 2022 ] Mean test loss of 258 batches: 1.8707036972045898.
537
+ [ Wed Sep 14 16:30:32 2022 ] Top1: 62.34%
538
+ [ Wed Sep 14 16:30:32 2022 ] Top5: 89.38%
539
+ [ Wed Sep 14 16:30:32 2022 ] Training epoch: 87
540
+ [ Wed Sep 14 16:30:52 2022 ] Batch(21/123) done. Loss: 0.0035 lr:0.001000 network_time: 0.0260
541
+ [ Wed Sep 14 16:32:04 2022 ] Batch(121/123) done. Loss: 0.0062 lr:0.001000 network_time: 0.0307
542
+ [ Wed Sep 14 16:32:05 2022 ] Eval epoch: 87
543
+ [ Wed Sep 14 16:32:38 2022 ] Mean test loss of 258 batches: 1.8078993558883667.
544
+ [ Wed Sep 14 16:32:38 2022 ] Top1: 64.64%
545
+ [ Wed Sep 14 16:32:38 2022 ] Top5: 90.34%
546
+ [ Wed Sep 14 16:32:38 2022 ] Training epoch: 88
547
+ [ Wed Sep 14 16:33:53 2022 ] Batch(98/123) done. Loss: 0.0088 lr:0.001000 network_time: 0.0265
548
+ [ Wed Sep 14 16:34:11 2022 ] Eval epoch: 88
549
+ [ Wed Sep 14 16:34:44 2022 ] Mean test loss of 258 batches: 1.7731508016586304.
550
+ [ Wed Sep 14 16:34:44 2022 ] Top1: 64.29%
551
+ [ Wed Sep 14 16:34:44 2022 ] Top5: 90.33%
552
+ [ Wed Sep 14 16:34:44 2022 ] Training epoch: 89
553
+ [ Wed Sep 14 16:35:42 2022 ] Batch(75/123) done. Loss: 0.0014 lr:0.001000 network_time: 0.0288
554
+ [ Wed Sep 14 16:36:16 2022 ] Eval epoch: 89
555
+ [ Wed Sep 14 16:36:49 2022 ] Mean test loss of 258 batches: 1.8627841472625732.
556
+ [ Wed Sep 14 16:36:49 2022 ] Top1: 62.02%
557
+ [ Wed Sep 14 16:36:49 2022 ] Top5: 89.46%
558
+ [ Wed Sep 14 16:36:49 2022 ] Training epoch: 90
559
+ [ Wed Sep 14 16:37:31 2022 ] Batch(52/123) done. Loss: 0.0049 lr:0.001000 network_time: 0.0316
560
+ [ Wed Sep 14 16:38:22 2022 ] Eval epoch: 90
561
+ [ Wed Sep 14 16:38:55 2022 ] Mean test loss of 258 batches: 1.824079990386963.
562
+ [ Wed Sep 14 16:38:55 2022 ] Top1: 64.66%
563
+ [ Wed Sep 14 16:38:55 2022 ] Top5: 90.42%
564
+ [ Wed Sep 14 16:38:55 2022 ] Training epoch: 91
565
+ [ Wed Sep 14 16:39:20 2022 ] Batch(29/123) done. Loss: 0.0035 lr:0.001000 network_time: 0.0309
566
+ [ Wed Sep 14 16:40:28 2022 ] Eval epoch: 91
567
+ [ Wed Sep 14 16:41:01 2022 ] Mean test loss of 258 batches: 1.771101474761963.
568
+ [ Wed Sep 14 16:41:01 2022 ] Top1: 64.53%
569
+ [ Wed Sep 14 16:41:01 2022 ] Top5: 90.39%
570
+ [ Wed Sep 14 16:41:01 2022 ] Training epoch: 92
571
+ [ Wed Sep 14 16:41:09 2022 ] Batch(6/123) done. Loss: 0.0039 lr:0.001000 network_time: 0.0326
572
+ [ Wed Sep 14 16:42:22 2022 ] Batch(106/123) done. Loss: 0.0112 lr:0.001000 network_time: 0.0315
573
+ [ Wed Sep 14 16:42:33 2022 ] Eval epoch: 92
574
+ [ Wed Sep 14 16:43:06 2022 ] Mean test loss of 258 batches: 1.9300997257232666.
575
+ [ Wed Sep 14 16:43:06 2022 ] Top1: 64.31%
576
+ [ Wed Sep 14 16:43:06 2022 ] Top5: 90.14%
577
+ [ Wed Sep 14 16:43:06 2022 ] Training epoch: 93
578
+ [ Wed Sep 14 16:44:10 2022 ] Batch(83/123) done. Loss: 0.0053 lr:0.001000 network_time: 0.0368
579
+ [ Wed Sep 14 16:44:39 2022 ] Eval epoch: 93
580
+ [ Wed Sep 14 16:45:12 2022 ] Mean test loss of 258 batches: 1.7791281938552856.
581
+ [ Wed Sep 14 16:45:12 2022 ] Top1: 64.49%
582
+ [ Wed Sep 14 16:45:12 2022 ] Top5: 90.49%
583
+ [ Wed Sep 14 16:45:12 2022 ] Training epoch: 94
584
+ [ Wed Sep 14 16:45:59 2022 ] Batch(60/123) done. Loss: 0.0029 lr:0.001000 network_time: 0.0300
585
+ [ Wed Sep 14 16:46:45 2022 ] Eval epoch: 94
586
+ [ Wed Sep 14 16:47:17 2022 ] Mean test loss of 258 batches: 1.7987879514694214.
587
+ [ Wed Sep 14 16:47:17 2022 ] Top1: 64.46%
588
+ [ Wed Sep 14 16:47:17 2022 ] Top5: 90.42%
589
+ [ Wed Sep 14 16:47:17 2022 ] Training epoch: 95
590
+ [ Wed Sep 14 16:47:48 2022 ] Batch(37/123) done. Loss: 0.0069 lr:0.001000 network_time: 0.0321
591
+ [ Wed Sep 14 16:48:50 2022 ] Eval epoch: 95
592
+ [ Wed Sep 14 16:49:22 2022 ] Mean test loss of 258 batches: 1.9317775964736938.
593
+ [ Wed Sep 14 16:49:22 2022 ] Top1: 64.55%
594
+ [ Wed Sep 14 16:49:23 2022 ] Top5: 90.23%
595
+ [ Wed Sep 14 16:49:23 2022 ] Training epoch: 96
596
+ [ Wed Sep 14 16:49:37 2022 ] Batch(14/123) done. Loss: 0.0553 lr:0.001000 network_time: 0.0329
597
+ [ Wed Sep 14 16:50:49 2022 ] Batch(114/123) done. Loss: 0.0015 lr:0.001000 network_time: 0.0284
598
+ [ Wed Sep 14 16:50:55 2022 ] Eval epoch: 96
599
+ [ Wed Sep 14 16:51:28 2022 ] Mean test loss of 258 batches: 1.7748775482177734.
600
+ [ Wed Sep 14 16:51:28 2022 ] Top1: 63.97%
601
+ [ Wed Sep 14 16:51:28 2022 ] Top5: 90.09%
602
+ [ Wed Sep 14 16:51:28 2022 ] Training epoch: 97
603
+ [ Wed Sep 14 16:52:38 2022 ] Batch(91/123) done. Loss: 0.0035 lr:0.001000 network_time: 0.0269
604
+ [ Wed Sep 14 16:53:01 2022 ] Eval epoch: 97
605
+ [ Wed Sep 14 16:53:34 2022 ] Mean test loss of 258 batches: 2.001574754714966.
606
+ [ Wed Sep 14 16:53:34 2022 ] Top1: 59.91%
607
+ [ Wed Sep 14 16:53:34 2022 ] Top5: 88.32%
608
+ [ Wed Sep 14 16:53:34 2022 ] Training epoch: 98
609
+ [ Wed Sep 14 16:54:27 2022 ] Batch(68/123) done. Loss: 0.0029 lr:0.001000 network_time: 0.0311
610
+ [ Wed Sep 14 16:55:07 2022 ] Eval epoch: 98
611
+ [ Wed Sep 14 16:55:40 2022 ] Mean test loss of 258 batches: 1.9462146759033203.
612
+ [ Wed Sep 14 16:55:40 2022 ] Top1: 62.26%
613
+ [ Wed Sep 14 16:55:40 2022 ] Top5: 89.31%
614
+ [ Wed Sep 14 16:55:40 2022 ] Training epoch: 99
615
+ [ Wed Sep 14 16:56:16 2022 ] Batch(45/123) done. Loss: 0.0050 lr:0.001000 network_time: 0.0275
616
+ [ Wed Sep 14 16:57:12 2022 ] Eval epoch: 99
617
+ [ Wed Sep 14 16:57:45 2022 ] Mean test loss of 258 batches: 1.8766233921051025.
618
+ [ Wed Sep 14 16:57:45 2022 ] Top1: 64.34%
619
+ [ Wed Sep 14 16:57:45 2022 ] Top5: 90.11%
620
+ [ Wed Sep 14 16:57:45 2022 ] Training epoch: 100
621
+ [ Wed Sep 14 16:58:05 2022 ] Batch(22/123) done. Loss: 0.0029 lr:0.001000 network_time: 0.0298
622
+ [ Wed Sep 14 16:59:17 2022 ] Batch(122/123) done. Loss: 0.0024 lr:0.001000 network_time: 0.0273
623
+ [ Wed Sep 14 16:59:18 2022 ] Eval epoch: 100
624
+ [ Wed Sep 14 16:59:50 2022 ] Mean test loss of 258 batches: 2.055025339126587.
625
+ [ Wed Sep 14 16:59:50 2022 ] Top1: 63.57%
626
+ [ Wed Sep 14 16:59:50 2022 ] Top5: 89.82%
627
+ [ Wed Sep 14 16:59:51 2022 ] Training epoch: 101
628
+ [ Wed Sep 14 17:01:06 2022 ] Batch(99/123) done. Loss: 0.0027 lr:0.000100 network_time: 0.0278
629
+ [ Wed Sep 14 17:01:23 2022 ] Eval epoch: 101
630
+ [ Wed Sep 14 17:01:56 2022 ] Mean test loss of 258 batches: 2.1624438762664795.
631
+ [ Wed Sep 14 17:01:56 2022 ] Top1: 63.18%
632
+ [ Wed Sep 14 17:01:56 2022 ] Top5: 89.18%
633
+ [ Wed Sep 14 17:01:56 2022 ] Training epoch: 102
634
+ [ Wed Sep 14 17:02:55 2022 ] Batch(76/123) done. Loss: 0.0037 lr:0.000100 network_time: 0.0286
635
+ [ Wed Sep 14 17:03:29 2022 ] Eval epoch: 102
636
+ [ Wed Sep 14 17:04:01 2022 ] Mean test loss of 258 batches: 1.8533306121826172.
637
+ [ Wed Sep 14 17:04:01 2022 ] Top1: 62.90%
638
+ [ Wed Sep 14 17:04:02 2022 ] Top5: 89.73%
639
+ [ Wed Sep 14 17:04:02 2022 ] Training epoch: 103
640
+ [ Wed Sep 14 17:04:44 2022 ] Batch(53/123) done. Loss: 0.0062 lr:0.000100 network_time: 0.0278
641
+ [ Wed Sep 14 17:05:34 2022 ] Eval epoch: 103
642
+ [ Wed Sep 14 17:06:07 2022 ] Mean test loss of 258 batches: 1.783291220664978.
643
+ [ Wed Sep 14 17:06:07 2022 ] Top1: 64.95%
644
+ [ Wed Sep 14 17:06:07 2022 ] Top5: 90.61%
645
+ [ Wed Sep 14 17:06:07 2022 ] Training epoch: 104
646
+ [ Wed Sep 14 17:06:33 2022 ] Batch(30/123) done. Loss: 0.0126 lr:0.000100 network_time: 0.0259
647
+ [ Wed Sep 14 17:07:40 2022 ] Eval epoch: 104
648
+ [ Wed Sep 14 17:08:12 2022 ] Mean test loss of 258 batches: 2.0738165378570557.
649
+ [ Wed Sep 14 17:08:12 2022 ] Top1: 63.87%
650
+ [ Wed Sep 14 17:08:12 2022 ] Top5: 89.65%
651
+ [ Wed Sep 14 17:08:12 2022 ] Training epoch: 105
652
+ [ Wed Sep 14 17:08:21 2022 ] Batch(7/123) done. Loss: 0.0053 lr:0.000100 network_time: 0.0263
653
+ [ Wed Sep 14 17:09:33 2022 ] Batch(107/123) done. Loss: 0.0022 lr:0.000100 network_time: 0.0276
654
+ [ Wed Sep 14 17:09:45 2022 ] Eval epoch: 105
655
+ [ Wed Sep 14 17:10:18 2022 ] Mean test loss of 258 batches: 1.8350716829299927.
656
+ [ Wed Sep 14 17:10:18 2022 ] Top1: 63.67%
657
+ [ Wed Sep 14 17:10:18 2022 ] Top5: 90.12%
658
+ [ Wed Sep 14 17:10:18 2022 ] Training epoch: 106
659
+ [ Wed Sep 14 17:11:22 2022 ] Batch(84/123) done. Loss: 0.0068 lr:0.000100 network_time: 0.0269
660
+ [ Wed Sep 14 17:11:50 2022 ] Eval epoch: 106
661
+ [ Wed Sep 14 17:12:23 2022 ] Mean test loss of 258 batches: 1.885536551475525.
662
+ [ Wed Sep 14 17:12:23 2022 ] Top1: 63.25%
663
+ [ Wed Sep 14 17:12:23 2022 ] Top5: 89.85%
664
+ [ Wed Sep 14 17:12:23 2022 ] Training epoch: 107
665
+ [ Wed Sep 14 17:13:11 2022 ] Batch(61/123) done. Loss: 0.0039 lr:0.000100 network_time: 0.0272
666
+ [ Wed Sep 14 17:13:56 2022 ] Eval epoch: 107
667
+ [ Wed Sep 14 17:14:28 2022 ] Mean test loss of 258 batches: 1.8537285327911377.
668
+ [ Wed Sep 14 17:14:28 2022 ] Top1: 64.15%
669
+ [ Wed Sep 14 17:14:28 2022 ] Top5: 90.17%
670
+ [ Wed Sep 14 17:14:28 2022 ] Training epoch: 108
671
+ [ Wed Sep 14 17:15:00 2022 ] Batch(38/123) done. Loss: 0.0113 lr:0.000100 network_time: 0.0306
672
+ [ Wed Sep 14 17:16:01 2022 ] Eval epoch: 108
673
+ [ Wed Sep 14 17:16:33 2022 ] Mean test loss of 258 batches: 1.797559380531311.
674
+ [ Wed Sep 14 17:16:34 2022 ] Top1: 64.61%
675
+ [ Wed Sep 14 17:16:34 2022 ] Top5: 90.61%
676
+ [ Wed Sep 14 17:16:34 2022 ] Training epoch: 109
677
+ [ Wed Sep 14 17:16:48 2022 ] Batch(15/123) done. Loss: 0.0047 lr:0.000100 network_time: 0.0307
678
+ [ Wed Sep 14 17:18:01 2022 ] Batch(115/123) done. Loss: 0.0027 lr:0.000100 network_time: 0.0278
679
+ [ Wed Sep 14 17:18:06 2022 ] Eval epoch: 109
680
+ [ Wed Sep 14 17:18:39 2022 ] Mean test loss of 258 batches: 1.8364958763122559.
681
+ [ Wed Sep 14 17:18:39 2022 ] Top1: 64.34%
682
+ [ Wed Sep 14 17:18:39 2022 ] Top5: 90.31%
683
+ [ Wed Sep 14 17:18:39 2022 ] Training epoch: 110
684
+ [ Wed Sep 14 17:19:50 2022 ] Batch(92/123) done. Loss: 0.0052 lr:0.000100 network_time: 0.0274
685
+ [ Wed Sep 14 17:20:12 2022 ] Eval epoch: 110
686
+ [ Wed Sep 14 17:20:45 2022 ] Mean test loss of 258 batches: 1.7939398288726807.
687
+ [ Wed Sep 14 17:20:45 2022 ] Top1: 64.52%
688
+ [ Wed Sep 14 17:20:45 2022 ] Top5: 90.28%
689
+ [ Wed Sep 14 17:20:45 2022 ] Training epoch: 111
690
+ [ Wed Sep 14 17:21:39 2022 ] Batch(69/123) done. Loss: 0.0013 lr:0.000100 network_time: 0.0265
691
+ [ Wed Sep 14 17:22:17 2022 ] Eval epoch: 111
692
+ [ Wed Sep 14 17:22:50 2022 ] Mean test loss of 258 batches: 1.9241000413894653.
693
+ [ Wed Sep 14 17:22:50 2022 ] Top1: 64.59%
694
+ [ Wed Sep 14 17:22:50 2022 ] Top5: 90.22%
695
+ [ Wed Sep 14 17:22:50 2022 ] Training epoch: 112
696
+ [ Wed Sep 14 17:23:27 2022 ] Batch(46/123) done. Loss: 0.0044 lr:0.000100 network_time: 0.0266
697
+ [ Wed Sep 14 17:24:23 2022 ] Eval epoch: 112
698
+ [ Wed Sep 14 17:24:55 2022 ] Mean test loss of 258 batches: 1.7735662460327148.
699
+ [ Wed Sep 14 17:24:55 2022 ] Top1: 64.43%
700
+ [ Wed Sep 14 17:24:55 2022 ] Top5: 90.43%
701
+ [ Wed Sep 14 17:24:55 2022 ] Training epoch: 113
702
+ [ Wed Sep 14 17:25:16 2022 ] Batch(23/123) done. Loss: 0.0043 lr:0.000100 network_time: 0.0265
703
+ [ Wed Sep 14 17:26:28 2022 ] Eval epoch: 113
704
+ [ Wed Sep 14 17:27:01 2022 ] Mean test loss of 258 batches: 1.8969343900680542.
705
+ [ Wed Sep 14 17:27:01 2022 ] Top1: 64.16%
706
+ [ Wed Sep 14 17:27:01 2022 ] Top5: 90.10%
707
+ [ Wed Sep 14 17:27:01 2022 ] Training epoch: 114
708
+ [ Wed Sep 14 17:27:05 2022 ] Batch(0/123) done. Loss: 0.0043 lr:0.000100 network_time: 0.0598
709
+ [ Wed Sep 14 17:28:17 2022 ] Batch(100/123) done. Loss: 0.0062 lr:0.000100 network_time: 0.0330
710
+ [ Wed Sep 14 17:28:34 2022 ] Eval epoch: 114
711
+ [ Wed Sep 14 17:29:07 2022 ] Mean test loss of 258 batches: 1.8805485963821411.
712
+ [ Wed Sep 14 17:29:07 2022 ] Top1: 63.18%
713
+ [ Wed Sep 14 17:29:07 2022 ] Top5: 89.80%
714
+ [ Wed Sep 14 17:29:07 2022 ] Training epoch: 115
715
+ [ Wed Sep 14 17:30:07 2022 ] Batch(77/123) done. Loss: 0.0041 lr:0.000100 network_time: 0.0271
716
+ [ Wed Sep 14 17:30:39 2022 ] Eval epoch: 115
717
+ [ Wed Sep 14 17:31:12 2022 ] Mean test loss of 258 batches: 1.844014286994934.
718
+ [ Wed Sep 14 17:31:12 2022 ] Top1: 64.09%
719
+ [ Wed Sep 14 17:31:12 2022 ] Top5: 90.31%
720
+ [ Wed Sep 14 17:31:12 2022 ] Training epoch: 116
721
+ [ Wed Sep 14 17:31:55 2022 ] Batch(54/123) done. Loss: 0.0065 lr:0.000100 network_time: 0.0270
722
+ [ Wed Sep 14 17:32:45 2022 ] Eval epoch: 116
723
+ [ Wed Sep 14 17:33:18 2022 ] Mean test loss of 258 batches: 1.9180779457092285.
724
+ [ Wed Sep 14 17:33:18 2022 ] Top1: 63.95%
725
+ [ Wed Sep 14 17:33:18 2022 ] Top5: 90.06%
726
+ [ Wed Sep 14 17:33:18 2022 ] Training epoch: 117
727
+ [ Wed Sep 14 17:33:44 2022 ] Batch(31/123) done. Loss: 0.0030 lr:0.000100 network_time: 0.0264
728
+ [ Wed Sep 14 17:34:51 2022 ] Eval epoch: 117
729
+ [ Wed Sep 14 17:35:23 2022 ] Mean test loss of 258 batches: 1.81834077835083.
730
+ [ Wed Sep 14 17:35:23 2022 ] Top1: 64.39%
731
+ [ Wed Sep 14 17:35:23 2022 ] Top5: 90.51%
732
+ [ Wed Sep 14 17:35:23 2022 ] Training epoch: 118
733
+ [ Wed Sep 14 17:35:33 2022 ] Batch(8/123) done. Loss: 0.0031 lr:0.000100 network_time: 0.0276
734
+ [ Wed Sep 14 17:36:46 2022 ] Batch(108/123) done. Loss: 0.0026 lr:0.000100 network_time: 0.0264
735
+ [ Wed Sep 14 17:36:56 2022 ] Eval epoch: 118
736
+ [ Wed Sep 14 17:37:29 2022 ] Mean test loss of 258 batches: 1.917968511581421.
737
+ [ Wed Sep 14 17:37:29 2022 ] Top1: 63.95%
738
+ [ Wed Sep 14 17:37:29 2022 ] Top5: 90.01%
739
+ [ Wed Sep 14 17:37:29 2022 ] Training epoch: 119
740
+ [ Wed Sep 14 17:38:35 2022 ] Batch(85/123) done. Loss: 0.0034 lr:0.000100 network_time: 0.0285
741
+ [ Wed Sep 14 17:39:02 2022 ] Eval epoch: 119
742
+ [ Wed Sep 14 17:39:35 2022 ] Mean test loss of 258 batches: 1.7381958961486816.
743
+ [ Wed Sep 14 17:39:35 2022 ] Top1: 64.74%
744
+ [ Wed Sep 14 17:39:35 2022 ] Top5: 90.64%
745
+ [ Wed Sep 14 17:39:35 2022 ] Training epoch: 120
746
+ [ Wed Sep 14 17:40:24 2022 ] Batch(62/123) done. Loss: 0.0068 lr:0.000100 network_time: 0.0260
747
+ [ Wed Sep 14 17:41:08 2022 ] Eval epoch: 120
748
+ [ Wed Sep 14 17:41:41 2022 ] Mean test loss of 258 batches: 1.9656460285186768.
749
+ [ Wed Sep 14 17:41:41 2022 ] Top1: 63.85%
750
+ [ Wed Sep 14 17:41:41 2022 ] Top5: 89.90%
751
+ [ Wed Sep 14 17:41:41 2022 ] Training epoch: 121
752
+ [ Wed Sep 14 17:42:13 2022 ] Batch(39/123) done. Loss: 0.0029 lr:0.000100 network_time: 0.0317
753
+ [ Wed Sep 14 17:43:13 2022 ] Eval epoch: 121
754
+ [ Wed Sep 14 17:43:46 2022 ] Mean test loss of 258 batches: 1.8274791240692139.
755
+ [ Wed Sep 14 17:43:46 2022 ] Top1: 64.00%
756
+ [ Wed Sep 14 17:43:46 2022 ] Top5: 90.22%
757
+ [ Wed Sep 14 17:43:46 2022 ] Training epoch: 122
758
+ [ Wed Sep 14 17:44:02 2022 ] Batch(16/123) done. Loss: 0.0178 lr:0.000100 network_time: 0.0337
759
+ [ Wed Sep 14 17:45:15 2022 ] Batch(116/123) done. Loss: 0.0044 lr:0.000100 network_time: 0.0264
760
+ [ Wed Sep 14 17:45:19 2022 ] Eval epoch: 122
761
+ [ Wed Sep 14 17:45:52 2022 ] Mean test loss of 258 batches: 1.8402843475341797.
762
+ [ Wed Sep 14 17:45:52 2022 ] Top1: 63.36%
763
+ [ Wed Sep 14 17:45:53 2022 ] Top5: 90.08%
764
+ [ Wed Sep 14 17:45:53 2022 ] Training epoch: 123
765
+ [ Wed Sep 14 17:47:04 2022 ] Batch(93/123) done. Loss: 0.0018 lr:0.000100 network_time: 0.0277
766
+ [ Wed Sep 14 17:47:25 2022 ] Eval epoch: 123
767
+ [ Wed Sep 14 17:47:58 2022 ] Mean test loss of 258 batches: 1.957480549812317.
768
+ [ Wed Sep 14 17:47:58 2022 ] Top1: 64.07%
769
+ [ Wed Sep 14 17:47:59 2022 ] Top5: 90.03%
770
+ [ Wed Sep 14 17:47:59 2022 ] Training epoch: 124
771
+ [ Wed Sep 14 17:48:53 2022 ] Batch(70/123) done. Loss: 0.0031 lr:0.000100 network_time: 0.0276
772
+ [ Wed Sep 14 17:49:31 2022 ] Eval epoch: 124
773
+ [ Wed Sep 14 17:50:04 2022 ] Mean test loss of 258 batches: 1.8941755294799805.
774
+ [ Wed Sep 14 17:50:04 2022 ] Top1: 62.70%
775
+ [ Wed Sep 14 17:50:04 2022 ] Top5: 89.49%
776
+ [ Wed Sep 14 17:50:05 2022 ] Training epoch: 125
777
+ [ Wed Sep 14 17:50:43 2022 ] Batch(47/123) done. Loss: 0.0070 lr:0.000100 network_time: 0.0277
778
+ [ Wed Sep 14 17:51:37 2022 ] Eval epoch: 125
779
+ [ Wed Sep 14 17:52:10 2022 ] Mean test loss of 258 batches: 1.9753775596618652.
780
+ [ Wed Sep 14 17:52:10 2022 ] Top1: 64.40%
781
+ [ Wed Sep 14 17:52:10 2022 ] Top5: 90.04%
782
+ [ Wed Sep 14 17:52:10 2022 ] Training epoch: 126
783
+ [ Wed Sep 14 17:52:31 2022 ] Batch(24/123) done. Loss: 0.0036 lr:0.000100 network_time: 0.0311
784
+ [ Wed Sep 14 17:53:43 2022 ] Eval epoch: 126
785
+ [ Wed Sep 14 17:54:16 2022 ] Mean test loss of 258 batches: 1.9079700708389282.
786
+ [ Wed Sep 14 17:54:16 2022 ] Top1: 63.95%
787
+ [ Wed Sep 14 17:54:16 2022 ] Top5: 89.97%
788
+ [ Wed Sep 14 17:54:16 2022 ] Training epoch: 127
789
+ [ Wed Sep 14 17:54:20 2022 ] Batch(1/123) done. Loss: 0.0100 lr:0.000100 network_time: 0.0312
790
+ [ Wed Sep 14 17:55:33 2022 ] Batch(101/123) done. Loss: 0.0022 lr:0.000100 network_time: 0.0288
791
+ [ Wed Sep 14 17:55:48 2022 ] Eval epoch: 127
792
+ [ Wed Sep 14 17:56:21 2022 ] Mean test loss of 258 batches: 1.9740208387374878.
793
+ [ Wed Sep 14 17:56:21 2022 ] Top1: 63.95%
794
+ [ Wed Sep 14 17:56:22 2022 ] Top5: 89.88%
795
+ [ Wed Sep 14 17:56:22 2022 ] Training epoch: 128
796
+ [ Wed Sep 14 17:57:22 2022 ] Batch(78/123) done. Loss: 0.0012 lr:0.000100 network_time: 0.0290
797
+ [ Wed Sep 14 17:57:55 2022 ] Eval epoch: 128
798
+ [ Wed Sep 14 17:58:28 2022 ] Mean test loss of 258 batches: 1.942625641822815.
799
+ [ Wed Sep 14 17:58:28 2022 ] Top1: 63.92%
800
+ [ Wed Sep 14 17:58:28 2022 ] Top5: 90.06%
801
+ [ Wed Sep 14 17:58:28 2022 ] Training epoch: 129
802
+ [ Wed Sep 14 17:59:12 2022 ] Batch(55/123) done. Loss: 0.0108 lr:0.000100 network_time: 0.0276
803
+ [ Wed Sep 14 18:00:00 2022 ] Eval epoch: 129
804
+ [ Wed Sep 14 18:00:33 2022 ] Mean test loss of 258 batches: 1.9056057929992676.
805
+ [ Wed Sep 14 18:00:33 2022 ] Top1: 61.71%
806
+ [ Wed Sep 14 18:00:33 2022 ] Top5: 89.24%
807
+ [ Wed Sep 14 18:00:34 2022 ] Training epoch: 130
808
+ [ Wed Sep 14 18:01:01 2022 ] Batch(32/123) done. Loss: 0.0126 lr:0.000100 network_time: 0.0275
809
+ [ Wed Sep 14 18:02:06 2022 ] Eval epoch: 130
810
+ [ Wed Sep 14 18:02:39 2022 ] Mean test loss of 258 batches: 1.9844281673431396.
811
+ [ Wed Sep 14 18:02:39 2022 ] Top1: 64.03%
812
+ [ Wed Sep 14 18:02:39 2022 ] Top5: 89.79%
813
+ [ Wed Sep 14 18:02:39 2022 ] Training epoch: 131
814
+ [ Wed Sep 14 18:02:49 2022 ] Batch(9/123) done. Loss: 0.0030 lr:0.000100 network_time: 0.0293
815
+ [ Wed Sep 14 18:04:02 2022 ] Batch(109/123) done. Loss: 0.0041 lr:0.000100 network_time: 0.0286
816
+ [ Wed Sep 14 18:04:11 2022 ] Eval epoch: 131
817
+ [ Wed Sep 14 18:04:44 2022 ] Mean test loss of 258 batches: 2.0076076984405518.
818
+ [ Wed Sep 14 18:04:44 2022 ] Top1: 64.24%
819
+ [ Wed Sep 14 18:04:44 2022 ] Top5: 89.90%
820
+ [ Wed Sep 14 18:04:44 2022 ] Training epoch: 132
821
+ [ Wed Sep 14 18:05:50 2022 ] Batch(86/123) done. Loss: 0.0024 lr:0.000100 network_time: 0.0298
822
+ [ Wed Sep 14 18:06:17 2022 ] Eval epoch: 132
823
+ [ Wed Sep 14 18:06:49 2022 ] Mean test loss of 258 batches: 1.7660404443740845.
824
+ [ Wed Sep 14 18:06:49 2022 ] Top1: 64.48%
825
+ [ Wed Sep 14 18:06:49 2022 ] Top5: 90.42%
826
+ [ Wed Sep 14 18:06:50 2022 ] Training epoch: 133
827
+ [ Wed Sep 14 18:07:39 2022 ] Batch(63/123) done. Loss: 0.0091 lr:0.000100 network_time: 0.0277
828
+ [ Wed Sep 14 18:08:22 2022 ] Eval epoch: 133
829
+ [ Wed Sep 14 18:08:55 2022 ] Mean test loss of 258 batches: 1.7811264991760254.
830
+ [ Wed Sep 14 18:08:55 2022 ] Top1: 64.15%
831
+ [ Wed Sep 14 18:08:55 2022 ] Top5: 90.29%
832
+ [ Wed Sep 14 18:08:55 2022 ] Training epoch: 134
833
+ [ Wed Sep 14 18:09:28 2022 ] Batch(40/123) done. Loss: 0.0097 lr:0.000100 network_time: 0.0268
834
+ [ Wed Sep 14 18:10:28 2022 ] Eval epoch: 134
835
+ [ Wed Sep 14 18:11:00 2022 ] Mean test loss of 258 batches: 1.7638517618179321.
836
+ [ Wed Sep 14 18:11:00 2022 ] Top1: 64.26%
837
+ [ Wed Sep 14 18:11:01 2022 ] Top5: 90.34%
838
+ [ Wed Sep 14 18:11:01 2022 ] Training epoch: 135
839
+ [ Wed Sep 14 18:11:17 2022 ] Batch(17/123) done. Loss: 0.0032 lr:0.000100 network_time: 0.0278
840
+ [ Wed Sep 14 18:12:29 2022 ] Batch(117/123) done. Loss: 0.0040 lr:0.000100 network_time: 0.0329
841
+ [ Wed Sep 14 18:12:33 2022 ] Eval epoch: 135
842
+ [ Wed Sep 14 18:13:06 2022 ] Mean test loss of 258 batches: 1.834794044494629.
843
+ [ Wed Sep 14 18:13:06 2022 ] Top1: 62.77%
844
+ [ Wed Sep 14 18:13:06 2022 ] Top5: 89.71%
845
+ [ Wed Sep 14 18:13:06 2022 ] Training epoch: 136
846
+ [ Wed Sep 14 18:14:18 2022 ] Batch(94/123) done. Loss: 0.0225 lr:0.000100 network_time: 0.0307
847
+ [ Wed Sep 14 18:14:39 2022 ] Eval epoch: 136
848
+ [ Wed Sep 14 18:15:11 2022 ] Mean test loss of 258 batches: 1.848984718322754.
849
+ [ Wed Sep 14 18:15:11 2022 ] Top1: 64.85%
850
+ [ Wed Sep 14 18:15:11 2022 ] Top5: 90.39%
851
+ [ Wed Sep 14 18:15:11 2022 ] Training epoch: 137
852
+ [ Wed Sep 14 18:16:07 2022 ] Batch(71/123) done. Loss: 0.0038 lr:0.000100 network_time: 0.0291
853
+ [ Wed Sep 14 18:16:44 2022 ] Eval epoch: 137
854
+ [ Wed Sep 14 18:17:16 2022 ] Mean test loss of 258 batches: 1.887627363204956.
855
+ [ Wed Sep 14 18:17:17 2022 ] Top1: 64.55%
856
+ [ Wed Sep 14 18:17:17 2022 ] Top5: 90.42%
857
+ [ Wed Sep 14 18:17:17 2022 ] Training epoch: 138
858
+ [ Wed Sep 14 18:17:55 2022 ] Batch(48/123) done. Loss: 0.0091 lr:0.000100 network_time: 0.0314
859
+ [ Wed Sep 14 18:18:49 2022 ] Eval epoch: 138
860
+ [ Wed Sep 14 18:19:23 2022 ] Mean test loss of 258 batches: 1.8002877235412598.
861
+ [ Wed Sep 14 18:19:23 2022 ] Top1: 64.74%
862
+ [ Wed Sep 14 18:19:23 2022 ] Top5: 90.39%
863
+ [ Wed Sep 14 18:19:23 2022 ] Training epoch: 139
864
+ [ Wed Sep 14 18:19:45 2022 ] Batch(25/123) done. Loss: 0.0043 lr:0.000100 network_time: 0.0300
865
+ [ Wed Sep 14 18:20:55 2022 ] Eval epoch: 139
866
+ [ Wed Sep 14 18:21:28 2022 ] Mean test loss of 258 batches: 1.872145652770996.
867
+ [ Wed Sep 14 18:21:28 2022 ] Top1: 64.27%
868
+ [ Wed Sep 14 18:21:28 2022 ] Top5: 90.06%
869
+ [ Wed Sep 14 18:21:28 2022 ] Training epoch: 140
870
+ [ Wed Sep 14 18:21:34 2022 ] Batch(2/123) done. Loss: 0.0032 lr:0.000100 network_time: 0.0340
871
+ [ Wed Sep 14 18:22:46 2022 ] Batch(102/123) done. Loss: 0.0034 lr:0.000100 network_time: 0.0267
872
+ [ Wed Sep 14 18:23:01 2022 ] Eval epoch: 140
873
+ [ Wed Sep 14 18:23:34 2022 ] Mean test loss of 258 batches: 1.8609108924865723.
874
+ [ Wed Sep 14 18:23:34 2022 ] Top1: 64.27%
875
+ [ Wed Sep 14 18:23:34 2022 ] Top5: 90.23%
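Note: the evaluation lines above follow a fixed "[ <timestamp> ] Top1: NN.NN%" / "Top5: NN.NN%" pattern, so the accuracy curve can be recovered from the log text alone; over the epochs shown above, Top-1 peaks at 64.85% (epoch 136). A minimal sketch, assuming the log has been saved locally as log.txt (the path is an assumption, not part of the upload):

import re

# Collect every Top-1 value reported after an "Eval epoch" line like the ones above.
top1 = []
with open("log.txt") as f:
    for line in f:
        m = re.search(r"\] Top1: ([\d.]+)%", line)
        if m:
            top1.append(float(m.group(1)))

if top1:
    print(f"{len(top1)} evals, best Top-1 = {max(top1):.2f}%")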
ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_joint_motion_xsub/shift_gcn.py ADDED
@@ -0,0 +1,216 @@
+ import torch
+ import torch.nn as nn
+ import torch.nn.functional as F
+ from torch.autograd import Variable
+ import numpy as np
+ import math
+
+ import sys
+ sys.path.append("./model/Temporal_shift/")
+
+ from cuda.shift import Shift
+
+
+ def import_class(name):
+     components = name.split('.')
+     mod = __import__(components[0])
+     for comp in components[1:]:
+         mod = getattr(mod, comp)
+     return mod
+
+ def conv_init(conv):
+     nn.init.kaiming_normal(conv.weight, mode='fan_out')
+     nn.init.constant(conv.bias, 0)
+
+
+ def bn_init(bn, scale):
+     nn.init.constant(bn.weight, scale)
+     nn.init.constant(bn.bias, 0)
+
+
+ class tcn(nn.Module):
+     def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
+         super(tcn, self).__init__()
+         pad = int((kernel_size - 1) / 2)
+         self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
+                               stride=(stride, 1))
+
+         self.bn = nn.BatchNorm2d(out_channels)
+         self.relu = nn.ReLU()
+         conv_init(self.conv)
+         bn_init(self.bn, 1)
+
+     def forward(self, x):
+         x = self.bn(self.conv(x))
+         return x
+
+
+ class Shift_tcn(nn.Module):
+     def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
+         super(Shift_tcn, self).__init__()
+
+         self.in_channels = in_channels
+         self.out_channels = out_channels
+
+         self.bn = nn.BatchNorm2d(in_channels)
+         self.bn2 = nn.BatchNorm2d(in_channels)
+         bn_init(self.bn2, 1)
+         self.relu = nn.ReLU(inplace=True)
+         self.shift_in = Shift(channel=in_channels, stride=1, init_scale=1)
+         self.shift_out = Shift(channel=out_channels, stride=stride, init_scale=1)
+
+         self.temporal_linear = nn.Conv2d(in_channels, out_channels, 1)
+         nn.init.kaiming_normal(self.temporal_linear.weight, mode='fan_out')
+
+     def forward(self, x):
+         x = self.bn(x)
+         # shift1
+         x = self.shift_in(x)
+         x = self.temporal_linear(x)
+         x = self.relu(x)
+         # shift2
+         x = self.shift_out(x)
+         x = self.bn2(x)
+         return x
+
+
+ class Shift_gcn(nn.Module):
+     def __init__(self, in_channels, out_channels, A, coff_embedding=4, num_subset=3):
+         super(Shift_gcn, self).__init__()
+         self.in_channels = in_channels
+         self.out_channels = out_channels
+         if in_channels != out_channels:
+             self.down = nn.Sequential(
+                 nn.Conv2d(in_channels, out_channels, 1),
+                 nn.BatchNorm2d(out_channels)
+             )
+         else:
+             self.down = lambda x: x
+
+         self.Linear_weight = nn.Parameter(torch.zeros(in_channels, out_channels, requires_grad=True, device='cuda'), requires_grad=True)
+         nn.init.normal_(self.Linear_weight, 0,math.sqrt(1.0/out_channels))
+
+         self.Linear_bias = nn.Parameter(torch.zeros(1,1,out_channels,requires_grad=True,device='cuda'),requires_grad=True)
+         nn.init.constant(self.Linear_bias, 0)
+
+         self.Feature_Mask = nn.Parameter(torch.ones(1,25,in_channels, requires_grad=True,device='cuda'),requires_grad=True)
+         nn.init.constant(self.Feature_Mask, 0)
+
+         self.bn = nn.BatchNorm1d(25*out_channels)
+         self.relu = nn.ReLU()
+
+         for m in self.modules():
+             if isinstance(m, nn.Conv2d):
+                 conv_init(m)
+             elif isinstance(m, nn.BatchNorm2d):
+                 bn_init(m, 1)
+
+         index_array = np.empty(25*in_channels).astype(np.int)
+         for i in range(25):
+             for j in range(in_channels):
+                 index_array[i*in_channels + j] = (i*in_channels + j + j*in_channels)%(in_channels*25)
+         self.shift_in = nn.Parameter(torch.from_numpy(index_array),requires_grad=False)
+
+         index_array = np.empty(25*out_channels).astype(np.int)
+         for i in range(25):
+             for j in range(out_channels):
+                 index_array[i*out_channels + j] = (i*out_channels + j - j*out_channels)%(out_channels*25)
+         self.shift_out = nn.Parameter(torch.from_numpy(index_array),requires_grad=False)
+
+
+     def forward(self, x0):
+         n, c, t, v = x0.size()
+         x = x0.permute(0,2,3,1).contiguous()
+
+         # shift1
+         x = x.view(n*t,v*c)
+         x = torch.index_select(x, 1, self.shift_in)
+         x = x.view(n*t,v,c)
+         x = x * (torch.tanh(self.Feature_Mask)+1)
+
+         x = torch.einsum('nwc,cd->nwd', (x, self.Linear_weight)).contiguous() # nt,v,c
+         x = x + self.Linear_bias
+
+         # shift2
+         x = x.view(n*t,-1)
+         x = torch.index_select(x, 1, self.shift_out)
+         x = self.bn(x)
+         x = x.view(n,t,v,self.out_channels).permute(0,3,1,2) # n,c,t,v
+
+         x = x + self.down(x0)
+         x = self.relu(x)
+         return x
+
+
+ class TCN_GCN_unit(nn.Module):
+     def __init__(self, in_channels, out_channels, A, stride=1, residual=True):
+         super(TCN_GCN_unit, self).__init__()
+         self.gcn1 = Shift_gcn(in_channels, out_channels, A)
+         self.tcn1 = Shift_tcn(out_channels, out_channels, stride=stride)
+         self.relu = nn.ReLU()
+
+         if not residual:
+             self.residual = lambda x: 0
+
+         elif (in_channels == out_channels) and (stride == 1):
+             self.residual = lambda x: x
+         else:
+             self.residual = tcn(in_channels, out_channels, kernel_size=1, stride=stride)
+
+     def forward(self, x):
+         x = self.tcn1(self.gcn1(x)) + self.residual(x)
+         return self.relu(x)
+
+
+ class Model(nn.Module):
+     def __init__(self, num_class=60, num_point=25, num_person=2, graph=None, graph_args=dict(), in_channels=3):
+         super(Model, self).__init__()
+
+         if graph is None:
+             raise ValueError()
+         else:
+             Graph = import_class(graph)
+             self.graph = Graph(**graph_args)
+
+         A = self.graph.A
+         self.data_bn = nn.BatchNorm1d(num_person * in_channels * num_point)
+
+         self.l1 = TCN_GCN_unit(3, 64, A, residual=False)
+         self.l2 = TCN_GCN_unit(64, 64, A)
+         self.l3 = TCN_GCN_unit(64, 64, A)
+         self.l4 = TCN_GCN_unit(64, 64, A)
+         self.l5 = TCN_GCN_unit(64, 128, A, stride=2)
+         self.l6 = TCN_GCN_unit(128, 128, A)
+         self.l7 = TCN_GCN_unit(128, 128, A)
+         self.l8 = TCN_GCN_unit(128, 256, A, stride=2)
+         self.l9 = TCN_GCN_unit(256, 256, A)
+         self.l10 = TCN_GCN_unit(256, 256, A)
+
+         self.fc = nn.Linear(256, num_class)
+         nn.init.normal(self.fc.weight, 0, math.sqrt(2. / num_class))
+         bn_init(self.data_bn, 1)
+
+     def forward(self, x):
+         N, C, T, V, M = x.size()
+
+         x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
+         x = self.data_bn(x)
+         x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
+
+         x = self.l1(x)
+         x = self.l2(x)
+         x = self.l3(x)
+         x = self.l4(x)
+         x = self.l5(x)
+         x = self.l6(x)
+         x = self.l7(x)
+         x = self.l8(x)
+         x = self.l9(x)
+         x = self.l10(x)
+
+         # N*M,C,T,V
+         c_new = x.size(1)
+         x = x.view(N, M, c_new, -1)
+         x = x.mean(3).mean(1)
+
+         return self.fc(x)
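Note: the spatial shift in Shift_gcn above is a fixed permutation applied with torch.index_select. After flattening the (V, C) joint-channel grid into one axis, the entry for joint i, channel j is read from position (i*C + j + j*C) mod (V*C), i.e. each channel is rolled across the 25 joints by a different offset (shift_out undoes the roll in the opposite direction after the pointwise linear layer). Also note that np.empty(...).astype(np.int), as written in the file, relies on the np.int alias that NumPy removed in 1.24. A self-contained toy sketch of the same index construction (small V and C chosen only for illustration, np.int64 used in place of the removed alias):

import numpy as np
import torch

V, C = 5, 4                                # toy sizes; the model above uses V = 25 joints
idx = np.empty(V * C, dtype=np.int64)      # np.int64 instead of the removed np.int alias
for i in range(V):
    for j in range(C):
        idx[i * C + j] = (i * C + j + j * C) % (V * C)   # same formula as shift_in above
shift_in = torch.from_numpy(idx)

x = torch.arange(V * C, dtype=torch.float32).view(1, V * C)   # one flattened (joint, channel) row
shifted = torch.index_select(x, 1, shift_in).view(1, V, C)
print(shifted)   # channel j of joint i now holds the feature of joint (i + j) % V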
ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_joint_xsub/config.yaml ADDED
@@ -0,0 +1,56 @@
+ Experiment_name: ntu_ShiftGCN_joint_xsub
+ base_lr: 0.1
+ batch_size: 64
+ config: ./config/nturgbd-cross-subject/train_joint.yaml
+ device:
+ - 4
+ - 5
+ eval_interval: 5
+ feeder: feeders.feeder.Feeder
+ ignore_weights: []
+ log_interval: 100
+ model: model.shift_gcn.Model
+ model_args:
+   graph: graph.ntu_rgb_d.Graph
+   graph_args:
+     labeling_mode: spatial
+   num_class: 60
+   num_person: 2
+   num_point: 25
+ model_saved_name: ./save_models/ntu_ShiftGCN_joint_xsub
+ nesterov: true
+ num_epoch: 140
+ num_worker: 32
+ only_train_epoch: 1
+ only_train_part: true
+ optimizer: SGD
+ phase: train
+ print_log: true
+ save_interval: 2
+ save_score: false
+ seed: 1
+ show_topk:
+ - 1
+ - 5
+ start_epoch: 0
+ step:
+ - 60
+ - 80
+ - 100
+ test_batch_size: 64
+ test_feeder_args:
+   data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_joint.npy
+   label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl
+ train_feeder_args:
+   data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_joint.npy
+   debug: false
+   label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl
+   normalization: false
+   random_choose: false
+   random_move: false
+   random_shift: false
+   window_size: -1
+ warm_up_epoch: 0
+ weight_decay: 0.0001
+ weights: null
+ work_dir: ./work_dir/ntu_ShiftGCN_joint_xsub
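Note: this config.yaml is the argument dump for the corresponding training run (it mirrors the Parameters block printed at the top of log.txt). A minimal sketch of reading it back, assuming PyYAML is installed and the directory layout of this upload:

import yaml

path = "ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_joint_xsub/config.yaml"   # path as laid out in this upload
with open(path) as f:
    cfg = yaml.safe_load(f)

print(cfg["model"])        # model.shift_gcn.Model
print(cfg["model_args"])   # kwargs intended for Model(**cfg["model_args"])
print(cfg["step"])         # LR decay epochs: [60, 80, 100]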
ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_joint_xsub/eval_results/best_acc.pkl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:08a6edb8ac93121efedccfaa08fe4ede9e0e78cb68578f758c768a9e17efa792
+ size 4979902
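Note: best_acc.pkl is stored through Git LFS, so the three lines above are only the pointer; the actual 4,979,902-byte pickle is fetched by an LFS-aware clone (e.g. git lfs pull). A small sketch, assuming the file has been fetched to the same relative path, that checks the download against the pointer's sha256 oid:

import hashlib

expected = "08a6edb8ac93121efedccfaa08fe4ede9e0e78cb68578f758c768a9e17efa792"   # oid from the pointer above
path = "ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_joint_xsub/eval_results/best_acc.pkl"

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)
print(h.hexdigest() == expected)   # True only for the resolved file, not the pointer stub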
ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_joint_xsub/log.txt ADDED
@@ -0,0 +1,893 @@
1
+ [ Wed Sep 14 13:20:57 2022 ] Parameters:
2
+ {'work_dir': './work_dir/ntu_ShiftGCN_joint_xsub', 'model_saved_name': './save_models/ntu_ShiftGCN_joint_xsub', 'Experiment_name': 'ntu_ShiftGCN_joint_xsub', 'config': './config/nturgbd-cross-subject/train_joint.yaml', 'phase': 'train', 'save_score': False, 'seed': 1, 'log_interval': 100, 'save_interval': 2, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl'}, 'model': 'model.shift_gcn.Model', 'model_args': {'num_class': 60, 'num_point': 25, 'num_person': 2, 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.1, 'step': [60, 80, 100], 'device': [4, 5], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 64, 'test_batch_size': 64, 'start_epoch': 0, 'num_epoch': 140, 'weight_decay': 0.0001, 'only_train_part': True, 'only_train_epoch': 1, 'warm_up_epoch': 0}
3
+
4
+ [ Wed Sep 14 13:20:57 2022 ] Training epoch: 1
5
+ [ Wed Sep 14 13:21:38 2022 ] Parameters:
6
+ {'work_dir': './work_dir/ntu_ShiftGCN_joint_xsub', 'model_saved_name': './save_models/ntu_ShiftGCN_joint_xsub', 'Experiment_name': 'ntu_ShiftGCN_joint_xsub', 'config': './config/nturgbd-cross-subject/train_joint.yaml', 'phase': 'train', 'save_score': False, 'seed': 1, 'log_interval': 100, 'save_interval': 2, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl'}, 'model': 'model.shift_gcn.Model', 'model_args': {'num_class': 60, 'num_point': 25, 'num_person': 2, 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.1, 'step': [60, 80, 100], 'device': [4, 5], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 64, 'test_batch_size': 64, 'start_epoch': 0, 'num_epoch': 140, 'weight_decay': 0.0001, 'only_train_part': True, 'only_train_epoch': 1, 'warm_up_epoch': 0}
7
+
8
+ [ Wed Sep 14 13:21:38 2022 ] Training epoch: 1
9
+ [ Wed Sep 14 13:26:13 2022 ] Parameters:
10
+ {'work_dir': './work_dir/ntu_ShiftGCN_joint_xsub', 'model_saved_name': './save_models/ntu_ShiftGCN_joint_xsub', 'Experiment_name': 'ntu_ShiftGCN_joint_xsub', 'config': './config/nturgbd-cross-subject/train_joint.yaml', 'phase': 'train', 'save_score': False, 'seed': 1, 'log_interval': 100, 'save_interval': 2, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl'}, 'model': 'model.shift_gcn.Model', 'model_args': {'num_class': 60, 'num_point': 25, 'num_person': 2, 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.1, 'step': [60, 80, 100], 'device': [4, 5], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 64, 'test_batch_size': 64, 'start_epoch': 0, 'num_epoch': 140, 'weight_decay': 0.0001, 'only_train_part': True, 'only_train_epoch': 1, 'warm_up_epoch': 0}
11
+
12
+ [ Wed Sep 14 13:26:13 2022 ] Training epoch: 1
13
+ [ Wed Sep 14 13:27:32 2022 ] Batch(99/123) done. Loss: 2.1354 lr:0.100000 network_time: 0.0319
14
+ [ Wed Sep 14 13:27:49 2022 ] Eval epoch: 1
15
+ [ Wed Sep 14 13:28:22 2022 ] Mean test loss of 258 batches: 4.33281946182251.
16
+ [ Wed Sep 14 13:28:22 2022 ] Top1: 13.93%
17
+ [ Wed Sep 14 13:28:22 2022 ] Top5: 38.90%
18
+ [ Wed Sep 14 13:28:22 2022 ] Training epoch: 2
19
+ [ Wed Sep 14 13:30:08 2022 ] Parameters:
20
+ {'work_dir': './work_dir/ntu_ShiftGCN_joint_xsub', 'model_saved_name': './save_models/ntu_ShiftGCN_joint_xsub', 'Experiment_name': 'ntu_ShiftGCN_joint_xsub', 'config': './config/nturgbd-cross-subject/train_joint.yaml', 'phase': 'train', 'save_score': False, 'seed': 1, 'log_interval': 100, 'save_interval': 2, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl'}, 'model': 'model.shift_gcn.Model', 'model_args': {'num_class': 60, 'num_point': 25, 'num_person': 2, 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.1, 'step': [60, 80, 100], 'device': [4, 5], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 64, 'test_batch_size': 64, 'start_epoch': 0, 'num_epoch': 140, 'weight_decay': 0.0001, 'only_train_part': True, 'only_train_epoch': 1, 'warm_up_epoch': 0}
21
+
22
+ [ Wed Sep 14 13:30:08 2022 ] Training epoch: 1
23
+ [ Wed Sep 14 13:31:26 2022 ] Batch(99/123) done. Loss: 2.1354 lr:0.100000 network_time: 0.0317
24
+ [ Wed Sep 14 13:31:43 2022 ] Eval epoch: 1
25
+ [ Wed Sep 14 13:32:15 2022 ] Mean test loss of 258 batches: 4.33281946182251.
26
+ [ Wed Sep 14 13:32:15 2022 ] Top1: 13.93%
27
+ [ Wed Sep 14 13:32:15 2022 ] Top5: 38.90%
28
+ [ Wed Sep 14 13:32:15 2022 ] Training epoch: 2
29
+ [ Wed Sep 14 13:33:14 2022 ] Batch(76/123) done. Loss: 2.2529 lr:0.100000 network_time: 0.0313
30
+ [ Wed Sep 14 13:33:48 2022 ] Eval epoch: 2
31
+ [ Wed Sep 14 13:34:20 2022 ] Mean test loss of 258 batches: 3.571162700653076.
32
+ [ Wed Sep 14 13:34:20 2022 ] Top1: 19.44%
33
+ [ Wed Sep 14 13:34:21 2022 ] Top5: 50.74%
34
+ [ Wed Sep 14 13:34:21 2022 ] Training epoch: 3
35
+ [ Wed Sep 14 13:35:03 2022 ] Batch(53/123) done. Loss: 1.8599 lr:0.100000 network_time: 0.0305
36
+ [ Wed Sep 14 13:35:54 2022 ] Eval epoch: 3
37
+ [ Wed Sep 14 13:36:26 2022 ] Mean test loss of 258 batches: 3.5653231143951416.
38
+ [ Wed Sep 14 13:36:26 2022 ] Top1: 23.73%
39
+ [ Wed Sep 14 13:36:26 2022 ] Top5: 58.72%
40
+ [ Wed Sep 14 13:36:26 2022 ] Training epoch: 4
41
+ [ Wed Sep 14 13:36:52 2022 ] Batch(30/123) done. Loss: 1.6613 lr:0.100000 network_time: 0.0268
42
+ [ Wed Sep 14 13:37:59 2022 ] Eval epoch: 4
43
+ [ Wed Sep 14 13:38:31 2022 ] Mean test loss of 258 batches: 2.9171056747436523.
44
+ [ Wed Sep 14 13:38:31 2022 ] Top1: 29.33%
45
+ [ Wed Sep 14 13:38:31 2022 ] Top5: 64.11%
46
+ [ Wed Sep 14 13:38:31 2022 ] Training epoch: 5
47
+ [ Wed Sep 14 13:38:40 2022 ] Batch(7/123) done. Loss: 1.1064 lr:0.100000 network_time: 0.0258
48
+ [ Wed Sep 14 13:39:53 2022 ] Batch(107/123) done. Loss: 1.2393 lr:0.100000 network_time: 0.0274
49
+ [ Wed Sep 14 13:40:04 2022 ] Eval epoch: 5
50
+ [ Wed Sep 14 13:40:36 2022 ] Mean test loss of 258 batches: 2.6525120735168457.
51
+ [ Wed Sep 14 13:40:36 2022 ] Top1: 33.77%
52
+ [ Wed Sep 14 13:40:36 2022 ] Top5: 70.72%
53
+ [ Wed Sep 14 13:40:37 2022 ] Training epoch: 6
54
+ [ Wed Sep 14 13:41:41 2022 ] Batch(84/123) done. Loss: 1.2044 lr:0.100000 network_time: 0.0322
55
+ [ Wed Sep 14 13:42:09 2022 ] Eval epoch: 6
56
+ [ Wed Sep 14 13:42:42 2022 ] Mean test loss of 258 batches: 2.4061315059661865.
57
+ [ Wed Sep 14 13:42:42 2022 ] Top1: 35.90%
58
+ [ Wed Sep 14 13:42:42 2022 ] Top5: 73.77%
59
+ [ Wed Sep 14 13:42:42 2022 ] Training epoch: 7
60
+ [ Wed Sep 14 13:43:30 2022 ] Batch(61/123) done. Loss: 0.9143 lr:0.100000 network_time: 0.0260
61
+ [ Wed Sep 14 13:44:15 2022 ] Eval epoch: 7
62
+ [ Wed Sep 14 13:44:47 2022 ] Mean test loss of 258 batches: 2.4678521156311035.
63
+ [ Wed Sep 14 13:44:48 2022 ] Top1: 38.75%
64
+ [ Wed Sep 14 13:44:48 2022 ] Top5: 73.76%
65
+ [ Wed Sep 14 13:44:48 2022 ] Training epoch: 8
66
+ [ Wed Sep 14 13:45:19 2022 ] Batch(38/123) done. Loss: 1.0245 lr:0.100000 network_time: 0.0257
67
+ [ Wed Sep 14 13:46:20 2022 ] Eval epoch: 8
68
+ [ Wed Sep 14 13:46:53 2022 ] Mean test loss of 258 batches: 2.290700912475586.
69
+ [ Wed Sep 14 13:46:53 2022 ] Top1: 39.76%
70
+ [ Wed Sep 14 13:46:53 2022 ] Top5: 77.05%
71
+ [ Wed Sep 14 13:46:53 2022 ] Training epoch: 9
72
+ [ Wed Sep 14 13:47:08 2022 ] Batch(15/123) done. Loss: 1.0405 lr:0.100000 network_time: 0.0276
73
+ [ Wed Sep 14 13:48:21 2022 ] Batch(115/123) done. Loss: 1.0858 lr:0.100000 network_time: 0.0317
74
+ [ Wed Sep 14 13:48:26 2022 ] Eval epoch: 9
75
+ [ Wed Sep 14 13:48:59 2022 ] Mean test loss of 258 batches: 2.499346971511841.
76
+ [ Wed Sep 14 13:48:59 2022 ] Top1: 40.40%
77
+ [ Wed Sep 14 13:48:59 2022 ] Top5: 74.43%
78
+ [ Wed Sep 14 13:48:59 2022 ] Training epoch: 10
79
+ [ Wed Sep 14 13:50:10 2022 ] Batch(92/123) done. Loss: 1.0761 lr:0.100000 network_time: 0.0295
80
+ [ Wed Sep 14 13:50:32 2022 ] Eval epoch: 10
81
+ [ Wed Sep 14 13:51:05 2022 ] Mean test loss of 258 batches: 2.265429973602295.
82
+ [ Wed Sep 14 13:51:05 2022 ] Top1: 41.90%
83
+ [ Wed Sep 14 13:51:05 2022 ] Top5: 76.64%
84
+ [ Wed Sep 14 13:51:05 2022 ] Training epoch: 11
85
+ [ Wed Sep 14 13:51:59 2022 ] Batch(69/123) done. Loss: 0.6436 lr:0.100000 network_time: 0.0291
86
+ [ Wed Sep 14 13:52:38 2022 ] Eval epoch: 11
87
+ [ Wed Sep 14 13:53:11 2022 ] Mean test loss of 258 batches: 2.014800786972046.
88
+ [ Wed Sep 14 13:53:11 2022 ] Top1: 47.50%
89
+ [ Wed Sep 14 13:53:11 2022 ] Top5: 81.51%
90
+ [ Wed Sep 14 13:53:11 2022 ] Training epoch: 12
91
+ [ Wed Sep 14 13:53:49 2022 ] Batch(46/123) done. Loss: 0.6086 lr:0.100000 network_time: 0.0289
92
+ [ Wed Sep 14 13:54:44 2022 ] Eval epoch: 12
93
+ [ Wed Sep 14 13:55:17 2022 ] Mean test loss of 258 batches: 2.291834831237793.
94
+ [ Wed Sep 14 13:55:17 2022 ] Top1: 46.36%
95
+ [ Wed Sep 14 13:55:17 2022 ] Top5: 81.22%
96
+ [ Wed Sep 14 13:55:17 2022 ] Training epoch: 13
97
+ [ Wed Sep 14 13:55:38 2022 ] Batch(23/123) done. Loss: 0.8663 lr:0.100000 network_time: 0.0259
98
+ [ Wed Sep 14 13:56:50 2022 ] Eval epoch: 13
99
+ [ Wed Sep 14 13:57:22 2022 ] Mean test loss of 258 batches: 2.2155113220214844.
100
+ [ Wed Sep 14 13:57:22 2022 ] Top1: 46.39%
101
+ [ Wed Sep 14 13:57:23 2022 ] Top5: 80.01%
102
+ [ Wed Sep 14 13:57:23 2022 ] Training epoch: 14
103
+ [ Wed Sep 14 13:57:26 2022 ] Batch(0/123) done. Loss: 0.6118 lr:0.100000 network_time: 0.0475
104
+ [ Wed Sep 14 13:58:39 2022 ] Batch(100/123) done. Loss: 0.4732 lr:0.100000 network_time: 0.0320
105
+ [ Wed Sep 14 13:58:56 2022 ] Eval epoch: 14
106
+ [ Wed Sep 14 13:59:28 2022 ] Mean test loss of 258 batches: 2.701786518096924.
107
+ [ Wed Sep 14 13:59:28 2022 ] Top1: 38.97%
108
+ [ Wed Sep 14 13:59:28 2022 ] Top5: 75.62%
109
+ [ Wed Sep 14 13:59:28 2022 ] Training epoch: 15
110
+ [ Wed Sep 14 14:00:28 2022 ] Batch(77/123) done. Loss: 0.6637 lr:0.100000 network_time: 0.0265
111
+ [ Wed Sep 14 14:01:01 2022 ] Eval epoch: 15
112
+ [ Wed Sep 14 14:01:33 2022 ] Mean test loss of 258 batches: 2.2561497688293457.
113
+ [ Wed Sep 14 14:01:33 2022 ] Top1: 46.73%
114
+ [ Wed Sep 14 14:01:33 2022 ] Top5: 79.88%
115
+ [ Wed Sep 14 14:01:33 2022 ] Training epoch: 16
116
+ [ Wed Sep 14 14:02:16 2022 ] Batch(54/123) done. Loss: 0.6604 lr:0.100000 network_time: 0.0298
117
+ [ Wed Sep 14 14:03:06 2022 ] Eval epoch: 16
118
+ [ Wed Sep 14 14:03:38 2022 ] Mean test loss of 258 batches: 2.2094640731811523.
119
+ [ Wed Sep 14 14:03:38 2022 ] Top1: 46.29%
120
+ [ Wed Sep 14 14:03:38 2022 ] Top5: 82.10%
121
+ [ Wed Sep 14 14:03:39 2022 ] Training epoch: 17
122
+ [ Wed Sep 14 14:04:05 2022 ] Batch(31/123) done. Loss: 0.5614 lr:0.100000 network_time: 0.0300
123
+ [ Wed Sep 14 14:05:12 2022 ] Eval epoch: 17
124
+ [ Wed Sep 14 14:05:45 2022 ] Mean test loss of 258 batches: 2.6692111492156982.
125
+ [ Wed Sep 14 14:05:45 2022 ] Top1: 41.47%
126
+ [ Wed Sep 14 14:05:45 2022 ] Top5: 77.44%
127
+ [ Wed Sep 14 14:05:45 2022 ] Training epoch: 18
128
+ [ Wed Sep 14 14:05:55 2022 ] Batch(8/123) done. Loss: 0.3214 lr:0.100000 network_time: 0.0334
129
+ [ Wed Sep 14 14:07:08 2022 ] Batch(108/123) done. Loss: 0.4732 lr:0.100000 network_time: 0.0279
130
+ [ Wed Sep 14 14:07:18 2022 ] Eval epoch: 18
131
+ [ Wed Sep 14 14:07:50 2022 ] Mean test loss of 258 batches: 2.0434653759002686.
132
+ [ Wed Sep 14 14:07:50 2022 ] Top1: 51.87%
133
+ [ Wed Sep 14 14:07:51 2022 ] Top5: 83.21%
134
+ [ Wed Sep 14 14:07:51 2022 ] Training epoch: 19
135
+ [ Wed Sep 14 14:08:57 2022 ] Batch(85/123) done. Loss: 0.4650 lr:0.100000 network_time: 0.0277
136
+ [ Wed Sep 14 14:09:24 2022 ] Eval epoch: 19
137
+ [ Wed Sep 14 14:09:56 2022 ] Mean test loss of 258 batches: 1.86018967628479.
138
+ [ Wed Sep 14 14:09:57 2022 ] Top1: 51.05%
139
+ [ Wed Sep 14 14:09:57 2022 ] Top5: 83.74%
140
+ [ Wed Sep 14 14:09:57 2022 ] Training epoch: 20
141
+ [ Wed Sep 14 14:10:46 2022 ] Batch(62/123) done. Loss: 0.3363 lr:0.100000 network_time: 0.0292
142
+ [ Wed Sep 14 14:11:30 2022 ] Eval epoch: 20
143
+ [ Wed Sep 14 14:12:02 2022 ] Mean test loss of 258 batches: 2.408405065536499.
144
+ [ Wed Sep 14 14:12:02 2022 ] Top1: 46.35%
145
+ [ Wed Sep 14 14:12:03 2022 ] Top5: 79.66%
146
+ [ Wed Sep 14 14:12:03 2022 ] Training epoch: 21
147
+ [ Wed Sep 14 14:12:35 2022 ] Batch(39/123) done. Loss: 0.5413 lr:0.100000 network_time: 0.0288
148
+ [ Wed Sep 14 14:13:36 2022 ] Eval epoch: 21
149
+ [ Wed Sep 14 14:14:08 2022 ] Mean test loss of 258 batches: 2.3089609146118164.
150
+ [ Wed Sep 14 14:14:08 2022 ] Top1: 45.38%
151
+ [ Wed Sep 14 14:14:08 2022 ] Top5: 82.33%
152
+ [ Wed Sep 14 14:14:09 2022 ] Training epoch: 22
153
+ [ Wed Sep 14 14:14:24 2022 ] Batch(16/123) done. Loss: 0.4254 lr:0.100000 network_time: 0.0332
154
+ [ Wed Sep 14 14:15:37 2022 ] Batch(116/123) done. Loss: 0.4475 lr:0.100000 network_time: 0.0265
155
+ [ Wed Sep 14 14:15:42 2022 ] Eval epoch: 22
156
+ [ Wed Sep 14 14:16:14 2022 ] Mean test loss of 258 batches: 2.318674087524414.
157
+ [ Wed Sep 14 14:16:14 2022 ] Top1: 47.51%
158
+ [ Wed Sep 14 14:16:15 2022 ] Top5: 82.03%
159
+ [ Wed Sep 14 14:16:15 2022 ] Training epoch: 23
160
+ [ Wed Sep 14 14:17:26 2022 ] Batch(93/123) done. Loss: 0.5333 lr:0.100000 network_time: 0.0286
161
+ [ Wed Sep 14 14:17:48 2022 ] Eval epoch: 23
162
+ [ Wed Sep 14 14:18:20 2022 ] Mean test loss of 258 batches: 2.097595453262329.
163
+ [ Wed Sep 14 14:18:20 2022 ] Top1: 49.50%
164
+ [ Wed Sep 14 14:18:20 2022 ] Top5: 83.20%
165
+ [ Wed Sep 14 14:18:21 2022 ] Training epoch: 24
166
+ [ Wed Sep 14 14:19:15 2022 ] Batch(70/123) done. Loss: 0.5075 lr:0.100000 network_time: 0.0293
167
+ [ Wed Sep 14 14:19:54 2022 ] Eval epoch: 24
168
+ [ Wed Sep 14 14:20:26 2022 ] Mean test loss of 258 batches: 2.4013638496398926.
169
+ [ Wed Sep 14 14:20:26 2022 ] Top1: 48.10%
170
+ [ Wed Sep 14 14:20:26 2022 ] Top5: 81.03%
171
+ [ Wed Sep 14 14:20:26 2022 ] Training epoch: 25
172
+ [ Wed Sep 14 14:21:05 2022 ] Batch(47/123) done. Loss: 0.4802 lr:0.100000 network_time: 0.0293
173
+ [ Wed Sep 14 14:22:00 2022 ] Eval epoch: 25
174
+ [ Wed Sep 14 14:22:32 2022 ] Mean test loss of 258 batches: 2.116831064224243.
175
+ [ Wed Sep 14 14:22:33 2022 ] Top1: 49.18%
176
+ [ Wed Sep 14 14:22:33 2022 ] Top5: 83.03%
177
+ [ Wed Sep 14 14:22:33 2022 ] Training epoch: 26
178
+ [ Wed Sep 14 14:22:54 2022 ] Batch(24/123) done. Loss: 0.7263 lr:0.100000 network_time: 0.0276
179
+ [ Wed Sep 14 14:24:06 2022 ] Eval epoch: 26
180
+ [ Wed Sep 14 14:24:38 2022 ] Mean test loss of 258 batches: 1.9685046672821045.
181
+ [ Wed Sep 14 14:24:38 2022 ] Top1: 53.72%
182
+ [ Wed Sep 14 14:24:38 2022 ] Top5: 85.21%
183
+ [ Wed Sep 14 14:24:38 2022 ] Training epoch: 27
184
+ [ Wed Sep 14 14:24:43 2022 ] Batch(1/123) done. Loss: 0.1548 lr:0.100000 network_time: 0.0319
185
+ [ Wed Sep 14 14:25:56 2022 ] Batch(101/123) done. Loss: 0.3913 lr:0.100000 network_time: 0.0269
186
+ [ Wed Sep 14 14:26:11 2022 ] Eval epoch: 27
187
+ [ Wed Sep 14 14:26:43 2022 ] Mean test loss of 258 batches: 2.30924391746521.
188
+ [ Wed Sep 14 14:26:43 2022 ] Top1: 50.16%
189
+ [ Wed Sep 14 14:26:43 2022 ] Top5: 82.26%
190
+ [ Wed Sep 14 14:26:44 2022 ] Training epoch: 28
191
+ [ Wed Sep 14 14:27:44 2022 ] Batch(78/123) done. Loss: 0.5422 lr:0.100000 network_time: 0.0262
192
+ [ Wed Sep 14 14:28:17 2022 ] Eval epoch: 28
193
+ [ Wed Sep 14 14:28:48 2022 ] Mean test loss of 258 batches: 2.1971280574798584.
194
+ [ Wed Sep 14 14:28:49 2022 ] Top1: 52.06%
195
+ [ Wed Sep 14 14:28:49 2022 ] Top5: 84.11%
196
+ [ Wed Sep 14 14:28:49 2022 ] Training epoch: 29
197
+ [ Wed Sep 14 14:29:32 2022 ] Batch(55/123) done. Loss: 0.3558 lr:0.100000 network_time: 0.0311
198
+ [ Wed Sep 14 14:30:22 2022 ] Eval epoch: 29
199
+ [ Wed Sep 14 14:30:53 2022 ] Mean test loss of 258 batches: 1.991234540939331.
200
+ [ Wed Sep 14 14:30:54 2022 ] Top1: 53.61%
201
+ [ Wed Sep 14 14:30:54 2022 ] Top5: 85.71%
202
+ [ Wed Sep 14 14:30:54 2022 ] Training epoch: 30
203
+ [ Wed Sep 14 14:31:21 2022 ] Batch(32/123) done. Loss: 0.3585 lr:0.100000 network_time: 0.0273
204
+ [ Wed Sep 14 14:32:26 2022 ] Eval epoch: 30
205
+ [ Wed Sep 14 14:32:59 2022 ] Mean test loss of 258 batches: 1.8016409873962402.
206
+ [ Wed Sep 14 14:32:59 2022 ] Top1: 56.30%
207
+ [ Wed Sep 14 14:32:59 2022 ] Top5: 86.58%
208
+ [ Wed Sep 14 14:32:59 2022 ] Training epoch: 31
209
+ [ Wed Sep 14 14:33:09 2022 ] Batch(9/123) done. Loss: 0.2505 lr:0.100000 network_time: 0.0281
210
+ [ Wed Sep 14 14:34:22 2022 ] Batch(109/123) done. Loss: 0.2635 lr:0.100000 network_time: 0.0264
211
+ [ Wed Sep 14 14:34:32 2022 ] Eval epoch: 31
212
+ [ Wed Sep 14 14:35:05 2022 ] Mean test loss of 258 batches: 2.105398178100586.
213
+ [ Wed Sep 14 14:35:05 2022 ] Top1: 51.54%
214
+ [ Wed Sep 14 14:35:05 2022 ] Top5: 84.89%
215
+ [ Wed Sep 14 14:35:05 2022 ] Training epoch: 32
216
+ [ Wed Sep 14 14:36:11 2022 ] Batch(86/123) done. Loss: 0.4799 lr:0.100000 network_time: 0.0263
217
+ [ Wed Sep 14 14:36:38 2022 ] Eval epoch: 32
218
+ [ Wed Sep 14 14:37:10 2022 ] Mean test loss of 258 batches: 2.0153965950012207.
219
+ [ Wed Sep 14 14:37:10 2022 ] Top1: 52.47%
220
+ [ Wed Sep 14 14:37:10 2022 ] Top5: 85.09%
221
+ [ Wed Sep 14 14:37:10 2022 ] Training epoch: 33
222
+ [ Wed Sep 14 14:38:00 2022 ] Batch(63/123) done. Loss: 0.3095 lr:0.100000 network_time: 0.0275
223
+ [ Wed Sep 14 14:38:43 2022 ] Eval epoch: 33
224
+ [ Wed Sep 14 14:39:15 2022 ] Mean test loss of 258 batches: 2.7301554679870605.
225
+ [ Wed Sep 14 14:39:15 2022 ] Top1: 47.96%
226
+ [ Wed Sep 14 14:39:15 2022 ] Top5: 81.24%
227
+ [ Wed Sep 14 14:39:15 2022 ] Training epoch: 34
228
+ [ Wed Sep 14 14:39:48 2022 ] Batch(40/123) done. Loss: 0.3923 lr:0.100000 network_time: 0.0274
229
+ [ Wed Sep 14 14:40:48 2022 ] Eval epoch: 34
230
+ [ Wed Sep 14 14:41:21 2022 ] Mean test loss of 258 batches: 1.9540212154388428.
231
+ [ Wed Sep 14 14:41:21 2022 ] Top1: 54.04%
232
+ [ Wed Sep 14 14:41:21 2022 ] Top5: 85.81%
233
+ [ Wed Sep 14 14:41:21 2022 ] Training epoch: 35
234
+ [ Wed Sep 14 14:41:37 2022 ] Batch(17/123) done. Loss: 0.1987 lr:0.100000 network_time: 0.0280
235
+ [ Wed Sep 14 14:42:50 2022 ] Batch(117/123) done. Loss: 0.3243 lr:0.100000 network_time: 0.0291
236
+ [ Wed Sep 14 14:42:54 2022 ] Eval epoch: 35
237
+ [ Wed Sep 14 14:43:26 2022 ] Mean test loss of 258 batches: 2.593190908432007.
238
+ [ Wed Sep 14 14:43:26 2022 ] Top1: 50.09%
239
+ [ Wed Sep 14 14:43:26 2022 ] Top5: 81.51%
240
+ [ Wed Sep 14 14:43:26 2022 ] Training epoch: 36
241
+ [ Wed Sep 14 14:44:38 2022 ] Batch(94/123) done. Loss: 0.3176 lr:0.100000 network_time: 0.0269
242
+ [ Wed Sep 14 14:44:59 2022 ] Eval epoch: 36
243
+ [ Wed Sep 14 14:45:31 2022 ] Mean test loss of 258 batches: 2.1365630626678467.
244
+ [ Wed Sep 14 14:45:32 2022 ] Top1: 54.51%
245
+ [ Wed Sep 14 14:45:32 2022 ] Top5: 84.55%
246
+ [ Wed Sep 14 14:45:32 2022 ] Training epoch: 37
247
+ [ Wed Sep 14 14:46:27 2022 ] Batch(71/123) done. Loss: 0.2958 lr:0.100000 network_time: 0.0255
248
+ [ Wed Sep 14 14:47:05 2022 ] Eval epoch: 37
249
+ [ Wed Sep 14 14:47:36 2022 ] Mean test loss of 258 batches: 2.1151015758514404.
250
+ [ Wed Sep 14 14:47:37 2022 ] Top1: 51.03%
251
+ [ Wed Sep 14 14:47:37 2022 ] Top5: 84.02%
252
+ [ Wed Sep 14 14:47:37 2022 ] Training epoch: 38
253
+ [ Wed Sep 14 14:48:15 2022 ] Batch(48/123) done. Loss: 0.2557 lr:0.100000 network_time: 0.0309
254
+ [ Wed Sep 14 14:49:10 2022 ] Eval epoch: 38
255
+ [ Wed Sep 14 14:49:41 2022 ] Mean test loss of 258 batches: 2.130195140838623.
256
+ [ Wed Sep 14 14:49:42 2022 ] Top1: 52.40%
257
+ [ Wed Sep 14 14:49:42 2022 ] Top5: 83.91%
258
+ [ Wed Sep 14 14:49:42 2022 ] Training epoch: 39
259
+ [ Wed Sep 14 14:50:03 2022 ] Batch(25/123) done. Loss: 0.2468 lr:0.100000 network_time: 0.0258
260
+ [ Wed Sep 14 14:51:15 2022 ] Eval epoch: 39
261
+ [ Wed Sep 14 14:51:47 2022 ] Mean test loss of 258 batches: 2.4296348094940186.
262
+ [ Wed Sep 14 14:51:47 2022 ] Top1: 52.08%
263
+ [ Wed Sep 14 14:51:47 2022 ] Top5: 83.84%
264
+ [ Wed Sep 14 14:51:47 2022 ] Training epoch: 40
265
+ [ Wed Sep 14 14:51:52 2022 ] Batch(2/123) done. Loss: 0.4386 lr:0.100000 network_time: 0.0269
266
+ [ Wed Sep 14 14:53:05 2022 ] Batch(102/123) done. Loss: 0.2768 lr:0.100000 network_time: 0.0305
267
+ [ Wed Sep 14 14:53:20 2022 ] Eval epoch: 40
268
+ [ Wed Sep 14 14:53:52 2022 ] Mean test loss of 258 batches: 2.3187801837921143.
269
+ [ Wed Sep 14 14:53:52 2022 ] Top1: 51.82%
270
+ [ Wed Sep 14 14:53:52 2022 ] Top5: 83.99%
271
+ [ Wed Sep 14 14:53:52 2022 ] Training epoch: 41
272
+ [ Wed Sep 14 14:54:54 2022 ] Batch(79/123) done. Loss: 0.2464 lr:0.100000 network_time: 0.0303
273
+ [ Wed Sep 14 14:55:25 2022 ] Eval epoch: 41
274
+ [ Wed Sep 14 14:55:58 2022 ] Mean test loss of 258 batches: 2.4942824840545654.
275
+ [ Wed Sep 14 14:55:58 2022 ] Top1: 51.19%
276
+ [ Wed Sep 14 14:55:58 2022 ] Top5: 83.19%
277
+ [ Wed Sep 14 14:55:58 2022 ] Training epoch: 42
278
+ [ Wed Sep 14 14:56:43 2022 ] Batch(56/123) done. Loss: 0.1342 lr:0.100000 network_time: 0.0293
279
+ [ Wed Sep 14 14:57:31 2022 ] Eval epoch: 42
280
+ [ Wed Sep 14 14:58:03 2022 ] Mean test loss of 258 batches: 2.3902950286865234.
281
+ [ Wed Sep 14 14:58:03 2022 ] Top1: 52.47%
282
+ [ Wed Sep 14 14:58:03 2022 ] Top5: 84.04%
283
+ [ Wed Sep 14 14:58:03 2022 ] Training epoch: 43
284
+ [ Wed Sep 14 14:58:31 2022 ] Batch(33/123) done. Loss: 0.1424 lr:0.100000 network_time: 0.0311
285
+ [ Wed Sep 14 14:59:36 2022 ] Eval epoch: 43
286
+ [ Wed Sep 14 15:00:08 2022 ] Mean test loss of 258 batches: 2.4120872020721436.
287
+ [ Wed Sep 14 15:00:08 2022 ] Top1: 49.75%
288
+ [ Wed Sep 14 15:00:08 2022 ] Top5: 81.69%
289
+ [ Wed Sep 14 15:00:08 2022 ] Training epoch: 44
290
+ [ Wed Sep 14 15:00:19 2022 ] Batch(10/123) done. Loss: 0.2357 lr:0.100000 network_time: 0.0313
291
+ [ Wed Sep 14 15:01:32 2022 ] Batch(110/123) done. Loss: 0.2504 lr:0.100000 network_time: 0.0287
292
+ [ Wed Sep 14 15:01:41 2022 ] Eval epoch: 44
293
+ [ Wed Sep 14 15:02:13 2022 ] Mean test loss of 258 batches: 1.7786136865615845.
294
+ [ Wed Sep 14 15:02:14 2022 ] Top1: 56.93%
295
+ [ Wed Sep 14 15:02:14 2022 ] Top5: 86.68%
296
+ [ Wed Sep 14 15:02:14 2022 ] Training epoch: 45
297
+ [ Wed Sep 14 15:03:21 2022 ] Batch(87/123) done. Loss: 0.2842 lr:0.100000 network_time: 0.0310
298
+ [ Wed Sep 14 15:03:47 2022 ] Eval epoch: 45
299
+ [ Wed Sep 14 15:04:19 2022 ] Mean test loss of 258 batches: 2.2016782760620117.
300
+ [ Wed Sep 14 15:04:19 2022 ] Top1: 55.22%
301
+ [ Wed Sep 14 15:04:19 2022 ] Top5: 86.05%
302
+ [ Wed Sep 14 15:04:19 2022 ] Training epoch: 46
303
+ [ Wed Sep 14 15:05:10 2022 ] Batch(64/123) done. Loss: 0.3073 lr:0.100000 network_time: 0.0285
304
+ [ Wed Sep 14 15:05:52 2022 ] Eval epoch: 46
305
+ [ Wed Sep 14 15:06:24 2022 ] Mean test loss of 258 batches: 2.2162413597106934.
306
+ [ Wed Sep 14 15:06:24 2022 ] Top1: 52.64%
307
+ [ Wed Sep 14 15:06:24 2022 ] Top5: 84.02%
308
+ [ Wed Sep 14 15:06:24 2022 ] Training epoch: 47
309
+ [ Wed Sep 14 15:06:58 2022 ] Batch(41/123) done. Loss: 0.6450 lr:0.100000 network_time: 0.0275
310
+ [ Wed Sep 14 15:07:57 2022 ] Eval epoch: 47
311
+ [ Wed Sep 14 15:08:29 2022 ] Mean test loss of 258 batches: 1.919111967086792.
312
+ [ Wed Sep 14 15:08:29 2022 ] Top1: 57.42%
313
+ [ Wed Sep 14 15:08:30 2022 ] Top5: 86.45%
314
+ [ Wed Sep 14 15:08:30 2022 ] Training epoch: 48
315
+ [ Wed Sep 14 15:08:46 2022 ] Batch(18/123) done. Loss: 0.1862 lr:0.100000 network_time: 0.0268
316
+ [ Wed Sep 14 15:09:59 2022 ] Batch(118/123) done. Loss: 0.2942 lr:0.100000 network_time: 0.0270
317
+ [ Wed Sep 14 15:10:02 2022 ] Eval epoch: 48
318
+ [ Wed Sep 14 15:10:35 2022 ] Mean test loss of 258 batches: 2.217275381088257.
319
+ [ Wed Sep 14 15:10:35 2022 ] Top1: 53.76%
320
+ [ Wed Sep 14 15:10:35 2022 ] Top5: 84.15%
321
+ [ Wed Sep 14 15:10:35 2022 ] Training epoch: 49
322
+ [ Wed Sep 14 15:11:48 2022 ] Batch(95/123) done. Loss: 0.2444 lr:0.100000 network_time: 0.0267
323
+ [ Wed Sep 14 15:12:08 2022 ] Eval epoch: 49
324
+ [ Wed Sep 14 15:12:40 2022 ] Mean test loss of 258 batches: 1.9189571142196655.
325
+ [ Wed Sep 14 15:12:40 2022 ] Top1: 56.74%
326
+ [ Wed Sep 14 15:12:41 2022 ] Top5: 87.08%
327
+ [ Wed Sep 14 15:12:41 2022 ] Training epoch: 50
328
+ [ Wed Sep 14 15:13:37 2022 ] Batch(72/123) done. Loss: 0.2989 lr:0.100000 network_time: 0.0322
329
+ [ Wed Sep 14 15:14:14 2022 ] Eval epoch: 50
330
+ [ Wed Sep 14 15:14:46 2022 ] Mean test loss of 258 batches: 1.8722784519195557.
331
+ [ Wed Sep 14 15:14:46 2022 ] Top1: 55.79%
332
+ [ Wed Sep 14 15:14:46 2022 ] Top5: 86.23%
333
+ [ Wed Sep 14 15:14:46 2022 ] Training epoch: 51
334
+ [ Wed Sep 14 15:15:26 2022 ] Batch(49/123) done. Loss: 0.1550 lr:0.100000 network_time: 0.0309
335
+ [ Wed Sep 14 15:16:19 2022 ] Eval epoch: 51
336
+ [ Wed Sep 14 15:16:51 2022 ] Mean test loss of 258 batches: 1.8202694654464722.
337
+ [ Wed Sep 14 15:16:51 2022 ] Top1: 57.06%
338
+ [ Wed Sep 14 15:16:51 2022 ] Top5: 87.21%
339
+ [ Wed Sep 14 15:16:51 2022 ] Training epoch: 52
340
+ [ Wed Sep 14 15:17:14 2022 ] Batch(26/123) done. Loss: 0.1605 lr:0.100000 network_time: 0.0289
341
+ [ Wed Sep 14 15:18:24 2022 ] Eval epoch: 52
342
+ [ Wed Sep 14 15:18:56 2022 ] Mean test loss of 258 batches: 2.127373695373535.
343
+ [ Wed Sep 14 15:18:56 2022 ] Top1: 53.71%
344
+ [ Wed Sep 14 15:18:57 2022 ] Top5: 84.94%
345
+ [ Wed Sep 14 15:18:57 2022 ] Training epoch: 53
346
+ [ Wed Sep 14 15:19:02 2022 ] Batch(3/123) done. Loss: 0.2372 lr:0.100000 network_time: 0.0279
347
+ [ Wed Sep 14 15:20:15 2022 ] Batch(103/123) done. Loss: 0.2728 lr:0.100000 network_time: 0.0277
348
+ [ Wed Sep 14 15:20:29 2022 ] Eval epoch: 53
349
+ [ Wed Sep 14 15:21:02 2022 ] Mean test loss of 258 batches: 2.177319049835205.
350
+ [ Wed Sep 14 15:21:02 2022 ] Top1: 51.14%
351
+ [ Wed Sep 14 15:21:02 2022 ] Top5: 83.88%
352
+ [ Wed Sep 14 15:21:02 2022 ] Training epoch: 54
353
+ [ Wed Sep 14 15:22:04 2022 ] Batch(80/123) done. Loss: 0.0702 lr:0.100000 network_time: 0.0273
354
+ [ Wed Sep 14 15:22:35 2022 ] Eval epoch: 54
355
+ [ Wed Sep 14 15:23:07 2022 ] Mean test loss of 258 batches: 1.9530134201049805.
356
+ [ Wed Sep 14 15:23:07 2022 ] Top1: 57.14%
357
+ [ Wed Sep 14 15:23:07 2022 ] Top5: 87.07%
358
+ [ Wed Sep 14 15:23:08 2022 ] Training epoch: 55
359
+ [ Wed Sep 14 15:23:53 2022 ] Batch(57/123) done. Loss: 0.1349 lr:0.100000 network_time: 0.0271
360
+ [ Wed Sep 14 15:24:40 2022 ] Eval epoch: 55
361
+ [ Wed Sep 14 15:25:12 2022 ] Mean test loss of 258 batches: 2.054978847503662.
362
+ [ Wed Sep 14 15:25:13 2022 ] Top1: 56.96%
363
+ [ Wed Sep 14 15:25:13 2022 ] Top5: 85.78%
364
+ [ Wed Sep 14 15:25:13 2022 ] Training epoch: 56
365
+ [ Wed Sep 14 15:25:42 2022 ] Batch(34/123) done. Loss: 0.1589 lr:0.100000 network_time: 0.0277
366
+ [ Wed Sep 14 15:26:46 2022 ] Eval epoch: 56
367
+ [ Wed Sep 14 15:27:18 2022 ] Mean test loss of 258 batches: 2.158838987350464.
368
+ [ Wed Sep 14 15:27:18 2022 ] Top1: 53.88%
369
+ [ Wed Sep 14 15:27:18 2022 ] Top5: 84.66%
370
+ [ Wed Sep 14 15:27:19 2022 ] Training epoch: 57
371
+ [ Wed Sep 14 15:27:30 2022 ] Batch(11/123) done. Loss: 0.3453 lr:0.100000 network_time: 0.0283
372
+ [ Wed Sep 14 15:28:43 2022 ] Batch(111/123) done. Loss: 0.2935 lr:0.100000 network_time: 0.0468
373
+ [ Wed Sep 14 15:28:51 2022 ] Eval epoch: 57
374
+ [ Wed Sep 14 15:29:24 2022 ] Mean test loss of 258 batches: 2.115934133529663.
375
+ [ Wed Sep 14 15:29:24 2022 ] Top1: 55.32%
376
+ [ Wed Sep 14 15:29:24 2022 ] Top5: 86.03%
377
+ [ Wed Sep 14 15:29:24 2022 ] Training epoch: 58
378
+ [ Wed Sep 14 15:30:32 2022 ] Batch(88/123) done. Loss: 0.3999 lr:0.100000 network_time: 0.0269
379
+ [ Wed Sep 14 15:30:57 2022 ] Eval epoch: 58
380
+ [ Wed Sep 14 15:31:29 2022 ] Mean test loss of 258 batches: 2.2344048023223877.
381
+ [ Wed Sep 14 15:31:29 2022 ] Top1: 53.47%
382
+ [ Wed Sep 14 15:31:29 2022 ] Top5: 84.13%
383
+ [ Wed Sep 14 15:31:29 2022 ] Training epoch: 59
384
+ [ Wed Sep 14 15:32:21 2022 ] Batch(65/123) done. Loss: 0.2181 lr:0.100000 network_time: 0.0271
385
+ [ Wed Sep 14 15:33:03 2022 ] Eval epoch: 59
386
+ [ Wed Sep 14 15:33:35 2022 ] Mean test loss of 258 batches: 2.1006672382354736.
387
+ [ Wed Sep 14 15:33:35 2022 ] Top1: 57.98%
388
+ [ Wed Sep 14 15:33:35 2022 ] Top5: 86.38%
389
+ [ Wed Sep 14 15:33:35 2022 ] Training epoch: 60
390
+ [ Wed Sep 14 15:34:10 2022 ] Batch(42/123) done. Loss: 0.1203 lr:0.100000 network_time: 0.0380
391
+ [ Wed Sep 14 15:35:08 2022 ] Eval epoch: 60
392
+ [ Wed Sep 14 15:35:40 2022 ] Mean test loss of 258 batches: 2.0645995140075684.
393
+ [ Wed Sep 14 15:35:41 2022 ] Top1: 56.20%
394
+ [ Wed Sep 14 15:35:41 2022 ] Top5: 84.93%
395
+ [ Wed Sep 14 15:35:41 2022 ] Training epoch: 61
396
+ [ Wed Sep 14 15:35:58 2022 ] Batch(19/123) done. Loss: 0.1308 lr:0.010000 network_time: 0.0309
397
+ [ Wed Sep 14 15:37:11 2022 ] Batch(119/123) done. Loss: 0.1849 lr:0.010000 network_time: 0.0279
398
+ [ Wed Sep 14 15:37:13 2022 ] Eval epoch: 61
399
+ [ Wed Sep 14 15:37:46 2022 ] Mean test loss of 258 batches: 1.7348182201385498.
400
+ [ Wed Sep 14 15:37:46 2022 ] Top1: 62.35%
401
+ [ Wed Sep 14 15:37:46 2022 ] Top5: 88.77%
402
+ [ Wed Sep 14 15:37:46 2022 ] Training epoch: 62
403
+ [ Wed Sep 14 15:38:59 2022 ] Batch(96/123) done. Loss: 0.0961 lr:0.010000 network_time: 0.0270
404
+ [ Wed Sep 14 15:39:19 2022 ] Eval epoch: 62
405
+ [ Wed Sep 14 15:39:51 2022 ] Mean test loss of 258 batches: 1.711524486541748.
406
+ [ Wed Sep 14 15:39:51 2022 ] Top1: 63.01%
407
+ [ Wed Sep 14 15:39:51 2022 ] Top5: 89.26%
408
+ [ Wed Sep 14 15:39:51 2022 ] Training epoch: 63
409
+ [ Wed Sep 14 15:40:48 2022 ] Batch(73/123) done. Loss: 0.0374 lr:0.010000 network_time: 0.0273
410
+ [ Wed Sep 14 15:41:24 2022 ] Eval epoch: 63
411
+ [ Wed Sep 14 15:41:56 2022 ] Mean test loss of 258 batches: 1.72061026096344.
412
+ [ Wed Sep 14 15:41:56 2022 ] Top1: 63.20%
413
+ [ Wed Sep 14 15:41:56 2022 ] Top5: 89.08%
414
+ [ Wed Sep 14 15:41:56 2022 ] Training epoch: 64
415
+ [ Wed Sep 14 15:42:37 2022 ] Batch(50/123) done. Loss: 0.0324 lr:0.010000 network_time: 0.0283
416
+ [ Wed Sep 14 15:43:29 2022 ] Eval epoch: 64
417
+ [ Wed Sep 14 15:44:02 2022 ] Mean test loss of 258 batches: 1.731319546699524.
418
+ [ Wed Sep 14 15:44:02 2022 ] Top1: 63.34%
419
+ [ Wed Sep 14 15:44:02 2022 ] Top5: 89.07%
420
+ [ Wed Sep 14 15:44:02 2022 ] Training epoch: 65
421
+ [ Wed Sep 14 15:44:25 2022 ] Batch(27/123) done. Loss: 0.0490 lr:0.010000 network_time: 0.0318
422
+ [ Wed Sep 14 15:45:35 2022 ] Eval epoch: 65
423
+ [ Wed Sep 14 15:46:07 2022 ] Mean test loss of 258 batches: 1.735913634300232.
424
+ [ Wed Sep 14 15:46:07 2022 ] Top1: 63.48%
425
+ [ Wed Sep 14 15:46:07 2022 ] Top5: 89.29%
426
+ [ Wed Sep 14 15:46:08 2022 ] Training epoch: 66
427
+ [ Wed Sep 14 15:46:14 2022 ] Batch(4/123) done. Loss: 0.0260 lr:0.010000 network_time: 0.0324
428
+ [ Wed Sep 14 15:47:27 2022 ] Batch(104/123) done. Loss: 0.0232 lr:0.010000 network_time: 0.0294
429
+ [ Wed Sep 14 15:47:40 2022 ] Eval epoch: 66
430
+ [ Wed Sep 14 15:48:13 2022 ] Mean test loss of 258 batches: 1.7407399415969849.
431
+ [ Wed Sep 14 15:48:13 2022 ] Top1: 63.42%
432
+ [ Wed Sep 14 15:48:13 2022 ] Top5: 89.34%
433
+ [ Wed Sep 14 15:48:13 2022 ] Training epoch: 67
434
+ [ Wed Sep 14 15:49:15 2022 ] Batch(81/123) done. Loss: 0.0287 lr:0.010000 network_time: 0.0282
435
+ [ Wed Sep 14 15:49:46 2022 ] Eval epoch: 67
436
+ [ Wed Sep 14 15:50:18 2022 ] Mean test loss of 258 batches: 1.7624691724777222.
437
+ [ Wed Sep 14 15:50:18 2022 ] Top1: 63.43%
438
+ [ Wed Sep 14 15:50:18 2022 ] Top5: 89.25%
439
+ [ Wed Sep 14 15:50:18 2022 ] Training epoch: 68
440
+ [ Wed Sep 14 15:51:04 2022 ] Batch(58/123) done. Loss: 0.0116 lr:0.010000 network_time: 0.0266
441
+ [ Wed Sep 14 15:51:51 2022 ] Eval epoch: 68
442
+ [ Wed Sep 14 15:52:23 2022 ] Mean test loss of 258 batches: 1.7701879739761353.
443
+ [ Wed Sep 14 15:52:24 2022 ] Top1: 63.40%
444
+ [ Wed Sep 14 15:52:24 2022 ] Top5: 89.14%
445
+ [ Wed Sep 14 15:52:24 2022 ] Training epoch: 69
446
+ [ Wed Sep 14 15:52:53 2022 ] Batch(35/123) done. Loss: 0.0248 lr:0.010000 network_time: 0.0269
447
+ [ Wed Sep 14 15:53:57 2022 ] Eval epoch: 69
448
+ [ Wed Sep 14 15:54:29 2022 ] Mean test loss of 258 batches: 1.7476320266723633.
449
+ [ Wed Sep 14 15:54:29 2022 ] Top1: 63.65%
450
+ [ Wed Sep 14 15:54:29 2022 ] Top5: 89.06%
451
+ [ Wed Sep 14 15:54:29 2022 ] Training epoch: 70
452
+ [ Wed Sep 14 15:54:42 2022 ] Batch(12/123) done. Loss: 0.0127 lr:0.010000 network_time: 0.0290
453
+ [ Wed Sep 14 15:55:55 2022 ] Batch(112/123) done. Loss: 0.0187 lr:0.010000 network_time: 0.0298
454
+ [ Wed Sep 14 15:56:02 2022 ] Eval epoch: 70
455
+ [ Wed Sep 14 15:56:35 2022 ] Mean test loss of 258 batches: 1.746610403060913.
456
+ [ Wed Sep 14 15:56:35 2022 ] Top1: 63.94%
457
+ [ Wed Sep 14 15:56:35 2022 ] Top5: 89.23%
458
+ [ Wed Sep 14 15:56:35 2022 ] Training epoch: 71
459
+ [ Wed Sep 14 15:57:43 2022 ] Batch(89/123) done. Loss: 0.0119 lr:0.010000 network_time: 0.0263
460
+ [ Wed Sep 14 15:58:08 2022 ] Eval epoch: 71
461
+ [ Wed Sep 14 15:58:40 2022 ] Mean test loss of 258 batches: 1.7734960317611694.
462
+ [ Wed Sep 14 15:58:40 2022 ] Top1: 63.64%
463
+ [ Wed Sep 14 15:58:40 2022 ] Top5: 89.36%
464
+ [ Wed Sep 14 15:58:40 2022 ] Training epoch: 72
465
+ [ Wed Sep 14 15:59:32 2022 ] Batch(66/123) done. Loss: 0.0212 lr:0.010000 network_time: 0.0329
466
+ [ Wed Sep 14 16:00:13 2022 ] Eval epoch: 72
467
+ [ Wed Sep 14 16:00:46 2022 ] Mean test loss of 258 batches: 1.7869070768356323.
468
+ [ Wed Sep 14 16:00:46 2022 ] Top1: 63.77%
469
+ [ Wed Sep 14 16:00:46 2022 ] Top5: 89.18%
470
+ [ Wed Sep 14 16:00:46 2022 ] Training epoch: 73
471
+ [ Wed Sep 14 16:01:21 2022 ] Batch(43/123) done. Loss: 0.0425 lr:0.010000 network_time: 0.0276
472
+ [ Wed Sep 14 16:02:19 2022 ] Eval epoch: 73
473
+ [ Wed Sep 14 16:02:51 2022 ] Mean test loss of 258 batches: 1.7861807346343994.
474
+ [ Wed Sep 14 16:02:51 2022 ] Top1: 63.49%
475
+ [ Wed Sep 14 16:02:52 2022 ] Top5: 89.21%
476
+ [ Wed Sep 14 16:02:52 2022 ] Training epoch: 74
477
+ [ Wed Sep 14 16:03:10 2022 ] Batch(20/123) done. Loss: 0.0187 lr:0.010000 network_time: 0.0348
478
+ [ Wed Sep 14 16:04:23 2022 ] Batch(120/123) done. Loss: 0.0111 lr:0.010000 network_time: 0.0269
479
+ [ Wed Sep 14 16:04:25 2022 ] Eval epoch: 74
480
+ [ Wed Sep 14 16:04:57 2022 ] Mean test loss of 258 batches: 1.8124498128890991.
481
+ [ Wed Sep 14 16:04:57 2022 ] Top1: 63.50%
482
+ [ Wed Sep 14 16:04:57 2022 ] Top5: 89.26%
483
+ [ Wed Sep 14 16:04:57 2022 ] Training epoch: 75
484
+ [ Wed Sep 14 16:06:11 2022 ] Batch(97/123) done. Loss: 0.0189 lr:0.010000 network_time: 0.0304
485
+ [ Wed Sep 14 16:06:30 2022 ] Eval epoch: 75
486
+ [ Wed Sep 14 16:07:02 2022 ] Mean test loss of 258 batches: 1.7966654300689697.
487
+ [ Wed Sep 14 16:07:02 2022 ] Top1: 63.86%
488
+ [ Wed Sep 14 16:07:02 2022 ] Top5: 89.43%
489
+ [ Wed Sep 14 16:07:02 2022 ] Training epoch: 76
490
+ [ Wed Sep 14 16:08:00 2022 ] Batch(74/123) done. Loss: 0.0252 lr:0.010000 network_time: 0.0298
491
+ [ Wed Sep 14 16:08:35 2022 ] Eval epoch: 76
492
+ [ Wed Sep 14 16:09:08 2022 ] Mean test loss of 258 batches: 1.8263705968856812.
493
+ [ Wed Sep 14 16:09:08 2022 ] Top1: 63.57%
494
+ [ Wed Sep 14 16:09:08 2022 ] Top5: 88.96%
495
+ [ Wed Sep 14 16:09:08 2022 ] Training epoch: 77
496
+ [ Wed Sep 14 16:09:49 2022 ] Batch(51/123) done. Loss: 0.0034 lr:0.010000 network_time: 0.0268
497
+ [ Wed Sep 14 16:10:41 2022 ] Eval epoch: 77
498
+ [ Wed Sep 14 16:11:13 2022 ] Mean test loss of 258 batches: 1.7833702564239502.
499
+ [ Wed Sep 14 16:11:13 2022 ] Top1: 63.86%
500
+ [ Wed Sep 14 16:11:13 2022 ] Top5: 89.27%
501
+ [ Wed Sep 14 16:11:13 2022 ] Training epoch: 78
502
+ [ Wed Sep 14 16:11:37 2022 ] Batch(28/123) done. Loss: 0.0234 lr:0.010000 network_time: 0.0280
503
+ [ Wed Sep 14 16:12:46 2022 ] Eval epoch: 78
504
+ [ Wed Sep 14 16:13:19 2022 ] Mean test loss of 258 batches: 1.8438570499420166.
505
+ [ Wed Sep 14 16:13:19 2022 ] Top1: 63.21%
506
+ [ Wed Sep 14 16:13:19 2022 ] Top5: 88.88%
507
+ [ Wed Sep 14 16:13:19 2022 ] Training epoch: 79
508
+ [ Wed Sep 14 16:13:26 2022 ] Batch(5/123) done. Loss: 0.0124 lr:0.010000 network_time: 0.0236
509
+ [ Wed Sep 14 16:14:39 2022 ] Batch(105/123) done. Loss: 0.0067 lr:0.010000 network_time: 0.0304
510
+ [ Wed Sep 14 16:14:52 2022 ] Eval epoch: 79
511
+ [ Wed Sep 14 16:15:24 2022 ] Mean test loss of 258 batches: 1.7994800806045532.
512
+ [ Wed Sep 14 16:15:24 2022 ] Top1: 63.84%
513
+ [ Wed Sep 14 16:15:24 2022 ] Top5: 89.23%
514
+ [ Wed Sep 14 16:15:24 2022 ] Training epoch: 80
515
+ [ Wed Sep 14 16:16:27 2022 ] Batch(82/123) done. Loss: 0.0106 lr:0.010000 network_time: 0.0276
516
+ [ Wed Sep 14 16:16:57 2022 ] Eval epoch: 80
517
+ [ Wed Sep 14 16:17:28 2022 ] Mean test loss of 258 batches: 1.8134398460388184.
518
+ [ Wed Sep 14 16:17:29 2022 ] Top1: 63.86%
519
+ [ Wed Sep 14 16:17:29 2022 ] Top5: 89.26%
520
+ [ Wed Sep 14 16:17:29 2022 ] Training epoch: 81
521
+ [ Wed Sep 14 16:18:15 2022 ] Batch(59/123) done. Loss: 0.0091 lr:0.001000 network_time: 0.0297
522
+ [ Wed Sep 14 16:19:02 2022 ] Eval epoch: 81
523
+ [ Wed Sep 14 16:19:34 2022 ] Mean test loss of 258 batches: 1.796744704246521.
524
+ [ Wed Sep 14 16:19:34 2022 ] Top1: 63.98%
525
+ [ Wed Sep 14 16:19:34 2022 ] Top5: 89.26%
526
+ [ Wed Sep 14 16:19:34 2022 ] Training epoch: 82
527
+ [ Wed Sep 14 16:20:04 2022 ] Batch(36/123) done. Loss: 0.0092 lr:0.001000 network_time: 0.0268
528
+ [ Wed Sep 14 16:21:07 2022 ] Eval epoch: 82
529
+ [ Wed Sep 14 16:21:40 2022 ] Mean test loss of 258 batches: 1.843143343925476.
530
+ [ Wed Sep 14 16:21:40 2022 ] Top1: 63.77%
531
+ [ Wed Sep 14 16:21:40 2022 ] Top5: 89.06%
532
+ [ Wed Sep 14 16:21:40 2022 ] Training epoch: 83
533
+ [ Wed Sep 14 16:21:53 2022 ] Batch(13/123) done. Loss: 0.0141 lr:0.001000 network_time: 0.0311
534
+ [ Wed Sep 14 16:23:06 2022 ] Batch(113/123) done. Loss: 0.0083 lr:0.001000 network_time: 0.0270
535
+ [ Wed Sep 14 16:23:13 2022 ] Eval epoch: 83
536
+ [ Wed Sep 14 16:23:45 2022 ] Mean test loss of 258 batches: 1.8037279844284058.
537
+ [ Wed Sep 14 16:23:45 2022 ] Top1: 63.89%
538
+ [ Wed Sep 14 16:23:45 2022 ] Top5: 89.27%
539
+ [ Wed Sep 14 16:23:45 2022 ] Training epoch: 84
540
+ [ Wed Sep 14 16:24:55 2022 ] Batch(90/123) done. Loss: 0.0043 lr:0.001000 network_time: 0.0329
541
+ [ Wed Sep 14 16:25:18 2022 ] Eval epoch: 84
542
+ [ Wed Sep 14 16:25:50 2022 ] Mean test loss of 258 batches: 1.8453911542892456.
543
+ [ Wed Sep 14 16:25:51 2022 ] Top1: 63.47%
544
+ [ Wed Sep 14 16:25:51 2022 ] Top5: 89.03%
545
+ [ Wed Sep 14 16:25:51 2022 ] Training epoch: 85
546
+ [ Wed Sep 14 16:26:43 2022 ] Batch(67/123) done. Loss: 0.0063 lr:0.001000 network_time: 0.0268
547
+ [ Wed Sep 14 16:27:24 2022 ] Eval epoch: 85
548
+ [ Wed Sep 14 16:27:56 2022 ] Mean test loss of 258 batches: 1.8150367736816406.
549
+ [ Wed Sep 14 16:27:56 2022 ] Top1: 63.74%
550
+ [ Wed Sep 14 16:27:56 2022 ] Top5: 89.11%
551
+ [ Wed Sep 14 16:27:56 2022 ] Training epoch: 86
552
+ [ Wed Sep 14 16:28:32 2022 ] Batch(44/123) done. Loss: 0.0100 lr:0.001000 network_time: 0.0273
553
+ [ Wed Sep 14 16:29:29 2022 ] Eval epoch: 86
554
+ [ Wed Sep 14 16:30:01 2022 ] Mean test loss of 258 batches: 1.7877106666564941.
555
+ [ Wed Sep 14 16:30:01 2022 ] Top1: 64.03%
556
+ [ Wed Sep 14 16:30:01 2022 ] Top5: 89.32%
557
+ [ Wed Sep 14 16:30:01 2022 ] Training epoch: 87
558
+ [ Wed Sep 14 16:30:21 2022 ] Batch(21/123) done. Loss: 0.0049 lr:0.001000 network_time: 0.0266
559
+ [ Wed Sep 14 16:31:34 2022 ] Batch(121/123) done. Loss: 0.0076 lr:0.001000 network_time: 0.0262
560
+ [ Wed Sep 14 16:31:35 2022 ] Eval epoch: 87
561
+ [ Wed Sep 14 16:32:07 2022 ] Mean test loss of 258 batches: 1.8418843746185303.
562
+ [ Wed Sep 14 16:32:07 2022 ] Top1: 63.81%
563
+ [ Wed Sep 14 16:32:07 2022 ] Top5: 88.97%
564
+ [ Wed Sep 14 16:32:07 2022 ] Training epoch: 88
565
+ [ Wed Sep 14 16:33:23 2022 ] Batch(98/123) done. Loss: 0.0163 lr:0.001000 network_time: 0.0261
566
+ [ Wed Sep 14 16:33:41 2022 ] Eval epoch: 88
567
+ [ Wed Sep 14 16:34:13 2022 ] Mean test loss of 258 batches: 1.8131529092788696.
568
+ [ Wed Sep 14 16:34:13 2022 ] Top1: 63.85%
569
+ [ Wed Sep 14 16:34:13 2022 ] Top5: 89.12%
570
+ [ Wed Sep 14 16:34:13 2022 ] Training epoch: 89
571
+ [ Wed Sep 14 16:35:11 2022 ] Batch(75/123) done. Loss: 0.0171 lr:0.001000 network_time: 0.0283
572
+ [ Wed Sep 14 16:35:46 2022 ] Eval epoch: 89
573
+ [ Wed Sep 14 16:36:18 2022 ] Mean test loss of 258 batches: 1.7912170886993408.
574
+ [ Wed Sep 14 16:36:18 2022 ] Top1: 64.00%
575
+ [ Wed Sep 14 16:36:18 2022 ] Top5: 89.34%
576
+ [ Wed Sep 14 16:36:18 2022 ] Training epoch: 90
577
+ [ Wed Sep 14 16:37:00 2022 ] Batch(52/123) done. Loss: 0.0198 lr:0.001000 network_time: 0.0267
578
+ [ Wed Sep 14 16:37:51 2022 ] Eval epoch: 90
579
+ [ Wed Sep 14 16:38:23 2022 ] Mean test loss of 258 batches: 1.8016085624694824.
580
+ [ Wed Sep 14 16:38:24 2022 ] Top1: 64.03%
581
+ [ Wed Sep 14 16:38:24 2022 ] Top5: 89.33%
582
+ [ Wed Sep 14 16:38:24 2022 ] Training epoch: 91
583
+ [ Wed Sep 14 16:38:49 2022 ] Batch(29/123) done. Loss: 0.0090 lr:0.001000 network_time: 0.0474
584
+ [ Wed Sep 14 16:39:57 2022 ] Eval epoch: 91
585
+ [ Wed Sep 14 16:40:29 2022 ] Mean test loss of 258 batches: 1.809605360031128.
586
+ [ Wed Sep 14 16:40:29 2022 ] Top1: 64.01%
587
+ [ Wed Sep 14 16:40:29 2022 ] Top5: 89.17%
588
+ [ Wed Sep 14 16:40:29 2022 ] Training epoch: 92
589
+ [ Wed Sep 14 16:40:37 2022 ] Batch(6/123) done. Loss: 0.0152 lr:0.001000 network_time: 0.0268
590
+ [ Wed Sep 14 16:41:50 2022 ] Batch(106/123) done. Loss: 0.0232 lr:0.001000 network_time: 0.0269
591
+ [ Wed Sep 14 16:42:02 2022 ] Eval epoch: 92
592
+ [ Wed Sep 14 16:42:34 2022 ] Mean test loss of 258 batches: 1.8451917171478271.
593
+ [ Wed Sep 14 16:42:34 2022 ] Top1: 63.58%
594
+ [ Wed Sep 14 16:42:35 2022 ] Top5: 89.08%
595
+ [ Wed Sep 14 16:42:35 2022 ] Training epoch: 93
596
+ [ Wed Sep 14 16:43:39 2022 ] Batch(83/123) done. Loss: 0.0068 lr:0.001000 network_time: 0.0269
597
+ [ Wed Sep 14 16:44:08 2022 ] Eval epoch: 93
598
+ [ Wed Sep 14 16:44:40 2022 ] Mean test loss of 258 batches: 1.8375056982040405.
599
+ [ Wed Sep 14 16:44:40 2022 ] Top1: 63.52%
600
+ [ Wed Sep 14 16:44:40 2022 ] Top5: 89.05%
601
+ [ Wed Sep 14 16:44:40 2022 ] Training epoch: 94
602
+ [ Wed Sep 14 16:45:28 2022 ] Batch(60/123) done. Loss: 0.0073 lr:0.001000 network_time: 0.0271
603
+ [ Wed Sep 14 16:46:13 2022 ] Eval epoch: 94
604
+ [ Wed Sep 14 16:46:45 2022 ] Mean test loss of 258 batches: 1.7938319444656372.
605
+ [ Wed Sep 14 16:46:46 2022 ] Top1: 64.17%
606
+ [ Wed Sep 14 16:46:46 2022 ] Top5: 89.06%
607
+ [ Wed Sep 14 16:46:46 2022 ] Training epoch: 95
608
+ [ Wed Sep 14 16:47:17 2022 ] Batch(37/123) done. Loss: 0.0082 lr:0.001000 network_time: 0.0267
609
+ [ Wed Sep 14 16:48:19 2022 ] Eval epoch: 95
610
+ [ Wed Sep 14 16:48:52 2022 ] Mean test loss of 258 batches: 1.8545280694961548.
611
+ [ Wed Sep 14 16:48:52 2022 ] Top1: 63.48%
612
+ [ Wed Sep 14 16:48:52 2022 ] Top5: 89.26%
613
+ [ Wed Sep 14 16:48:52 2022 ] Training epoch: 96
614
+ [ Wed Sep 14 16:49:06 2022 ] Batch(14/123) done. Loss: 0.0101 lr:0.001000 network_time: 0.0299
615
+ [ Wed Sep 14 16:50:19 2022 ] Batch(114/123) done. Loss: 0.0086 lr:0.001000 network_time: 0.0273
616
+ [ Wed Sep 14 16:50:25 2022 ] Eval epoch: 96
617
+ [ Wed Sep 14 16:50:58 2022 ] Mean test loss of 258 batches: 1.7887837886810303.
618
+ [ Wed Sep 14 16:50:58 2022 ] Top1: 64.00%
619
+ [ Wed Sep 14 16:50:58 2022 ] Top5: 89.36%
620
+ [ Wed Sep 14 16:50:58 2022 ] Training epoch: 97
621
+ [ Wed Sep 14 16:52:08 2022 ] Batch(91/123) done. Loss: 0.0042 lr:0.001000 network_time: 0.0311
622
+ [ Wed Sep 14 16:52:31 2022 ] Eval epoch: 97
623
+ [ Wed Sep 14 16:53:03 2022 ] Mean test loss of 258 batches: 1.8494559526443481.
624
+ [ Wed Sep 14 16:53:03 2022 ] Top1: 63.47%
625
+ [ Wed Sep 14 16:53:03 2022 ] Top5: 89.13%
626
+ [ Wed Sep 14 16:53:03 2022 ] Training epoch: 98
627
+ [ Wed Sep 14 16:53:56 2022 ] Batch(68/123) done. Loss: 0.0138 lr:0.001000 network_time: 0.0329
628
+ [ Wed Sep 14 16:54:36 2022 ] Eval epoch: 98
629
+ [ Wed Sep 14 16:55:08 2022 ] Mean test loss of 258 batches: 1.7884374856948853.
630
+ [ Wed Sep 14 16:55:08 2022 ] Top1: 63.95%
631
+ [ Wed Sep 14 16:55:08 2022 ] Top5: 89.45%
632
+ [ Wed Sep 14 16:55:08 2022 ] Training epoch: 99
633
+ [ Wed Sep 14 16:55:45 2022 ] Batch(45/123) done. Loss: 0.0193 lr:0.001000 network_time: 0.0327
634
+ [ Wed Sep 14 16:56:41 2022 ] Eval epoch: 99
635
+ [ Wed Sep 14 16:57:14 2022 ] Mean test loss of 258 batches: 1.8234803676605225.
636
+ [ Wed Sep 14 16:57:14 2022 ] Top1: 63.93%
637
+ [ Wed Sep 14 16:57:14 2022 ] Top5: 89.20%
638
+ [ Wed Sep 14 16:57:14 2022 ] Training epoch: 100
639
+ [ Wed Sep 14 16:57:34 2022 ] Batch(22/123) done. Loss: 0.0035 lr:0.001000 network_time: 0.0282
640
+ [ Wed Sep 14 16:58:47 2022 ] Batch(122/123) done. Loss: 0.0035 lr:0.001000 network_time: 0.0290
641
+ [ Wed Sep 14 16:58:47 2022 ] Eval epoch: 100
642
+ [ Wed Sep 14 16:59:19 2022 ] Mean test loss of 258 batches: 1.8526291847229004.
643
+ [ Wed Sep 14 16:59:19 2022 ] Top1: 63.60%
644
+ [ Wed Sep 14 16:59:19 2022 ] Top5: 89.09%
645
+ [ Wed Sep 14 16:59:20 2022 ] Training epoch: 101
646
+ [ Wed Sep 14 17:00:36 2022 ] Batch(99/123) done. Loss: 0.0040 lr:0.000100 network_time: 0.0436
647
+ [ Wed Sep 14 17:00:53 2022 ] Eval epoch: 101
648
+ [ Wed Sep 14 17:01:25 2022 ] Mean test loss of 258 batches: 1.860023021697998.
649
+ [ Wed Sep 14 17:01:25 2022 ] Top1: 63.39%
650
+ [ Wed Sep 14 17:01:26 2022 ] Top5: 89.03%
651
+ [ Wed Sep 14 17:01:26 2022 ] Training epoch: 102
652
+ [ Wed Sep 14 17:02:25 2022 ] Batch(76/123) done. Loss: 0.0103 lr:0.000100 network_time: 0.0254
653
+ [ Wed Sep 14 17:02:59 2022 ] Eval epoch: 102
654
+ [ Wed Sep 14 17:03:31 2022 ] Mean test loss of 258 batches: 1.8325798511505127.
655
+ [ Wed Sep 14 17:03:31 2022 ] Top1: 63.43%
656
+ [ Wed Sep 14 17:03:31 2022 ] Top5: 89.12%
657
+ [ Wed Sep 14 17:03:31 2022 ] Training epoch: 103
658
+ [ Wed Sep 14 17:04:13 2022 ] Batch(53/123) done. Loss: 0.0046 lr:0.000100 network_time: 0.0269
659
+ [ Wed Sep 14 17:05:04 2022 ] Eval epoch: 103
660
+ [ Wed Sep 14 17:05:36 2022 ] Mean test loss of 258 batches: 1.7819689512252808.
661
+ [ Wed Sep 14 17:05:36 2022 ] Top1: 64.24%
662
+ [ Wed Sep 14 17:05:36 2022 ] Top5: 89.63%
663
+ [ Wed Sep 14 17:05:36 2022 ] Training epoch: 104
664
+ [ Wed Sep 14 17:06:02 2022 ] Batch(30/123) done. Loss: 0.0073 lr:0.000100 network_time: 0.0252
665
+ [ Wed Sep 14 17:07:09 2022 ] Eval epoch: 104
666
+ [ Wed Sep 14 17:07:42 2022 ] Mean test loss of 258 batches: 1.8267995119094849.
667
+ [ Wed Sep 14 17:07:42 2022 ] Top1: 63.73%
668
+ [ Wed Sep 14 17:07:42 2022 ] Top5: 89.19%
669
+ [ Wed Sep 14 17:07:42 2022 ] Training epoch: 105
670
+ [ Wed Sep 14 17:07:51 2022 ] Batch(7/123) done. Loss: 0.0092 lr:0.000100 network_time: 0.0260
671
+ [ Wed Sep 14 17:09:04 2022 ] Batch(107/123) done. Loss: 0.0127 lr:0.000100 network_time: 0.0264
672
+ [ Wed Sep 14 17:09:15 2022 ] Eval epoch: 105
673
+ [ Wed Sep 14 17:09:47 2022 ] Mean test loss of 258 batches: 1.8645508289337158.
674
+ [ Wed Sep 14 17:09:47 2022 ] Top1: 63.26%
675
+ [ Wed Sep 14 17:09:47 2022 ] Top5: 88.95%
676
+ [ Wed Sep 14 17:09:47 2022 ] Training epoch: 106
677
+ [ Wed Sep 14 17:10:52 2022 ] Batch(84/123) done. Loss: 0.0067 lr:0.000100 network_time: 0.0281
678
+ [ Wed Sep 14 17:11:20 2022 ] Eval epoch: 106
679
+ [ Wed Sep 14 17:11:53 2022 ] Mean test loss of 258 batches: 1.7852859497070312.
680
+ [ Wed Sep 14 17:11:53 2022 ] Top1: 64.12%
681
+ [ Wed Sep 14 17:11:53 2022 ] Top5: 89.42%
682
+ [ Wed Sep 14 17:11:53 2022 ] Training epoch: 107
683
+ [ Wed Sep 14 17:12:41 2022 ] Batch(61/123) done. Loss: 0.0074 lr:0.000100 network_time: 0.0451
684
+ [ Wed Sep 14 17:13:26 2022 ] Eval epoch: 107
685
+ [ Wed Sep 14 17:13:58 2022 ] Mean test loss of 258 batches: 1.8736193180084229.
686
+ [ Wed Sep 14 17:13:58 2022 ] Top1: 63.23%
687
+ [ Wed Sep 14 17:13:58 2022 ] Top5: 88.91%
688
+ [ Wed Sep 14 17:13:58 2022 ] Training epoch: 108
689
+ [ Wed Sep 14 17:14:29 2022 ] Batch(38/123) done. Loss: 0.0282 lr:0.000100 network_time: 0.0286
690
+ [ Wed Sep 14 17:15:31 2022 ] Eval epoch: 108
691
+ [ Wed Sep 14 17:16:03 2022 ] Mean test loss of 258 batches: 1.816180944442749.
692
+ [ Wed Sep 14 17:16:03 2022 ] Top1: 64.07%
693
+ [ Wed Sep 14 17:16:03 2022 ] Top5: 89.38%
694
+ [ Wed Sep 14 17:16:03 2022 ] Training epoch: 109
695
+ [ Wed Sep 14 17:16:18 2022 ] Batch(15/123) done. Loss: 0.0097 lr:0.000100 network_time: 0.0254
696
+ [ Wed Sep 14 17:17:30 2022 ] Batch(115/123) done. Loss: 0.0088 lr:0.000100 network_time: 0.0285
697
+ [ Wed Sep 14 17:17:36 2022 ] Eval epoch: 109
698
+ [ Wed Sep 14 17:18:08 2022 ] Mean test loss of 258 batches: 1.824914813041687.
699
+ [ Wed Sep 14 17:18:08 2022 ] Top1: 64.07%
700
+ [ Wed Sep 14 17:18:08 2022 ] Top5: 89.18%
701
+ [ Wed Sep 14 17:18:09 2022 ] Training epoch: 110
702
+ [ Wed Sep 14 17:19:19 2022 ] Batch(92/123) done. Loss: 0.0085 lr:0.000100 network_time: 0.0321
703
+ [ Wed Sep 14 17:19:41 2022 ] Eval epoch: 110
704
+ [ Wed Sep 14 17:20:14 2022 ] Mean test loss of 258 batches: 1.8336659669876099.
705
+ [ Wed Sep 14 17:20:14 2022 ] Top1: 63.71%
706
+ [ Wed Sep 14 17:20:14 2022 ] Top5: 89.11%
707
+ [ Wed Sep 14 17:20:14 2022 ] Training epoch: 111
708
+ [ Wed Sep 14 17:21:08 2022 ] Batch(69/123) done. Loss: 0.0051 lr:0.000100 network_time: 0.0321
709
+ [ Wed Sep 14 17:21:47 2022 ] Eval epoch: 111
710
+ [ Wed Sep 14 17:22:19 2022 ] Mean test loss of 258 batches: 1.8149291276931763.
711
+ [ Wed Sep 14 17:22:19 2022 ] Top1: 64.02%
712
+ [ Wed Sep 14 17:22:19 2022 ] Top5: 89.28%
713
+ [ Wed Sep 14 17:22:19 2022 ] Training epoch: 112
714
+ [ Wed Sep 14 17:22:57 2022 ] Batch(46/123) done. Loss: 0.0036 lr:0.000100 network_time: 0.0277
715
+ [ Wed Sep 14 17:23:53 2022 ] Eval epoch: 112
716
+ [ Wed Sep 14 17:24:24 2022 ] Mean test loss of 258 batches: 1.805535912513733.
717
+ [ Wed Sep 14 17:24:25 2022 ] Top1: 63.88%
718
+ [ Wed Sep 14 17:24:25 2022 ] Top5: 89.33%
719
+ [ Wed Sep 14 17:24:25 2022 ] Training epoch: 113
720
+ [ Wed Sep 14 17:24:45 2022 ] Batch(23/123) done. Loss: 0.0105 lr:0.000100 network_time: 0.0273
721
+ [ Wed Sep 14 17:25:58 2022 ] Eval epoch: 113
722
+ [ Wed Sep 14 17:26:30 2022 ] Mean test loss of 258 batches: 1.8021278381347656.
723
+ [ Wed Sep 14 17:26:31 2022 ] Top1: 63.98%
724
+ [ Wed Sep 14 17:26:31 2022 ] Top5: 89.25%
725
+ [ Wed Sep 14 17:26:31 2022 ] Training epoch: 114
726
+ [ Wed Sep 14 17:26:34 2022 ] Batch(0/123) done. Loss: 0.0183 lr:0.000100 network_time: 0.0544
727
+ [ Wed Sep 14 17:27:47 2022 ] Batch(100/123) done. Loss: 0.0082 lr:0.000100 network_time: 0.0264
728
+ [ Wed Sep 14 17:28:03 2022 ] Eval epoch: 114
729
+ [ Wed Sep 14 17:28:36 2022 ] Mean test loss of 258 batches: 1.8260608911514282.
730
+ [ Wed Sep 14 17:28:36 2022 ] Top1: 63.96%
731
+ [ Wed Sep 14 17:28:36 2022 ] Top5: 89.14%
732
+ [ Wed Sep 14 17:28:36 2022 ] Training epoch: 115
733
+ [ Wed Sep 14 17:29:36 2022 ] Batch(77/123) done. Loss: 0.0054 lr:0.000100 network_time: 0.0308
734
+ [ Wed Sep 14 17:30:09 2022 ] Eval epoch: 115
735
+ [ Wed Sep 14 17:30:41 2022 ] Mean test loss of 258 batches: 1.7994446754455566.
736
+ [ Wed Sep 14 17:30:41 2022 ] Top1: 64.01%
737
+ [ Wed Sep 14 17:30:41 2022 ] Top5: 89.37%
738
+ [ Wed Sep 14 17:30:41 2022 ] Training epoch: 116
739
+ [ Wed Sep 14 17:31:24 2022 ] Batch(54/123) done. Loss: 0.0119 lr:0.000100 network_time: 0.0250
740
+ [ Wed Sep 14 17:32:14 2022 ] Eval epoch: 116
741
+ [ Wed Sep 14 17:32:47 2022 ] Mean test loss of 258 batches: 1.830991506576538.
742
+ [ Wed Sep 14 17:32:47 2022 ] Top1: 63.83%
743
+ [ Wed Sep 14 17:32:47 2022 ] Top5: 89.22%
744
+ [ Wed Sep 14 17:32:47 2022 ] Training epoch: 117
745
+ [ Wed Sep 14 17:33:14 2022 ] Batch(31/123) done. Loss: 0.0070 lr:0.000100 network_time: 0.0278
746
+ [ Wed Sep 14 17:34:20 2022 ] Eval epoch: 117
747
+ [ Wed Sep 14 17:34:53 2022 ] Mean test loss of 258 batches: 1.827774167060852.
748
+ [ Wed Sep 14 17:34:53 2022 ] Top1: 63.74%
749
+ [ Wed Sep 14 17:34:53 2022 ] Top5: 89.23%
750
+ [ Wed Sep 14 17:34:53 2022 ] Training epoch: 118
751
+ [ Wed Sep 14 17:35:03 2022 ] Batch(8/123) done. Loss: 0.0177 lr:0.000100 network_time: 0.0267
752
+ [ Wed Sep 14 17:36:16 2022 ] Batch(108/123) done. Loss: 0.0133 lr:0.000100 network_time: 0.0270
753
+ [ Wed Sep 14 17:36:26 2022 ] Eval epoch: 118
754
+ [ Wed Sep 14 17:36:59 2022 ] Mean test loss of 258 batches: 1.833808183670044.
755
+ [ Wed Sep 14 17:36:59 2022 ] Top1: 63.37%
756
+ [ Wed Sep 14 17:36:59 2022 ] Top5: 89.25%
757
+ [ Wed Sep 14 17:36:59 2022 ] Training epoch: 119
758
+ [ Wed Sep 14 17:38:05 2022 ] Batch(85/123) done. Loss: 0.0074 lr:0.000100 network_time: 0.0282
759
+ [ Wed Sep 14 17:38:32 2022 ] Eval epoch: 119
760
+ [ Wed Sep 14 17:39:04 2022 ] Mean test loss of 258 batches: 1.8108863830566406.
761
+ [ Wed Sep 14 17:39:04 2022 ] Top1: 64.01%
762
+ [ Wed Sep 14 17:39:04 2022 ] Top5: 89.40%
763
+ [ Wed Sep 14 17:39:04 2022 ] Training epoch: 120
764
+ [ Wed Sep 14 17:39:53 2022 ] Batch(62/123) done. Loss: 0.0039 lr:0.000100 network_time: 0.0308
765
+ [ Wed Sep 14 17:40:37 2022 ] Eval epoch: 120
766
+ [ Wed Sep 14 17:41:10 2022 ] Mean test loss of 258 batches: 1.8470932245254517.
767
+ [ Wed Sep 14 17:41:10 2022 ] Top1: 63.45%
768
+ [ Wed Sep 14 17:41:10 2022 ] Top5: 88.98%
769
+ [ Wed Sep 14 17:41:10 2022 ] Training epoch: 121
770
+ [ Wed Sep 14 17:41:43 2022 ] Batch(39/123) done. Loss: 0.0086 lr:0.000100 network_time: 0.0267
771
+ [ Wed Sep 14 17:42:43 2022 ] Eval epoch: 121
772
+ [ Wed Sep 14 17:43:15 2022 ] Mean test loss of 258 batches: 1.8321284055709839.
773
+ [ Wed Sep 14 17:43:16 2022 ] Top1: 63.67%
774
+ [ Wed Sep 14 17:43:16 2022 ] Top5: 89.13%
775
+ [ Wed Sep 14 17:43:16 2022 ] Training epoch: 122
776
+ [ Wed Sep 14 17:43:31 2022 ] Batch(16/123) done. Loss: 0.0431 lr:0.000100 network_time: 0.0316
777
+ [ Wed Sep 14 17:44:44 2022 ] Batch(116/123) done. Loss: 0.0082 lr:0.000100 network_time: 0.0269
778
+ [ Wed Sep 14 17:44:49 2022 ] Eval epoch: 122
779
+ [ Wed Sep 14 17:45:21 2022 ] Mean test loss of 258 batches: 1.8278491497039795.
780
+ [ Wed Sep 14 17:45:21 2022 ] Top1: 63.84%
781
+ [ Wed Sep 14 17:45:21 2022 ] Top5: 89.29%
782
+ [ Wed Sep 14 17:45:21 2022 ] Training epoch: 123
783
+ [ Wed Sep 14 17:46:33 2022 ] Batch(93/123) done. Loss: 0.0059 lr:0.000100 network_time: 0.0308
784
+ [ Wed Sep 14 17:46:54 2022 ] Eval epoch: 123
785
+ [ Wed Sep 14 17:47:27 2022 ] Mean test loss of 258 batches: 1.840074062347412.
786
+ [ Wed Sep 14 17:47:27 2022 ] Top1: 63.57%
787
+ [ Wed Sep 14 17:47:27 2022 ] Top5: 89.11%
788
+ [ Wed Sep 14 17:47:27 2022 ] Training epoch: 124
789
+ [ Wed Sep 14 17:48:22 2022 ] Batch(70/123) done. Loss: 0.0087 lr:0.000100 network_time: 0.0309
790
+ [ Wed Sep 14 17:49:00 2022 ] Eval epoch: 124
791
+ [ Wed Sep 14 17:49:32 2022 ] Mean test loss of 258 batches: 1.8375566005706787.
792
+ [ Wed Sep 14 17:49:32 2022 ] Top1: 63.77%
793
+ [ Wed Sep 14 17:49:32 2022 ] Top5: 89.02%
794
+ [ Wed Sep 14 17:49:32 2022 ] Training epoch: 125
795
+ [ Wed Sep 14 17:50:11 2022 ] Batch(47/123) done. Loss: 0.0099 lr:0.000100 network_time: 0.0278
796
+ [ Wed Sep 14 17:51:06 2022 ] Eval epoch: 125
797
+ [ Wed Sep 14 17:51:38 2022 ] Mean test loss of 258 batches: 1.7994847297668457.
798
+ [ Wed Sep 14 17:51:38 2022 ] Top1: 64.32%
799
+ [ Wed Sep 14 17:51:38 2022 ] Top5: 89.43%
800
+ [ Wed Sep 14 17:51:38 2022 ] Training epoch: 126
801
+ [ Wed Sep 14 17:52:00 2022 ] Batch(24/123) done. Loss: 0.0080 lr:0.000100 network_time: 0.0276
802
+ [ Wed Sep 14 17:53:12 2022 ] Eval epoch: 126
803
+ [ Wed Sep 14 17:53:44 2022 ] Mean test loss of 258 batches: 1.8445631265640259.
804
+ [ Wed Sep 14 17:53:44 2022 ] Top1: 63.87%
805
+ [ Wed Sep 14 17:53:44 2022 ] Top5: 89.10%
806
+ [ Wed Sep 14 17:53:44 2022 ] Training epoch: 127
807
+ [ Wed Sep 14 17:53:49 2022 ] Batch(1/123) done. Loss: 0.0095 lr:0.000100 network_time: 0.0316
808
+ [ Wed Sep 14 17:55:02 2022 ] Batch(101/123) done. Loss: 0.0048 lr:0.000100 network_time: 0.0280
809
+ [ Wed Sep 14 17:55:17 2022 ] Eval epoch: 127
810
+ [ Wed Sep 14 17:55:50 2022 ] Mean test loss of 258 batches: 1.853245496749878.
811
+ [ Wed Sep 14 17:55:50 2022 ] Top1: 63.81%
812
+ [ Wed Sep 14 17:55:50 2022 ] Top5: 89.00%
813
+ [ Wed Sep 14 17:55:50 2022 ] Training epoch: 128
814
+ [ Wed Sep 14 17:56:51 2022 ] Batch(78/123) done. Loss: 0.0021 lr:0.000100 network_time: 0.0309
815
+ [ Wed Sep 14 17:57:23 2022 ] Eval epoch: 128
816
+ [ Wed Sep 14 17:57:55 2022 ] Mean test loss of 258 batches: 1.8543254137039185.
817
+ [ Wed Sep 14 17:57:55 2022 ] Top1: 63.38%
818
+ [ Wed Sep 14 17:57:56 2022 ] Top5: 89.08%
819
+ [ Wed Sep 14 17:57:56 2022 ] Training epoch: 129
820
+ [ Wed Sep 14 17:58:40 2022 ] Batch(55/123) done. Loss: 0.0053 lr:0.000100 network_time: 0.0322
821
+ [ Wed Sep 14 17:59:29 2022 ] Eval epoch: 129
822
+ [ Wed Sep 14 18:00:01 2022 ] Mean test loss of 258 batches: 1.8299524784088135.
823
+ [ Wed Sep 14 18:00:01 2022 ] Top1: 63.84%
824
+ [ Wed Sep 14 18:00:01 2022 ] Top5: 89.05%
825
+ [ Wed Sep 14 18:00:01 2022 ] Training epoch: 130
826
+ [ Wed Sep 14 18:00:28 2022 ] Batch(32/123) done. Loss: 0.0081 lr:0.000100 network_time: 0.0266
827
+ [ Wed Sep 14 18:01:34 2022 ] Eval epoch: 130
828
+ [ Wed Sep 14 18:02:07 2022 ] Mean test loss of 258 batches: 1.827720046043396.
829
+ [ Wed Sep 14 18:02:07 2022 ] Top1: 63.86%
830
+ [ Wed Sep 14 18:02:07 2022 ] Top5: 89.22%
831
+ [ Wed Sep 14 18:02:07 2022 ] Training epoch: 131
832
+ [ Wed Sep 14 18:02:18 2022 ] Batch(9/123) done. Loss: 0.0106 lr:0.000100 network_time: 0.0325
833
+ [ Wed Sep 14 18:03:31 2022 ] Batch(109/123) done. Loss: 0.0237 lr:0.000100 network_time: 0.0271
834
+ [ Wed Sep 14 18:03:40 2022 ] Eval epoch: 131
835
+ [ Wed Sep 14 18:04:13 2022 ] Mean test loss of 258 batches: 1.862596869468689.
836
+ [ Wed Sep 14 18:04:13 2022 ] Top1: 63.49%
837
+ [ Wed Sep 14 18:04:13 2022 ] Top5: 88.98%
838
+ [ Wed Sep 14 18:04:13 2022 ] Training epoch: 132
839
+ [ Wed Sep 14 18:05:19 2022 ] Batch(86/123) done. Loss: 0.0090 lr:0.000100 network_time: 0.0276
840
+ [ Wed Sep 14 18:05:46 2022 ] Eval epoch: 132
841
+ [ Wed Sep 14 18:06:18 2022 ] Mean test loss of 258 batches: 1.8246906995773315.
842
+ [ Wed Sep 14 18:06:18 2022 ] Top1: 63.55%
843
+ [ Wed Sep 14 18:06:18 2022 ] Top5: 89.17%
844
+ [ Wed Sep 14 18:06:18 2022 ] Training epoch: 133
845
+ [ Wed Sep 14 18:07:08 2022 ] Batch(63/123) done. Loss: 0.0128 lr:0.000100 network_time: 0.0278
846
+ [ Wed Sep 14 18:07:52 2022 ] Eval epoch: 133
847
+ [ Wed Sep 14 18:08:24 2022 ] Mean test loss of 258 batches: 1.843893051147461.
848
+ [ Wed Sep 14 18:08:24 2022 ] Top1: 63.65%
849
+ [ Wed Sep 14 18:08:24 2022 ] Top5: 88.95%
850
+ [ Wed Sep 14 18:08:24 2022 ] Training epoch: 134
851
+ [ Wed Sep 14 18:08:57 2022 ] Batch(40/123) done. Loss: 0.0098 lr:0.000100 network_time: 0.0323
852
+ [ Wed Sep 14 18:09:57 2022 ] Eval epoch: 134
853
+ [ Wed Sep 14 18:10:30 2022 ] Mean test loss of 258 batches: 1.8185917139053345.
854
+ [ Wed Sep 14 18:10:30 2022 ] Top1: 63.82%
855
+ [ Wed Sep 14 18:10:30 2022 ] Top5: 89.31%
856
+ [ Wed Sep 14 18:10:30 2022 ] Training epoch: 135
857
+ [ Wed Sep 14 18:10:46 2022 ] Batch(17/123) done. Loss: 0.0097 lr:0.000100 network_time: 0.0330
858
+ [ Wed Sep 14 18:11:59 2022 ] Batch(117/123) done. Loss: 0.0066 lr:0.000100 network_time: 0.0273
859
+ [ Wed Sep 14 18:12:03 2022 ] Eval epoch: 135
860
+ [ Wed Sep 14 18:12:36 2022 ] Mean test loss of 258 batches: 1.8323661088943481.
861
+ [ Wed Sep 14 18:12:36 2022 ] Top1: 63.80%
862
+ [ Wed Sep 14 18:12:36 2022 ] Top5: 89.21%
863
+ [ Wed Sep 14 18:12:36 2022 ] Training epoch: 136
864
+ [ Wed Sep 14 18:13:49 2022 ] Batch(94/123) done. Loss: 0.0069 lr:0.000100 network_time: 0.0274
865
+ [ Wed Sep 14 18:14:09 2022 ] Eval epoch: 136
866
+ [ Wed Sep 14 18:14:42 2022 ] Mean test loss of 258 batches: 1.8389002084732056.
867
+ [ Wed Sep 14 18:14:42 2022 ] Top1: 63.81%
868
+ [ Wed Sep 14 18:14:42 2022 ] Top5: 89.14%
869
+ [ Wed Sep 14 18:14:42 2022 ] Training epoch: 137
870
+ [ Wed Sep 14 18:15:37 2022 ] Batch(71/123) done. Loss: 0.0042 lr:0.000100 network_time: 0.0273
871
+ [ Wed Sep 14 18:16:15 2022 ] Eval epoch: 137
872
+ [ Wed Sep 14 18:16:47 2022 ] Mean test loss of 258 batches: 1.789794683456421.
873
+ [ Wed Sep 14 18:16:47 2022 ] Top1: 63.95%
874
+ [ Wed Sep 14 18:16:47 2022 ] Top5: 89.40%
875
+ [ Wed Sep 14 18:16:47 2022 ] Training epoch: 138
876
+ [ Wed Sep 14 18:17:26 2022 ] Batch(48/123) done. Loss: 0.0349 lr:0.000100 network_time: 0.0269
877
+ [ Wed Sep 14 18:18:20 2022 ] Eval epoch: 138
878
+ [ Wed Sep 14 18:18:53 2022 ] Mean test loss of 258 batches: 1.8242590427398682.
879
+ [ Wed Sep 14 18:18:53 2022 ] Top1: 63.83%
880
+ [ Wed Sep 14 18:18:53 2022 ] Top5: 89.13%
881
+ [ Wed Sep 14 18:18:53 2022 ] Training epoch: 139
882
+ [ Wed Sep 14 18:19:15 2022 ] Batch(25/123) done. Loss: 0.0128 lr:0.000100 network_time: 0.0299
883
+ [ Wed Sep 14 18:20:26 2022 ] Eval epoch: 139
884
+ [ Wed Sep 14 18:20:59 2022 ] Mean test loss of 258 batches: 1.815767526626587.
885
+ [ Wed Sep 14 18:20:59 2022 ] Top1: 63.94%
886
+ [ Wed Sep 14 18:20:59 2022 ] Top5: 89.37%
887
+ [ Wed Sep 14 18:20:59 2022 ] Training epoch: 140
888
+ [ Wed Sep 14 18:21:04 2022 ] Batch(2/123) done. Loss: 0.0084 lr:0.000100 network_time: 0.0263
889
+ [ Wed Sep 14 18:22:17 2022 ] Batch(102/123) done. Loss: 0.0049 lr:0.000100 network_time: 0.0259
890
+ [ Wed Sep 14 18:22:32 2022 ] Eval epoch: 140
891
+ [ Wed Sep 14 18:23:05 2022 ] Mean test loss of 258 batches: 1.8298001289367676.
892
+ [ Wed Sep 14 18:23:05 2022 ] Top1: 63.69%
893
+ [ Wed Sep 14 18:23:05 2022 ] Top5: 89.31%
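+
+ The log above ends at epoch 140; each eval block reports the mean test loss over 258 batches plus Top1/Top5 accuracy, and the learning rate visible in the batch lines steps from 0.01 to 0.001 at epoch 81 and to 0.0001 at epoch 101. A minimal parsing sketch (not part of the uploaded files; the local log path is hypothetical) for pulling the Top1 curve out of such a log:
+
+ import re
+
+ top1 = []
+ with open('log.txt') as f:                       # hypothetical local copy of the log
+     for line in f:
+         m = re.search(r'Top1:\s*([\d.]+)%', line)
+         if m:
+             top1.append(float(m.group(1)))
+
+ print(f'{len(top1)} evals, best Top1 = {max(top1):.2f}%')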
ckpt/Others/Shift-GCN/ntu60_xsub/ntu_ShiftGCN_joint_xsub/shift_gcn.py ADDED
@@ -0,0 +1,216 @@
1
+ import torch
2
+ import torch.nn as nn
3
+ import torch.nn.functional as F
4
+ from torch.autograd import Variable
5
+ import numpy as np
6
+ import math
7
+
8
+ import sys
9
+ sys.path.append("./model/Temporal_shift/")
10
+
11
+ from cuda.shift import Shift
12
+
13
+
14
+ def import_class(name):
15
+ components = name.split('.')
16
+ mod = __import__(components[0])
17
+ for comp in components[1:]:
18
+ mod = getattr(mod, comp)
19
+ return mod
20
+
21
+ def conv_init(conv):
22
+ nn.init.kaiming_normal_(conv.weight, mode='fan_out')
23
+ nn.init.constant_(conv.bias, 0)
24
+
25
+
26
+ def bn_init(bn, scale):
27
+ nn.init.constant_(bn.weight, scale)
28
+ nn.init.constant_(bn.bias, 0)
29
+
30
+
31
+ class tcn(nn.Module):
32
+ def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
33
+ super(tcn, self).__init__()
34
+ pad = int((kernel_size - 1) / 2)
35
+ self.conv = nn.Conv2d(in_channels, out_channels, kernel_size=(kernel_size, 1), padding=(pad, 0),
36
+ stride=(stride, 1))
37
+
38
+ self.bn = nn.BatchNorm2d(out_channels)
39
+ self.relu = nn.ReLU()
40
+ conv_init(self.conv)
41
+ bn_init(self.bn, 1)
42
+
43
+ def forward(self, x):
44
+ x = self.bn(self.conv(x))
45
+ return x
46
+
47
+
48
+ class Shift_tcn(nn.Module):
49
+ def __init__(self, in_channels, out_channels, kernel_size=9, stride=1):
50
+ super(Shift_tcn, self).__init__()
51
+
52
+ self.in_channels = in_channels
53
+ self.out_channels = out_channels
54
+
55
+ self.bn = nn.BatchNorm2d(in_channels)
56
+ self.bn2 = nn.BatchNorm2d(in_channels)
57
+ bn_init(self.bn2, 1)
58
+ self.relu = nn.ReLU(inplace=True)
59
+ self.shift_in = Shift(channel=in_channels, stride=1, init_scale=1)
60
+ self.shift_out = Shift(channel=out_channels, stride=stride, init_scale=1)
61
+
62
+ self.temporal_linear = nn.Conv2d(in_channels, out_channels, 1)
63
+ nn.init.kaiming_normal_(self.temporal_linear.weight, mode='fan_out')
64
+
65
+ def forward(self, x):
66
+ x = self.bn(x)
67
+ # shift1
68
+ x = self.shift_in(x)
69
+ x = self.temporal_linear(x)
70
+ x = self.relu(x)
71
+ # shift2
72
+ x = self.shift_out(x)
73
+ x = self.bn2(x)
74
+ return x
75
+
76
+
77
+ class Shift_gcn(nn.Module):
78
+ def __init__(self, in_channels, out_channels, A, coff_embedding=4, num_subset=3):
79
+ super(Shift_gcn, self).__init__()
80
+ self.in_channels = in_channels
81
+ self.out_channels = out_channels
82
+ if in_channels != out_channels:
83
+ self.down = nn.Sequential(
84
+ nn.Conv2d(in_channels, out_channels, 1),
85
+ nn.BatchNorm2d(out_channels)
86
+ )
87
+ else:
88
+ self.down = lambda x: x
89
+
90
+ self.Linear_weight = nn.Parameter(torch.zeros(in_channels, out_channels, requires_grad=True, device='cuda'), requires_grad=True)
91
+ nn.init.normal_(self.Linear_weight, 0,math.sqrt(1.0/out_channels))
92
+
93
+ self.Linear_bias = nn.Parameter(torch.zeros(1,1,out_channels,requires_grad=True,device='cuda'),requires_grad=True)
94
+ nn.init.constant_(self.Linear_bias, 0)
95
+
96
+ self.Feature_Mask = nn.Parameter(torch.ones(1,25,in_channels, requires_grad=True,device='cuda'),requires_grad=True)
97
+ nn.init.constant_(self.Feature_Mask, 0)
98
+
99
+ self.bn = nn.BatchNorm1d(25*out_channels)
100
+ self.relu = nn.ReLU()
101
+
102
+ for m in self.modules():
103
+ if isinstance(m, nn.Conv2d):
104
+ conv_init(m)
105
+ elif isinstance(m, nn.BatchNorm2d):
106
+ bn_init(m, 1)
107
+
108
+ index_array = np.empty(25*in_channels).astype(np.int64)  # spatial shift indices: channel j of joint i reads joint (i+j) % 25
109
+ for i in range(25):
110
+ for j in range(in_channels):
111
+ index_array[i*in_channels + j] = (i*in_channels + j + j*in_channels)%(in_channels*25)
112
+ self.shift_in = nn.Parameter(torch.from_numpy(index_array),requires_grad=False)
113
+
114
+ index_array = np.empty(25*out_channels).astype(np.int64)  # inverse shift for the output: channel j of joint i reads joint (i-j) % 25
115
+ for i in range(25):
116
+ for j in range(out_channels):
117
+ index_array[i*out_channels + j] = (i*out_channels + j - j*out_channels)%(out_channels*25)
118
+ self.shift_out = nn.Parameter(torch.from_numpy(index_array),requires_grad=False)
119
+
120
+
121
+ def forward(self, x0):
122
+ n, c, t, v = x0.size()
123
+ x = x0.permute(0,2,3,1).contiguous()
124
+
125
+ # shift1
126
+ x = x.view(n*t,v*c)
127
+ x = torch.index_select(x, 1, self.shift_in)
128
+ x = x.view(n*t,v,c)
129
+ x = x * (torch.tanh(self.Feature_Mask)+1)
130
+
131
+ x = torch.einsum('nwc,cd->nwd', (x, self.Linear_weight)).contiguous() # nt,v,c
132
+ x = x + self.Linear_bias
133
+
134
+ # shift2
135
+ x = x.view(n*t,-1)
136
+ x = torch.index_select(x, 1, self.shift_out)
137
+ x = self.bn(x)
138
+ x = x.view(n,t,v,self.out_channels).permute(0,3,1,2) # n,c,t,v
139
+
140
+ x = x + self.down(x0)
141
+ x = self.relu(x)
142
+ return x
143
+
144
+
145
+ class TCN_GCN_unit(nn.Module):
146
+ def __init__(self, in_channels, out_channels, A, stride=1, residual=True):
147
+ super(TCN_GCN_unit, self).__init__()
148
+ self.gcn1 = Shift_gcn(in_channels, out_channels, A)
149
+ self.tcn1 = Shift_tcn(out_channels, out_channels, stride=stride)
150
+ self.relu = nn.ReLU()
151
+
152
+ if not residual:
153
+ self.residual = lambda x: 0
154
+
155
+ elif (in_channels == out_channels) and (stride == 1):
156
+ self.residual = lambda x: x
157
+ else:
158
+ self.residual = tcn(in_channels, out_channels, kernel_size=1, stride=stride)
159
+
160
+ def forward(self, x):
161
+ x = self.tcn1(self.gcn1(x)) + self.residual(x)
162
+ return self.relu(x)
163
+
164
+
165
+ class Model(nn.Module):
166
+ def __init__(self, num_class=60, num_point=25, num_person=2, graph=None, graph_args=dict(), in_channels=3):
167
+ super(Model, self).__init__()
168
+
169
+ if graph is None:
170
+ raise ValueError()
171
+ else:
172
+ Graph = import_class(graph)
173
+ self.graph = Graph(**graph_args)
174
+
175
+ A = self.graph.A
176
+ self.data_bn = nn.BatchNorm1d(num_person * in_channels * num_point)
177
+
178
+ self.l1 = TCN_GCN_unit(3, 64, A, residual=False)
179
+ self.l2 = TCN_GCN_unit(64, 64, A)
180
+ self.l3 = TCN_GCN_unit(64, 64, A)
181
+ self.l4 = TCN_GCN_unit(64, 64, A)
182
+ self.l5 = TCN_GCN_unit(64, 128, A, stride=2)
183
+ self.l6 = TCN_GCN_unit(128, 128, A)
184
+ self.l7 = TCN_GCN_unit(128, 128, A)
185
+ self.l8 = TCN_GCN_unit(128, 256, A, stride=2)
186
+ self.l9 = TCN_GCN_unit(256, 256, A)
187
+ self.l10 = TCN_GCN_unit(256, 256, A)
188
+
189
+ self.fc = nn.Linear(256, num_class)
190
+ nn.init.normal_(self.fc.weight, 0, math.sqrt(2. / num_class))
191
+ bn_init(self.data_bn, 1)
192
+
193
+ def forward(self, x):
194
+ N, C, T, V, M = x.size()
195
+
196
+ x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
197
+ x = self.data_bn(x)
198
+ x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
199
+
200
+ x = self.l1(x)
201
+ x = self.l2(x)
202
+ x = self.l3(x)
203
+ x = self.l4(x)
204
+ x = self.l5(x)
205
+ x = self.l6(x)
206
+ x = self.l7(x)
207
+ x = self.l8(x)
208
+ x = self.l9(x)
209
+ x = self.l10(x)
210
+
211
+ # N*M,C,T,V
212
+ c_new = x.size(1)
213
+ x = x.view(N, M, c_new, -1)
214
+ x = x.mean(3).mean(1)
215
+
216
+ return self.fc(x)
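+
+ For reference, a minimal usage sketch of the Model defined above (not part of the upload). It assumes the original Shift-GCN repository layout, i.e. that this file is saved as model/shift_gcn.py, that graph.ntu_rgb_d.Graph is importable, and that the compiled CUDA Shift op under model/Temporal_shift is available; a GPU is required because the shift indices and weights are created on 'cuda'.
+
+ import torch
+ from model.shift_gcn import Model  # assumed path, matching `model: model.shift_gcn.Model` in the configs
+
+ model = Model(num_class=60, num_point=25, num_person=2,
+               graph='graph.ntu_rgb_d.Graph',
+               graph_args={'labeling_mode': 'spatial'}).cuda()
+
+ # NTU skeleton batch: (N, C=3 coordinates, T frames, V=25 joints, M=2 persons)
+ x = torch.randn(4, 3, 64, 25, 2).cuda()
+ logits = model(x)          # -> (4, 60) class scores
+ print(logits.shape)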
ckpt/Others/Shift-GCN/ntu60_xview/ntu_ShiftGCN_bone_motion_xview/config.yaml ADDED
@@ -0,0 +1,56 @@
1
+ Experiment_name: ntu_ShiftGCN_bone_motion_xview
2
+ base_lr: 0.1
3
+ batch_size: 64
4
+ config: ./config/nturgbd-cross-view/train_bone_motion.yaml
5
+ device:
6
+ - 2
7
+ - 3
8
+ eval_interval: 5
9
+ feeder: feeders.feeder.Feeder
10
+ ignore_weights: []
11
+ log_interval: 100
12
+ model: model.shift_gcn.Model
13
+ model_args:
14
+ graph: graph.ntu_rgb_d.Graph
15
+ graph_args:
16
+ labeling_mode: spatial
17
+ num_class: 60
18
+ num_person: 2
19
+ num_point: 25
20
+ model_saved_name: ./save_models/ntu_ShiftGCN_bone_motion_xview
21
+ nesterov: true
22
+ num_epoch: 140
23
+ num_worker: 32
24
+ only_train_epoch: 1
25
+ only_train_part: true
26
+ optimizer: SGD
27
+ phase: train
28
+ print_log: true
29
+ save_interval: 2
30
+ save_score: false
31
+ seed: 1
32
+ show_topk:
33
+ - 1
34
+ - 5
35
+ start_epoch: 0
36
+ step:
37
+ - 60
38
+ - 80
39
+ - 100
40
+ test_batch_size: 64
41
+ test_feeder_args:
42
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/val_data_bone_motion.npy
43
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/val_label.pkl
44
+ train_feeder_args:
45
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/train_data_bone_motion.npy
46
+ debug: false
47
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/train_label.pkl
48
+ normalization: false
49
+ random_choose: false
50
+ random_move: false
51
+ random_shift: false
52
+ window_size: -1
53
+ warm_up_epoch: 0
54
+ weight_decay: 0.0001
55
+ weights: null
56
+ work_dir: ./work_dir/ntu_ShiftGCN_bone_motion_xview
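+
+ A minimal sketch (assuming PyYAML is installed) of reading this config the way a training or evaluation script would, using the path from this upload:
+
+ import yaml
+
+ cfg_path = 'ckpt/Others/Shift-GCN/ntu60_xview/ntu_ShiftGCN_bone_motion_xview/config.yaml'
+ with open(cfg_path) as f:
+     cfg = yaml.safe_load(f)
+
+ print(cfg['model'], cfg['model_args'])                 # model.shift_gcn.Model and its graph/num_class args
+ print(cfg['base_lr'], cfg['step'], cfg['num_epoch'])   # 0.1 [60, 80, 100] 140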
ckpt/Others/Shift-GCN/ntu60_xview/ntu_ShiftGCN_bone_motion_xview/eval_results/best_acc.pkl ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:442f8022455e68553c84d6873a7ba2458ea90e3fe15beec0070d7d45b01ef029
3
+ size 5718404
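+
+ The three lines above are a Git LFS pointer rather than the pickle itself; the actual eval-score file (about 5.7 MB) lives in LFS storage and must be fetched (e.g. with git lfs pull) before it can be read. A minimal loading sketch, assuming the object has been materialized locally; the internal structure of the pickle is not documented here:
+
+ import pickle
+
+ path = 'ckpt/Others/Shift-GCN/ntu60_xview/ntu_ShiftGCN_bone_motion_xview/eval_results/best_acc.pkl'
+ with open(path, 'rb') as f:
+     best_acc = pickle.load(f)
+ print(type(best_acc))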