firework8 committed · Commit 5545deb · verified · 1 Parent(s): f7acd00

Upload 64 files

This view is limited to 50 files because the commit contains too many changes.
Files changed (50)
  1. ckpt/Others/MST-GCN/ntu120_xset/xset_b/AEMST_GCN.py +168 -0
  2. ckpt/Others/MST-GCN/ntu120_xset/xset_b/config.yaml +107 -0
  3. ckpt/Others/MST-GCN/ntu120_xset/xset_b/epoch1_test_score.pkl +3 -0
  4. ckpt/Others/MST-GCN/ntu120_xset/xset_b/log.txt +631 -0
  5. ckpt/Others/MST-GCN/ntu120_xset/xset_bm/AEMST_GCN.py +168 -0
  6. ckpt/Others/MST-GCN/ntu120_xset/xset_bm/config.yaml +107 -0
  7. ckpt/Others/MST-GCN/ntu120_xset/xset_bm/epoch1_test_score.pkl +3 -0
  8. ckpt/Others/MST-GCN/ntu120_xset/xset_bm/log.txt +631 -0
  9. ckpt/Others/MST-GCN/ntu120_xset/xset_j/AEMST_GCN.py +168 -0
  10. ckpt/Others/MST-GCN/ntu120_xset/xset_j/config.yaml +107 -0
  11. ckpt/Others/MST-GCN/ntu120_xset/xset_j/epoch1_test_score.pkl +3 -0
  12. ckpt/Others/MST-GCN/ntu120_xset/xset_j/log.txt +631 -0
  13. ckpt/Others/MST-GCN/ntu120_xset/xset_jm/AEMST_GCN.py +168 -0
  14. ckpt/Others/MST-GCN/ntu120_xset/xset_jm/config.yaml +107 -0
  15. ckpt/Others/MST-GCN/ntu120_xset/xset_jm/epoch1_test_score.pkl +3 -0
  16. ckpt/Others/MST-GCN/ntu120_xset/xset_jm/log.txt +631 -0
  17. ckpt/Others/MST-GCN/ntu120_xsub/xsub_b/AEMST_GCN.py +168 -0
  18. ckpt/Others/MST-GCN/ntu120_xsub/xsub_b/config.yaml +107 -0
  19. ckpt/Others/MST-GCN/ntu120_xsub/xsub_b/epoch1_test_score.pkl +3 -0
  20. ckpt/Others/MST-GCN/ntu120_xsub/xsub_b/log.txt +631 -0
  21. ckpt/Others/MST-GCN/ntu120_xsub/xsub_bm/AEMST_GCN.py +168 -0
  22. ckpt/Others/MST-GCN/ntu120_xsub/xsub_bm/config.yaml +107 -0
  23. ckpt/Others/MST-GCN/ntu120_xsub/xsub_bm/epoch1_test_score.pkl +3 -0
  24. ckpt/Others/MST-GCN/ntu120_xsub/xsub_bm/log.txt +631 -0
  25. ckpt/Others/MST-GCN/ntu120_xsub/xsub_j/AEMST_GCN.py +168 -0
  26. ckpt/Others/MST-GCN/ntu120_xsub/xsub_j/config.yaml +107 -0
  27. ckpt/Others/MST-GCN/ntu120_xsub/xsub_j/epoch1_test_score.pkl +3 -0
  28. ckpt/Others/MST-GCN/ntu120_xsub/xsub_j/log.txt +631 -0
  29. ckpt/Others/MST-GCN/ntu120_xsub/xsub_jm/AEMST_GCN.py +168 -0
  30. ckpt/Others/MST-GCN/ntu120_xsub/xsub_jm/config.yaml +107 -0
  31. ckpt/Others/MST-GCN/ntu120_xsub/xsub_jm/epoch1_test_score.pkl +3 -0
  32. ckpt/Others/MST-GCN/ntu120_xsub/xsub_jm/log.txt +631 -0
  33. ckpt/Others/MST-GCN/ntu60_xsub/xsub_b/AEMST_GCN.py +168 -0
  34. ckpt/Others/MST-GCN/ntu60_xsub/xsub_b/config.yaml +107 -0
  35. ckpt/Others/MST-GCN/ntu60_xsub/xsub_b/epoch1_test_score.pkl +3 -0
  36. ckpt/Others/MST-GCN/ntu60_xsub/xsub_b/log.txt +631 -0
  37. ckpt/Others/MST-GCN/ntu60_xsub/xsub_bm/AEMST_GCN.py +168 -0
  38. ckpt/Others/MST-GCN/ntu60_xsub/xsub_bm/config.yaml +107 -0
  39. ckpt/Others/MST-GCN/ntu60_xsub/xsub_bm/epoch1_test_score.pkl +3 -0
  40. ckpt/Others/MST-GCN/ntu60_xsub/xsub_bm/log.txt +631 -0
  41. ckpt/Others/MST-GCN/ntu60_xsub/xsub_j/AEMST_GCN.py +168 -0
  42. ckpt/Others/MST-GCN/ntu60_xsub/xsub_j/config.yaml +107 -0
  43. ckpt/Others/MST-GCN/ntu60_xsub/xsub_j/epoch1_test_score.pkl +3 -0
  44. ckpt/Others/MST-GCN/ntu60_xsub/xsub_j/log.txt +631 -0
  45. ckpt/Others/MST-GCN/ntu60_xsub/xsub_jm/AEMST_GCN.py +168 -0
  46. ckpt/Others/MST-GCN/ntu60_xsub/xsub_jm/config.yaml +107 -0
  47. ckpt/Others/MST-GCN/ntu60_xsub/xsub_jm/epoch1_test_score.pkl +3 -0
  48. ckpt/Others/MST-GCN/ntu60_xsub/xsub_jm/log.txt +631 -0
  49. ckpt/Others/MST-GCN/ntu60_xview/xview_b/AEMST_GCN.py +168 -0
  50. ckpt/Others/MST-GCN/ntu60_xview/xview_b/config.yaml +107 -0
ckpt/Others/MST-GCN/ntu120_xset/xset_b/AEMST_GCN.py ADDED
@@ -0,0 +1,168 @@
import torch
import torch.nn as nn
import torch.nn.functional as F

import numpy as np
import math

import sys
sys.path.append('../')
from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \
    MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \
    MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer
from model.activations import Activations
from model.utils import import_class, conv_branch_init, conv_init, bn_init
from model.attentions import Attention_Layer

# import model.attentions

__block_type__ = {
    'basic': (Basic_GCN_layer, Basic_TCN_layer),
    'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer),
    'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer),
    'ms': (MS_GCN_layer, MS_TCN_layer),
    'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer),
}


class Model(nn.Module):
    def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten,
                 **kwargs):
        super(Model, self).__init__()
        kwargs['act'] = Activations(kwargs['act'])
        atten = None if atten == 'None' else atten
        if graph is None:
            raise ValueError()
        else:
            Graph = import_class(graph)
            self.graph = Graph(**graph_args)
            A = self.graph.A

        self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point)

        self.layers = nn.ModuleList()

        for i, block in enumerate(block_args):
            if i == 0:
                self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2],
                                                 kernel_size=kernel_size, stride=block[3], A=A, block_type='basic',
                                                 atten=None, **kwargs))
            else:
                self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2],
                                                 kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type,
                                                 atten=atten, **kwargs))

        self.gap = nn.AdaptiveAvgPool2d(1)
        self.fc = nn.Linear(block_args[-1][1], num_class)

        for m in self.modules():
            if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer):
                for mm in m.modules():
                    if isinstance(mm, nn.Conv2d):
                        conv_branch_init(mm, self.graph.A.shape[0])
                    if isinstance(mm, nn.BatchNorm2d):
                        bn_init(mm, 1)
            elif isinstance(m, nn.Conv2d):
                conv_init(m)
            elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d):
                bn_init(m, 1)
            elif isinstance(m, nn.Linear):
                nn.init.normal_(m.weight, 0, math.sqrt(2. / num_class))

    def forward(self, x):
        N, C, T, V, M = x.size()

        x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)  # N C T V M --> N M V C T
        x = self.data_bn(x)
        x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)

        for i, layer in enumerate(self.layers):
            x = layer(x)

        features = x

        x = self.gap(x).view(N, M, -1).mean(dim=1)
        x = self.fc(x)

        return features, x


class MST_GCN_block(nn.Module):
    def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs):
        super(MST_GCN_block, self).__init__()
        self.atten = atten
        self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A,
                                                   residual=residual, **kwargs)
        self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride,
                                                   residual=residual, **kwargs)
        if atten is not None:
            self.att = Attention_Layer(out_channels, atten, **kwargs)

    def forward(self, x):
        return self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x))


if __name__ == '__main__':
    import sys
    import time

    parts = [
        np.array([5, 6, 7, 8, 22, 23]) - 1,     # left_arm
        np.array([9, 10, 11, 12, 24, 25]) - 1,  # right_arm
        np.array([13, 14, 15, 16]) - 1,         # left_leg
        np.array([17, 18, 19, 20]) - 1,         # right_leg
        np.array([1, 2, 3, 4, 21]) - 1          # torso
    ]

    warmup_iter = 3
    test_iter = 10
    sys.path.append('/home/chenzhan/mywork/MST-GCN/')
    from thop import profile
    basic_channels = 112
    cfgs = {
        'num_class': 2,
        'num_point': 25,
        'num_person': 1,
        'block_args': [[2, basic_channels, False, 1],
                       [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1],
                       [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1],
                       [basic_channels*2, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]],
        'graph': 'graph.ntu_rgb_d.Graph',
        'graph_args': {'labeling_mode': 'spatial'},
        'kernel_size': 9,
        'block_type': 'ms',
        'reduct_ratio': 2,
        'expand_ratio': 0,
        't_scale': 4,
        'layer_type': 'sep',
        'act': 'relu',
        's_scale': 4,
        'atten': 'stcja',
        'bias': True,
        'parts': parts
    }

    model = Model(**cfgs)

    N, C, T, V, M = 4, 2, 16, 25, 1
    inputs = torch.rand(N, C, T, V, M)

    for i in range(warmup_iter + test_iter):
        if i == warmup_iter:
            start_time = time.time()
        outputs = model(inputs)
    end_time = time.time()

    total_time = end_time - start_time
    print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format(
        total_time, test_iter, total_time/test_iter/N))

    print(outputs.size())

    hereflops, params = profile(model, inputs=(inputs,), verbose=False)
    print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N))
    print('# Params is {} M'.format(sum(param.numel() for param in model.parameters()) / 10 ** 6))
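The trickiest part of the file above is the pair of permute/view calls at the top of Model.forward. The standalone sketch below (arbitrary sizes, plain PyTorch, not part of the uploaded files) traces the same reshaping so the intermediate layouts are explicit.

import torch

# Skeleton batch layout used by these checkpoints: N samples, C channels,
# T frames, V joints, M persons. Sizes here are arbitrary examples.
N, C, T, V, M = 2, 3, 16, 25, 2
x = torch.rand(N, C, T, V, M)

# Fold persons, joints and channels into one axis so BatchNorm1d (data_bn)
# normalises each (person, joint, channel) track over time.
x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
assert x.shape == (N, M * V * C, T)   # (2, 150, 16)

# Unfold again and merge the person axis into the batch axis for the GCN stack.
x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
assert x.shape == (N * M, C, T, V)    # (4, 3, 16, 25)

After the layer stack, forward returns a (features, logits) tuple; the pooled logits are what the training script scores against the class labels.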
ckpt/Others/MST-GCN/ntu120_xset/xset_b/config.yaml ADDED
@@ -0,0 +1,107 @@
base_lr: 0.15
batch_size: 8
config: config/ntu120/xset_b.yaml
device:
- 0
eval_interval: 5
feeder: feeders.feeder.Feeder
ignore_weights: []
local_rank: 0
log_interval: 100
model: model.AEMST_GCN.Model
model_args:
  act: relu
  atten: None
  bias: true
  block_args:
  - - 3
    - 112
    - false
    - 1
  - - 112
    - 112
    - true
    - 1
  - - 112
    - 112
    - true
    - 1
  - - 112
    - 112
    - true
    - 1
  - - 112
    - 224
    - true
    - 2
  - - 224
    - 224
    - true
    - 1
  - - 224
    - 224
    - true
    - 1
  - - 224
    - 448
    - true
    - 2
  - - 448
    - 448
    - true
    - 1
  - - 448
    - 448
    - true
    - 1
  block_type: ms
  expand_ratio: 0
  graph: graph.ntu_rgb_d.Graph
  graph_args:
    labeling_mode: spatial
  kernel_size: 9
  layer_type: basic
  num_class: 120
  num_person: 2
  num_point: 25
  reduct_ratio: 2
  s_scale: 4
  t_scale: 4
model_path: ''
model_saved_name: ./runs/ntu120/xset_b/runs
nesterov: true
num_epoch: 110
num_worker: 32
only_train_epoch: 0
only_train_part: false
optimizer: SGD
phase: train
print_log: true
save_interval: 1
save_score: true
seed: 1
show_topk:
- 1
- 5
start_epoch: 0
step:
- 50
- 70
- 90
test_batch_size: 64
test_feeder_args:
  data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_bone.npy
  label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl
train_feeder_args:
  data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_bone.npy
  debug: false
  label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl
  normalization: false
  random_choose: false
  random_move: false
  random_shift: false
  window_size: -1
warm_up_epoch: 10
weight_decay: 0.0001
weights: null
work_dir: ./work_dir/ntu120/xset_b
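The model_args block above mirrors the keyword arguments of model.AEMST_GCN.Model shown earlier: the named parameters (num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten) plus the extra layer options that travel through **kwargs (reduct_ratio, expand_ratio, s_scale, t_scale, layer_type, act, bias). The training script itself is not part of this upload, so the following is only a minimal sketch of how such a config maps onto the model class, assuming the MST-GCN repository root (providing the model/ and graph/ packages) is on sys.path and config.yaml is the file reproduced above.

import yaml
from model.AEMST_GCN import Model  # the model file shown in this commit

with open('config.yaml') as f:
    cfg = yaml.safe_load(f)

# Note: YAML leaves `atten: None` as the string 'None', which Model.__init__
# explicitly converts back to Python None.
model = Model(**cfg['model_args'])

# Input layout for this config: (N, 3, T, 25, 2) skeleton tensors.

The sibling directories (xset_j, xset_jm, xset_bm, and likewise for ntu120_xsub/ntu60) presumably differ mainly in which preprocessed stream the feeder paths point at (joint, bone, and their motion variants) while sharing this architecture and schedule.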
ckpt/Others/MST-GCN/ntu120_xset/xset_b/epoch1_test_score.pkl ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:4ef06ec9a637932e5101e90189ee5aa4064133280b97542e0b0ef61f8e5f1ac0
size 34946665
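The three lines above are a Git LFS pointer, so the diff records only the hash and size (about 35 MB) of epoch1_test_score.pkl; the real payload has to be fetched with `git lfs pull`. Once fetched it is an ordinary pickle. In repositories of this family such score files typically hold per-sample class-score arrays used for multi-stream ensembling, but that structure is an assumption here; a minimal loading sketch:

import pickle

# Requires the actual file pulled via LFS; the pointer text alone cannot be unpickled.
with open('ckpt/Others/MST-GCN/ntu120_xset/xset_b/epoch1_test_score.pkl', 'rb') as f:
    scores = pickle.load(f)

# Commonly a mapping from sample name to a 120-way score vector (an assumption),
# which joint/bone/motion ensembles would sum element-wise before taking argmax.
print(type(scores), len(scores))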
ckpt/Others/MST-GCN/ntu120_xset/xset_b/log.txt ADDED
@@ -0,0 +1,631 @@
1
+ [ Wed Sep 7 21:35:36 2022 ] # generator parameters: 2.922995 M.
2
+ [ Wed Sep 7 21:35:37 2022 ] Parameters:
3
+ {'work_dir': './work_dir/ntu120/xset_b', 'model_saved_name': './runs/ntu120/xset_b/runs', 'config': 'config/ntu120/xset_b.yaml', 'phase': 'train', 'save_score': True, 'seed': 1, 'log_interval': 100, 'save_interval': 1, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_bone.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_bone.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl'}, 'model': 'model.AEMST_GCN.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'block_args': [[3, 112, False, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 224, True, 2], [224, 224, True, 1], [224, 224, True, 1], [224, 448, True, 2], [448, 448, True, 1], [448, 448, True, 1]], 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}, 'kernel_size': 9, 'block_type': 'ms', 'reduct_ratio': 2, 'expand_ratio': 0, 's_scale': 4, 't_scale': 4, 'layer_type': 'basic', 'act': 'relu', 'atten': 'None', 'bias': True}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.15, 'step': [50, 70, 90], 'device': [0], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 8, 'test_batch_size': 64, 'start_epoch': 0, 'model_path': '', 'num_epoch': 110, 'weight_decay': 0.0001, 'only_train_part': False, 'only_train_epoch': 0, 'warm_up_epoch': 10, 'local_rank': 0}
4
+
5
+ [ Wed Sep 7 21:35:37 2022 ] Training epoch: 1
6
+ [ Wed Sep 7 21:35:37 2022 ] Learning rate: 0.015
7
+ [ Wed Sep 7 21:40:00 2022 ] Mean training loss: 3.6086.
8
+ [ Wed Sep 7 21:40:00 2022 ] Time consumption: [Data]01%, [Network]98%
9
+ [ Wed Sep 7 21:40:00 2022 ] Training epoch: 2
10
+ [ Wed Sep 7 21:40:00 2022 ] Learning rate: 0.03
11
+ [ Wed Sep 7 21:44:23 2022 ] Mean training loss: 2.7809.
12
+ [ Wed Sep 7 21:44:23 2022 ] Time consumption: [Data]01%, [Network]99%
13
+ [ Wed Sep 7 21:44:23 2022 ] Training epoch: 3
14
+ [ Wed Sep 7 21:44:23 2022 ] Learning rate: 0.045
15
+ [ Wed Sep 7 21:48:46 2022 ] Mean training loss: 2.3419.
16
+ [ Wed Sep 7 21:48:46 2022 ] Time consumption: [Data]01%, [Network]99%
17
+ [ Wed Sep 7 21:48:46 2022 ] Training epoch: 4
18
+ [ Wed Sep 7 21:48:46 2022 ] Learning rate: 0.06
19
+ [ Wed Sep 7 21:53:09 2022 ] Mean training loss: 2.0429.
20
+ [ Wed Sep 7 21:53:09 2022 ] Time consumption: [Data]01%, [Network]99%
21
+ [ Wed Sep 7 21:53:09 2022 ] Training epoch: 5
22
+ [ Wed Sep 7 21:53:09 2022 ] Learning rate: 0.075
23
+ [ Wed Sep 7 21:57:32 2022 ] Mean training loss: 1.8437.
24
+ [ Wed Sep 7 21:57:32 2022 ] Time consumption: [Data]01%, [Network]99%
25
+ [ Wed Sep 7 21:57:32 2022 ] Training epoch: 6
26
+ [ Wed Sep 7 21:57:32 2022 ] Learning rate: 0.09
27
+ [ Wed Sep 7 22:01:55 2022 ] Mean training loss: 1.7147.
28
+ [ Wed Sep 7 22:01:55 2022 ] Time consumption: [Data]01%, [Network]99%
29
+ [ Wed Sep 7 22:01:55 2022 ] Training epoch: 7
30
+ [ Wed Sep 7 22:01:55 2022 ] Learning rate: 0.10500000000000001
31
+ [ Wed Sep 7 22:06:17 2022 ] Mean training loss: 1.5963.
32
+ [ Wed Sep 7 22:06:17 2022 ] Time consumption: [Data]01%, [Network]99%
33
+ [ Wed Sep 7 22:06:17 2022 ] Training epoch: 8
34
+ [ Wed Sep 7 22:06:17 2022 ] Learning rate: 0.12
35
+ [ Wed Sep 7 22:10:40 2022 ] Mean training loss: 1.5492.
36
+ [ Wed Sep 7 22:10:40 2022 ] Time consumption: [Data]01%, [Network]99%
37
+ [ Wed Sep 7 22:10:40 2022 ] Training epoch: 9
38
+ [ Wed Sep 7 22:10:40 2022 ] Learning rate: 0.13499999999999998
39
+ [ Wed Sep 7 22:15:03 2022 ] Mean training loss: 1.4981.
40
+ [ Wed Sep 7 22:15:03 2022 ] Time consumption: [Data]01%, [Network]99%
41
+ [ Wed Sep 7 22:15:03 2022 ] Training epoch: 10
42
+ [ Wed Sep 7 22:15:03 2022 ] Learning rate: 0.15
43
+ [ Wed Sep 7 22:19:25 2022 ] Mean training loss: 1.4923.
44
+ [ Wed Sep 7 22:19:25 2022 ] Time consumption: [Data]01%, [Network]99%
45
+ [ Wed Sep 7 22:19:25 2022 ] Training epoch: 11
46
+ [ Wed Sep 7 22:19:25 2022 ] Learning rate: 0.15
47
+ [ Wed Sep 7 22:23:48 2022 ] Mean training loss: 1.3916.
48
+ [ Wed Sep 7 22:23:48 2022 ] Time consumption: [Data]01%, [Network]99%
49
+ [ Wed Sep 7 22:23:48 2022 ] Training epoch: 12
50
+ [ Wed Sep 7 22:23:48 2022 ] Learning rate: 0.15
51
+ [ Wed Sep 7 22:28:11 2022 ] Mean training loss: 1.3541.
52
+ [ Wed Sep 7 22:28:11 2022 ] Time consumption: [Data]01%, [Network]99%
53
+ [ Wed Sep 7 22:28:11 2022 ] Training epoch: 13
54
+ [ Wed Sep 7 22:28:11 2022 ] Learning rate: 0.15
55
+ [ Wed Sep 7 22:32:34 2022 ] Mean training loss: 1.2929.
56
+ [ Wed Sep 7 22:32:34 2022 ] Time consumption: [Data]01%, [Network]99%
57
+ [ Wed Sep 7 22:32:34 2022 ] Training epoch: 14
58
+ [ Wed Sep 7 22:32:34 2022 ] Learning rate: 0.15
59
+ [ Wed Sep 7 22:36:57 2022 ] Mean training loss: 1.2638.
60
+ [ Wed Sep 7 22:36:57 2022 ] Time consumption: [Data]01%, [Network]99%
61
+ [ Wed Sep 7 22:36:57 2022 ] Training epoch: 15
62
+ [ Wed Sep 7 22:36:57 2022 ] Learning rate: 0.15
63
+ [ Wed Sep 7 22:41:20 2022 ] Mean training loss: 1.2265.
64
+ [ Wed Sep 7 22:41:20 2022 ] Time consumption: [Data]01%, [Network]99%
65
+ [ Wed Sep 7 22:41:20 2022 ] Training epoch: 16
66
+ [ Wed Sep 7 22:41:20 2022 ] Learning rate: 0.15
67
+ [ Wed Sep 7 22:45:42 2022 ] Mean training loss: 1.1839.
68
+ [ Wed Sep 7 22:45:42 2022 ] Time consumption: [Data]01%, [Network]99%
69
+ [ Wed Sep 7 22:45:42 2022 ] Training epoch: 17
70
+ [ Wed Sep 7 22:45:42 2022 ] Learning rate: 0.15
71
+ [ Wed Sep 7 22:50:05 2022 ] Mean training loss: 1.1774.
72
+ [ Wed Sep 7 22:50:05 2022 ] Time consumption: [Data]01%, [Network]99%
73
+ [ Wed Sep 7 22:50:05 2022 ] Training epoch: 18
74
+ [ Wed Sep 7 22:50:05 2022 ] Learning rate: 0.15
75
+ [ Wed Sep 7 22:54:28 2022 ] Mean training loss: 1.1332.
76
+ [ Wed Sep 7 22:54:28 2022 ] Time consumption: [Data]01%, [Network]99%
77
+ [ Wed Sep 7 22:54:28 2022 ] Training epoch: 19
78
+ [ Wed Sep 7 22:54:28 2022 ] Learning rate: 0.15
79
+ [ Wed Sep 7 22:58:50 2022 ] Mean training loss: 1.1246.
80
+ [ Wed Sep 7 22:58:50 2022 ] Time consumption: [Data]01%, [Network]99%
81
+ [ Wed Sep 7 22:58:50 2022 ] Training epoch: 20
82
+ [ Wed Sep 7 22:58:50 2022 ] Learning rate: 0.15
83
+ [ Wed Sep 7 23:03:13 2022 ] Mean training loss: 1.1059.
84
+ [ Wed Sep 7 23:03:13 2022 ] Time consumption: [Data]01%, [Network]99%
85
+ [ Wed Sep 7 23:03:13 2022 ] Training epoch: 21
86
+ [ Wed Sep 7 23:03:13 2022 ] Learning rate: 0.15
87
+ [ Wed Sep 7 23:07:35 2022 ] Mean training loss: 1.0691.
88
+ [ Wed Sep 7 23:07:35 2022 ] Time consumption: [Data]01%, [Network]99%
89
+ [ Wed Sep 7 23:07:35 2022 ] Training epoch: 22
90
+ [ Wed Sep 7 23:07:35 2022 ] Learning rate: 0.15
91
+ [ Wed Sep 7 23:11:58 2022 ] Mean training loss: 1.0708.
92
+ [ Wed Sep 7 23:11:58 2022 ] Time consumption: [Data]01%, [Network]99%
93
+ [ Wed Sep 7 23:11:58 2022 ] Training epoch: 23
94
+ [ Wed Sep 7 23:11:58 2022 ] Learning rate: 0.15
95
+ [ Wed Sep 7 23:16:20 2022 ] Mean training loss: 1.0656.
96
+ [ Wed Sep 7 23:16:20 2022 ] Time consumption: [Data]01%, [Network]99%
97
+ [ Wed Sep 7 23:16:20 2022 ] Training epoch: 24
98
+ [ Wed Sep 7 23:16:20 2022 ] Learning rate: 0.15
99
+ [ Wed Sep 7 23:20:42 2022 ] Mean training loss: 1.0507.
100
+ [ Wed Sep 7 23:20:42 2022 ] Time consumption: [Data]01%, [Network]99%
101
+ [ Wed Sep 7 23:20:42 2022 ] Training epoch: 25
102
+ [ Wed Sep 7 23:20:42 2022 ] Learning rate: 0.15
103
+ [ Wed Sep 7 23:25:05 2022 ] Mean training loss: 1.0367.
104
+ [ Wed Sep 7 23:25:05 2022 ] Time consumption: [Data]01%, [Network]99%
105
+ [ Wed Sep 7 23:25:05 2022 ] Training epoch: 26
106
+ [ Wed Sep 7 23:25:05 2022 ] Learning rate: 0.15
107
+ [ Wed Sep 7 23:29:27 2022 ] Mean training loss: 1.0236.
108
+ [ Wed Sep 7 23:29:27 2022 ] Time consumption: [Data]01%, [Network]99%
109
+ [ Wed Sep 7 23:29:27 2022 ] Training epoch: 27
110
+ [ Wed Sep 7 23:29:27 2022 ] Learning rate: 0.15
111
+ [ Wed Sep 7 23:33:48 2022 ] Mean training loss: 1.0079.
112
+ [ Wed Sep 7 23:33:48 2022 ] Time consumption: [Data]01%, [Network]99%
113
+ [ Wed Sep 7 23:33:48 2022 ] Training epoch: 28
114
+ [ Wed Sep 7 23:33:48 2022 ] Learning rate: 0.15
115
+ [ Wed Sep 7 23:38:11 2022 ] Mean training loss: 0.9952.
116
+ [ Wed Sep 7 23:38:11 2022 ] Time consumption: [Data]01%, [Network]99%
117
+ [ Wed Sep 7 23:38:11 2022 ] Training epoch: 29
118
+ [ Wed Sep 7 23:38:11 2022 ] Learning rate: 0.15
119
+ [ Wed Sep 7 23:42:33 2022 ] Mean training loss: 1.0019.
120
+ [ Wed Sep 7 23:42:33 2022 ] Time consumption: [Data]01%, [Network]99%
121
+ [ Wed Sep 7 23:42:33 2022 ] Training epoch: 30
122
+ [ Wed Sep 7 23:42:33 2022 ] Learning rate: 0.15
123
+ [ Wed Sep 7 23:46:56 2022 ] Mean training loss: 0.9858.
124
+ [ Wed Sep 7 23:46:56 2022 ] Time consumption: [Data]01%, [Network]99%
125
+ [ Wed Sep 7 23:46:56 2022 ] Training epoch: 31
126
+ [ Wed Sep 7 23:46:56 2022 ] Learning rate: 0.15
127
+ [ Wed Sep 7 23:51:18 2022 ] Mean training loss: 0.9633.
128
+ [ Wed Sep 7 23:51:18 2022 ] Time consumption: [Data]01%, [Network]99%
129
+ [ Wed Sep 7 23:51:18 2022 ] Training epoch: 32
130
+ [ Wed Sep 7 23:51:18 2022 ] Learning rate: 0.15
131
+ [ Wed Sep 7 23:55:40 2022 ] Mean training loss: 0.9670.
132
+ [ Wed Sep 7 23:55:40 2022 ] Time consumption: [Data]01%, [Network]99%
133
+ [ Wed Sep 7 23:55:40 2022 ] Training epoch: 33
134
+ [ Wed Sep 7 23:55:40 2022 ] Learning rate: 0.15
135
+ [ Thu Sep 8 00:00:02 2022 ] Mean training loss: 0.9783.
136
+ [ Thu Sep 8 00:00:02 2022 ] Time consumption: [Data]01%, [Network]99%
137
+ [ Thu Sep 8 00:00:02 2022 ] Training epoch: 34
138
+ [ Thu Sep 8 00:00:02 2022 ] Learning rate: 0.15
139
+ [ Thu Sep 8 00:04:24 2022 ] Mean training loss: 0.9647.
140
+ [ Thu Sep 8 00:04:24 2022 ] Time consumption: [Data]01%, [Network]99%
141
+ [ Thu Sep 8 00:04:24 2022 ] Training epoch: 35
142
+ [ Thu Sep 8 00:04:24 2022 ] Learning rate: 0.15
143
+ [ Thu Sep 8 00:08:46 2022 ] Mean training loss: 0.9546.
144
+ [ Thu Sep 8 00:08:46 2022 ] Time consumption: [Data]01%, [Network]99%
145
+ [ Thu Sep 8 00:08:46 2022 ] Training epoch: 36
146
+ [ Thu Sep 8 00:08:46 2022 ] Learning rate: 0.15
147
+ [ Thu Sep 8 00:13:08 2022 ] Mean training loss: 0.9352.
148
+ [ Thu Sep 8 00:13:08 2022 ] Time consumption: [Data]01%, [Network]99%
149
+ [ Thu Sep 8 00:13:08 2022 ] Training epoch: 37
150
+ [ Thu Sep 8 00:13:08 2022 ] Learning rate: 0.15
151
+ [ Thu Sep 8 00:17:30 2022 ] Mean training loss: 0.9385.
152
+ [ Thu Sep 8 00:17:30 2022 ] Time consumption: [Data]01%, [Network]99%
153
+ [ Thu Sep 8 00:17:30 2022 ] Training epoch: 38
154
+ [ Thu Sep 8 00:17:30 2022 ] Learning rate: 0.15
155
+ [ Thu Sep 8 00:21:52 2022 ] Mean training loss: 0.9287.
156
+ [ Thu Sep 8 00:21:52 2022 ] Time consumption: [Data]01%, [Network]99%
157
+ [ Thu Sep 8 00:21:52 2022 ] Training epoch: 39
158
+ [ Thu Sep 8 00:21:52 2022 ] Learning rate: 0.15
159
+ [ Thu Sep 8 00:26:14 2022 ] Mean training loss: 0.9366.
160
+ [ Thu Sep 8 00:26:14 2022 ] Time consumption: [Data]01%, [Network]99%
161
+ [ Thu Sep 8 00:26:14 2022 ] Training epoch: 40
162
+ [ Thu Sep 8 00:26:14 2022 ] Learning rate: 0.15
163
+ [ Thu Sep 8 00:30:37 2022 ] Mean training loss: 0.9410.
164
+ [ Thu Sep 8 00:30:37 2022 ] Time consumption: [Data]01%, [Network]99%
165
+ [ Thu Sep 8 00:30:37 2022 ] Training epoch: 41
166
+ [ Thu Sep 8 00:30:37 2022 ] Learning rate: 0.15
167
+ [ Thu Sep 8 00:34:59 2022 ] Mean training loss: 0.9381.
168
+ [ Thu Sep 8 00:34:59 2022 ] Time consumption: [Data]01%, [Network]99%
169
+ [ Thu Sep 8 00:34:59 2022 ] Training epoch: 42
170
+ [ Thu Sep 8 00:34:59 2022 ] Learning rate: 0.15
171
+ [ Thu Sep 8 00:39:21 2022 ] Mean training loss: 0.9230.
172
+ [ Thu Sep 8 00:39:21 2022 ] Time consumption: [Data]01%, [Network]99%
173
+ [ Thu Sep 8 00:39:21 2022 ] Training epoch: 43
174
+ [ Thu Sep 8 00:39:21 2022 ] Learning rate: 0.15
175
+ [ Thu Sep 8 00:43:43 2022 ] Mean training loss: 0.9217.
176
+ [ Thu Sep 8 00:43:43 2022 ] Time consumption: [Data]01%, [Network]99%
177
+ [ Thu Sep 8 00:43:43 2022 ] Training epoch: 44
178
+ [ Thu Sep 8 00:43:43 2022 ] Learning rate: 0.15
179
+ [ Thu Sep 8 00:48:05 2022 ] Mean training loss: 0.9077.
180
+ [ Thu Sep 8 00:48:05 2022 ] Time consumption: [Data]01%, [Network]99%
181
+ [ Thu Sep 8 00:48:05 2022 ] Training epoch: 45
182
+ [ Thu Sep 8 00:48:05 2022 ] Learning rate: 0.15
183
+ [ Thu Sep 8 00:52:26 2022 ] Mean training loss: 0.9124.
184
+ [ Thu Sep 8 00:52:26 2022 ] Time consumption: [Data]01%, [Network]99%
185
+ [ Thu Sep 8 00:52:26 2022 ] Training epoch: 46
186
+ [ Thu Sep 8 00:52:26 2022 ] Learning rate: 0.15
187
+ [ Thu Sep 8 00:56:48 2022 ] Mean training loss: 0.9260.
188
+ [ Thu Sep 8 00:56:48 2022 ] Time consumption: [Data]01%, [Network]99%
189
+ [ Thu Sep 8 00:56:48 2022 ] Training epoch: 47
190
+ [ Thu Sep 8 00:56:48 2022 ] Learning rate: 0.15
191
+ [ Thu Sep 8 01:01:10 2022 ] Mean training loss: 0.9053.
192
+ [ Thu Sep 8 01:01:10 2022 ] Time consumption: [Data]01%, [Network]99%
193
+ [ Thu Sep 8 01:01:10 2022 ] Training epoch: 48
194
+ [ Thu Sep 8 01:01:10 2022 ] Learning rate: 0.15
195
+ [ Thu Sep 8 01:05:33 2022 ] Mean training loss: 0.8866.
196
+ [ Thu Sep 8 01:05:33 2022 ] Time consumption: [Data]01%, [Network]99%
197
+ [ Thu Sep 8 01:05:33 2022 ] Training epoch: 49
198
+ [ Thu Sep 8 01:05:33 2022 ] Learning rate: 0.15
199
+ [ Thu Sep 8 01:09:54 2022 ] Mean training loss: 0.8913.
200
+ [ Thu Sep 8 01:09:54 2022 ] Time consumption: [Data]01%, [Network]99%
201
+ [ Thu Sep 8 01:09:54 2022 ] Training epoch: 50
202
+ [ Thu Sep 8 01:09:54 2022 ] Learning rate: 0.15
203
+ [ Thu Sep 8 01:14:15 2022 ] Mean training loss: 0.8890.
204
+ [ Thu Sep 8 01:14:15 2022 ] Time consumption: [Data]01%, [Network]99%
205
+ [ Thu Sep 8 01:14:15 2022 ] Training epoch: 51
206
+ [ Thu Sep 8 01:14:15 2022 ] Learning rate: 0.015
207
+ [ Thu Sep 8 01:18:37 2022 ] Mean training loss: 0.4241.
208
+ [ Thu Sep 8 01:18:37 2022 ] Time consumption: [Data]01%, [Network]99%
209
+ [ Thu Sep 8 01:18:37 2022 ] Eval epoch: 51
210
+ [ Thu Sep 8 01:26:33 2022 ] Epoch 51 Curr Acc: (34810/59477)58.53%
211
+ [ Thu Sep 8 01:26:33 2022 ] Epoch 51 Best Acc 58.53%
212
+ [ Thu Sep 8 01:26:33 2022 ] Training epoch: 52
213
+ [ Thu Sep 8 01:26:33 2022 ] Learning rate: 0.015
214
+ [ Thu Sep 8 01:30:54 2022 ] Mean training loss: 0.2982.
215
+ [ Thu Sep 8 01:30:54 2022 ] Time consumption: [Data]01%, [Network]99%
216
+ [ Thu Sep 8 01:30:54 2022 ] Eval epoch: 52
217
+ [ Thu Sep 8 01:38:43 2022 ] Epoch 52 Curr Acc: (35818/59477)60.22%
218
+ [ Thu Sep 8 01:38:43 2022 ] Epoch 52 Best Acc 60.22%
219
+ [ Thu Sep 8 01:38:43 2022 ] Training epoch: 53
220
+ [ Thu Sep 8 01:38:43 2022 ] Learning rate: 0.015
221
+ [ Thu Sep 8 01:43:04 2022 ] Mean training loss: 0.2447.
222
+ [ Thu Sep 8 01:43:04 2022 ] Time consumption: [Data]01%, [Network]99%
223
+ [ Thu Sep 8 01:43:04 2022 ] Eval epoch: 53
224
+ [ Thu Sep 8 01:50:52 2022 ] Epoch 53 Curr Acc: (36031/59477)60.58%
225
+ [ Thu Sep 8 01:50:52 2022 ] Epoch 53 Best Acc 60.58%
226
+ [ Thu Sep 8 01:50:52 2022 ] Training epoch: 54
227
+ [ Thu Sep 8 01:50:52 2022 ] Learning rate: 0.015
228
+ [ Thu Sep 8 01:55:14 2022 ] Mean training loss: 0.2083.
229
+ [ Thu Sep 8 01:55:14 2022 ] Time consumption: [Data]01%, [Network]99%
230
+ [ Thu Sep 8 01:55:14 2022 ] Eval epoch: 54
231
+ [ Thu Sep 8 02:03:02 2022 ] Epoch 54 Curr Acc: (35908/59477)60.37%
232
+ [ Thu Sep 8 02:03:02 2022 ] Epoch 53 Best Acc 60.58%
233
+ [ Thu Sep 8 02:03:02 2022 ] Training epoch: 55
234
+ [ Thu Sep 8 02:03:02 2022 ] Learning rate: 0.015
235
+ [ Thu Sep 8 02:07:23 2022 ] Mean training loss: 0.1723.
236
+ [ Thu Sep 8 02:07:23 2022 ] Time consumption: [Data]01%, [Network]99%
237
+ [ Thu Sep 8 02:07:23 2022 ] Eval epoch: 55
238
+ [ Thu Sep 8 02:15:11 2022 ] Epoch 55 Curr Acc: (35825/59477)60.23%
239
+ [ Thu Sep 8 02:15:11 2022 ] Epoch 53 Best Acc 60.58%
240
+ [ Thu Sep 8 02:15:11 2022 ] Training epoch: 56
241
+ [ Thu Sep 8 02:15:11 2022 ] Learning rate: 0.015
242
+ [ Thu Sep 8 02:19:32 2022 ] Mean training loss: 0.1578.
243
+ [ Thu Sep 8 02:19:32 2022 ] Time consumption: [Data]01%, [Network]99%
244
+ [ Thu Sep 8 02:19:32 2022 ] Eval epoch: 56
245
+ [ Thu Sep 8 02:27:21 2022 ] Epoch 56 Curr Acc: (35967/59477)60.47%
246
+ [ Thu Sep 8 02:27:21 2022 ] Epoch 53 Best Acc 60.58%
247
+ [ Thu Sep 8 02:27:21 2022 ] Training epoch: 57
248
+ [ Thu Sep 8 02:27:21 2022 ] Learning rate: 0.015
249
+ [ Thu Sep 8 02:31:42 2022 ] Mean training loss: 0.1368.
250
+ [ Thu Sep 8 02:31:42 2022 ] Time consumption: [Data]01%, [Network]99%
251
+ [ Thu Sep 8 02:31:42 2022 ] Eval epoch: 57
252
+ [ Thu Sep 8 02:39:30 2022 ] Epoch 57 Curr Acc: (35911/59477)60.38%
253
+ [ Thu Sep 8 02:39:30 2022 ] Epoch 53 Best Acc 60.58%
254
+ [ Thu Sep 8 02:39:30 2022 ] Training epoch: 58
255
+ [ Thu Sep 8 02:39:30 2022 ] Learning rate: 0.015
256
+ [ Thu Sep 8 02:43:52 2022 ] Mean training loss: 0.1265.
257
+ [ Thu Sep 8 02:43:52 2022 ] Time consumption: [Data]01%, [Network]99%
258
+ [ Thu Sep 8 02:43:52 2022 ] Eval epoch: 58
259
+ [ Thu Sep 8 02:51:40 2022 ] Epoch 58 Curr Acc: (35536/59477)59.75%
260
+ [ Thu Sep 8 02:51:40 2022 ] Epoch 53 Best Acc 60.58%
261
+ [ Thu Sep 8 02:51:40 2022 ] Training epoch: 59
262
+ [ Thu Sep 8 02:51:40 2022 ] Learning rate: 0.015
263
+ [ Thu Sep 8 02:56:01 2022 ] Mean training loss: 0.1084.
264
+ [ Thu Sep 8 02:56:01 2022 ] Time consumption: [Data]01%, [Network]99%
265
+ [ Thu Sep 8 02:56:01 2022 ] Eval epoch: 59
266
+ [ Thu Sep 8 03:03:49 2022 ] Epoch 59 Curr Acc: (35434/59477)59.58%
267
+ [ Thu Sep 8 03:03:49 2022 ] Epoch 53 Best Acc 60.58%
268
+ [ Thu Sep 8 03:03:49 2022 ] Training epoch: 60
269
+ [ Thu Sep 8 03:03:49 2022 ] Learning rate: 0.015
270
+ [ Thu Sep 8 03:08:11 2022 ] Mean training loss: 0.0970.
271
+ [ Thu Sep 8 03:08:11 2022 ] Time consumption: [Data]01%, [Network]99%
272
+ [ Thu Sep 8 03:08:11 2022 ] Eval epoch: 60
273
+ [ Thu Sep 8 03:15:59 2022 ] Epoch 60 Curr Acc: (35254/59477)59.27%
274
+ [ Thu Sep 8 03:15:59 2022 ] Epoch 53 Best Acc 60.58%
275
+ [ Thu Sep 8 03:15:59 2022 ] Training epoch: 61
276
+ [ Thu Sep 8 03:15:59 2022 ] Learning rate: 0.015
277
+ [ Thu Sep 8 03:20:20 2022 ] Mean training loss: 0.0871.
278
+ [ Thu Sep 8 03:20:20 2022 ] Time consumption: [Data]01%, [Network]99%
279
+ [ Thu Sep 8 03:20:20 2022 ] Eval epoch: 61
280
+ [ Thu Sep 8 03:28:08 2022 ] Epoch 61 Curr Acc: (35169/59477)59.13%
281
+ [ Thu Sep 8 03:28:08 2022 ] Epoch 53 Best Acc 60.58%
282
+ [ Thu Sep 8 03:28:08 2022 ] Training epoch: 62
283
+ [ Thu Sep 8 03:28:08 2022 ] Learning rate: 0.015
284
+ [ Thu Sep 8 03:32:29 2022 ] Mean training loss: 0.0778.
285
+ [ Thu Sep 8 03:32:29 2022 ] Time consumption: [Data]01%, [Network]99%
286
+ [ Thu Sep 8 03:32:29 2022 ] Eval epoch: 62
287
+ [ Thu Sep 8 03:40:17 2022 ] Epoch 62 Curr Acc: (35937/59477)60.42%
288
+ [ Thu Sep 8 03:40:17 2022 ] Epoch 53 Best Acc 60.58%
289
+ [ Thu Sep 8 03:40:17 2022 ] Training epoch: 63
290
+ [ Thu Sep 8 03:40:17 2022 ] Learning rate: 0.015
291
+ [ Thu Sep 8 03:44:38 2022 ] Mean training loss: 0.0739.
292
+ [ Thu Sep 8 03:44:38 2022 ] Time consumption: [Data]01%, [Network]99%
293
+ [ Thu Sep 8 03:44:38 2022 ] Eval epoch: 63
294
+ [ Thu Sep 8 03:52:26 2022 ] Epoch 63 Curr Acc: (35673/59477)59.98%
295
+ [ Thu Sep 8 03:52:26 2022 ] Epoch 53 Best Acc 60.58%
296
+ [ Thu Sep 8 03:52:26 2022 ] Training epoch: 64
297
+ [ Thu Sep 8 03:52:26 2022 ] Learning rate: 0.015
298
+ [ Thu Sep 8 03:56:47 2022 ] Mean training loss: 0.0715.
299
+ [ Thu Sep 8 03:56:47 2022 ] Time consumption: [Data]01%, [Network]99%
300
+ [ Thu Sep 8 03:56:47 2022 ] Eval epoch: 64
301
+ [ Thu Sep 8 04:04:36 2022 ] Epoch 64 Curr Acc: (35520/59477)59.72%
302
+ [ Thu Sep 8 04:04:36 2022 ] Epoch 53 Best Acc 60.58%
303
+ [ Thu Sep 8 04:04:36 2022 ] Training epoch: 65
304
+ [ Thu Sep 8 04:04:36 2022 ] Learning rate: 0.015
305
+ [ Thu Sep 8 04:08:57 2022 ] Mean training loss: 0.0650.
306
+ [ Thu Sep 8 04:08:57 2022 ] Time consumption: [Data]01%, [Network]99%
307
+ [ Thu Sep 8 04:08:57 2022 ] Eval epoch: 65
308
+ [ Thu Sep 8 04:16:45 2022 ] Epoch 65 Curr Acc: (35293/59477)59.34%
309
+ [ Thu Sep 8 04:16:45 2022 ] Epoch 53 Best Acc 60.58%
310
+ [ Thu Sep 8 04:16:45 2022 ] Training epoch: 66
311
+ [ Thu Sep 8 04:16:45 2022 ] Learning rate: 0.015
312
+ [ Thu Sep 8 04:21:06 2022 ] Mean training loss: 0.0668.
313
+ [ Thu Sep 8 04:21:06 2022 ] Time consumption: [Data]01%, [Network]99%
314
+ [ Thu Sep 8 04:21:06 2022 ] Eval epoch: 66
315
+ [ Thu Sep 8 04:28:54 2022 ] Epoch 66 Curr Acc: (35144/59477)59.09%
316
+ [ Thu Sep 8 04:28:54 2022 ] Epoch 53 Best Acc 60.58%
317
+ [ Thu Sep 8 04:28:54 2022 ] Training epoch: 67
318
+ [ Thu Sep 8 04:28:54 2022 ] Learning rate: 0.015
319
+ [ Thu Sep 8 04:33:15 2022 ] Mean training loss: 0.0558.
320
+ [ Thu Sep 8 04:33:15 2022 ] Time consumption: [Data]01%, [Network]99%
321
+ [ Thu Sep 8 04:33:16 2022 ] Eval epoch: 67
322
+ [ Thu Sep 8 04:41:04 2022 ] Epoch 67 Curr Acc: (34918/59477)58.71%
323
+ [ Thu Sep 8 04:41:04 2022 ] Epoch 53 Best Acc 60.58%
324
+ [ Thu Sep 8 04:41:04 2022 ] Training epoch: 68
325
+ [ Thu Sep 8 04:41:04 2022 ] Learning rate: 0.015
326
+ [ Thu Sep 8 04:45:25 2022 ] Mean training loss: 0.0578.
327
+ [ Thu Sep 8 04:45:25 2022 ] Time consumption: [Data]01%, [Network]99%
328
+ [ Thu Sep 8 04:45:25 2022 ] Eval epoch: 68
329
+ [ Thu Sep 8 04:53:13 2022 ] Epoch 68 Curr Acc: (35221/59477)59.22%
330
+ [ Thu Sep 8 04:53:13 2022 ] Epoch 53 Best Acc 60.58%
331
+ [ Thu Sep 8 04:53:13 2022 ] Training epoch: 69
332
+ [ Thu Sep 8 04:53:13 2022 ] Learning rate: 0.015
333
+ [ Thu Sep 8 04:57:34 2022 ] Mean training loss: 0.0592.
334
+ [ Thu Sep 8 04:57:34 2022 ] Time consumption: [Data]01%, [Network]99%
335
+ [ Thu Sep 8 04:57:34 2022 ] Eval epoch: 69
336
+ [ Thu Sep 8 05:05:22 2022 ] Epoch 69 Curr Acc: (35637/59477)59.92%
337
+ [ Thu Sep 8 05:05:22 2022 ] Epoch 53 Best Acc 60.58%
338
+ [ Thu Sep 8 05:05:22 2022 ] Training epoch: 70
339
+ [ Thu Sep 8 05:05:22 2022 ] Learning rate: 0.015
340
+ [ Thu Sep 8 05:09:42 2022 ] Mean training loss: 0.0493.
341
+ [ Thu Sep 8 05:09:42 2022 ] Time consumption: [Data]01%, [Network]99%
342
+ [ Thu Sep 8 05:09:43 2022 ] Eval epoch: 70
343
+ [ Thu Sep 8 05:17:31 2022 ] Epoch 70 Curr Acc: (34949/59477)58.76%
344
+ [ Thu Sep 8 05:17:31 2022 ] Epoch 53 Best Acc 60.58%
345
+ [ Thu Sep 8 05:17:31 2022 ] Training epoch: 71
346
+ [ Thu Sep 8 05:17:31 2022 ] Learning rate: 0.0015000000000000002
347
+ [ Thu Sep 8 05:21:52 2022 ] Mean training loss: 0.0381.
348
+ [ Thu Sep 8 05:21:52 2022 ] Time consumption: [Data]01%, [Network]99%
349
+ [ Thu Sep 8 05:21:52 2022 ] Eval epoch: 71
350
+ [ Thu Sep 8 05:29:40 2022 ] Epoch 71 Curr Acc: (35652/59477)59.94%
351
+ [ Thu Sep 8 05:29:40 2022 ] Epoch 53 Best Acc 60.58%
352
+ [ Thu Sep 8 05:29:40 2022 ] Training epoch: 72
353
+ [ Thu Sep 8 05:29:40 2022 ] Learning rate: 0.0015000000000000002
354
+ [ Thu Sep 8 05:34:01 2022 ] Mean training loss: 0.0282.
355
+ [ Thu Sep 8 05:34:01 2022 ] Time consumption: [Data]01%, [Network]99%
356
+ [ Thu Sep 8 05:34:02 2022 ] Eval epoch: 72
357
+ [ Thu Sep 8 05:41:50 2022 ] Epoch 72 Curr Acc: (35606/59477)59.87%
358
+ [ Thu Sep 8 05:41:50 2022 ] Epoch 53 Best Acc 60.58%
359
+ [ Thu Sep 8 05:41:50 2022 ] Training epoch: 73
360
+ [ Thu Sep 8 05:41:50 2022 ] Learning rate: 0.0015000000000000002
361
+ [ Thu Sep 8 05:46:11 2022 ] Mean training loss: 0.0261.
362
+ [ Thu Sep 8 05:46:11 2022 ] Time consumption: [Data]01%, [Network]99%
363
+ [ Thu Sep 8 05:46:11 2022 ] Eval epoch: 73
364
+ [ Thu Sep 8 05:53:59 2022 ] Epoch 73 Curr Acc: (35464/59477)59.63%
365
+ [ Thu Sep 8 05:53:59 2022 ] Epoch 53 Best Acc 60.58%
366
+ [ Thu Sep 8 05:53:59 2022 ] Training epoch: 74
367
+ [ Thu Sep 8 05:53:59 2022 ] Learning rate: 0.0015000000000000002
368
+ [ Thu Sep 8 05:58:20 2022 ] Mean training loss: 0.0262.
369
+ [ Thu Sep 8 05:58:20 2022 ] Time consumption: [Data]01%, [Network]99%
370
+ [ Thu Sep 8 05:58:20 2022 ] Eval epoch: 74
371
+ [ Thu Sep 8 06:06:08 2022 ] Epoch 74 Curr Acc: (35752/59477)60.11%
372
+ [ Thu Sep 8 06:06:08 2022 ] Epoch 53 Best Acc 60.58%
373
+ [ Thu Sep 8 06:06:08 2022 ] Training epoch: 75
374
+ [ Thu Sep 8 06:06:08 2022 ] Learning rate: 0.0015000000000000002
375
+ [ Thu Sep 8 06:10:29 2022 ] Mean training loss: 0.0240.
376
+ [ Thu Sep 8 06:10:29 2022 ] Time consumption: [Data]01%, [Network]99%
377
+ [ Thu Sep 8 06:10:29 2022 ] Eval epoch: 75
378
+ [ Thu Sep 8 06:18:17 2022 ] Epoch 75 Curr Acc: (35418/59477)59.55%
379
+ [ Thu Sep 8 06:18:17 2022 ] Epoch 53 Best Acc 60.58%
380
+ [ Thu Sep 8 06:18:17 2022 ] Training epoch: 76
381
+ [ Thu Sep 8 06:18:17 2022 ] Learning rate: 0.0015000000000000002
382
+ [ Thu Sep 8 06:22:38 2022 ] Mean training loss: 0.0237.
383
+ [ Thu Sep 8 06:22:38 2022 ] Time consumption: [Data]01%, [Network]99%
384
+ [ Thu Sep 8 06:22:38 2022 ] Eval epoch: 76
385
+ [ Thu Sep 8 06:30:26 2022 ] Epoch 76 Curr Acc: (35509/59477)59.70%
386
+ [ Thu Sep 8 06:30:26 2022 ] Epoch 53 Best Acc 60.58%
387
+ [ Thu Sep 8 06:30:26 2022 ] Training epoch: 77
388
+ [ Thu Sep 8 06:30:26 2022 ] Learning rate: 0.0015000000000000002
389
+ [ Thu Sep 8 06:34:47 2022 ] Mean training loss: 0.0215.
390
+ [ Thu Sep 8 06:34:47 2022 ] Time consumption: [Data]01%, [Network]99%
391
+ [ Thu Sep 8 06:34:47 2022 ] Eval epoch: 77
392
+ [ Thu Sep 8 06:42:35 2022 ] Epoch 77 Curr Acc: (35933/59477)60.41%
393
+ [ Thu Sep 8 06:42:35 2022 ] Epoch 53 Best Acc 60.58%
394
+ [ Thu Sep 8 06:42:35 2022 ] Training epoch: 78
395
+ [ Thu Sep 8 06:42:35 2022 ] Learning rate: 0.0015000000000000002
396
+ [ Thu Sep 8 06:46:56 2022 ] Mean training loss: 0.0203.
397
+ [ Thu Sep 8 06:46:56 2022 ] Time consumption: [Data]01%, [Network]99%
398
+ [ Thu Sep 8 06:46:56 2022 ] Eval epoch: 78
399
+ [ Thu Sep 8 06:54:44 2022 ] Epoch 78 Curr Acc: (35729/59477)60.07%
400
+ [ Thu Sep 8 06:54:44 2022 ] Epoch 53 Best Acc 60.58%
401
+ [ Thu Sep 8 06:54:44 2022 ] Training epoch: 79
402
+ [ Thu Sep 8 06:54:44 2022 ] Learning rate: 0.0015000000000000002
403
+ [ Thu Sep 8 06:59:04 2022 ] Mean training loss: 0.0199.
404
+ [ Thu Sep 8 06:59:04 2022 ] Time consumption: [Data]01%, [Network]99%
405
+ [ Thu Sep 8 06:59:04 2022 ] Eval epoch: 79
406
+ [ Thu Sep 8 07:06:52 2022 ] Epoch 79 Curr Acc: (35253/59477)59.27%
407
+ [ Thu Sep 8 07:06:52 2022 ] Epoch 53 Best Acc 60.58%
408
+ [ Thu Sep 8 07:06:52 2022 ] Training epoch: 80
409
+ [ Thu Sep 8 07:06:52 2022 ] Learning rate: 0.0015000000000000002
410
+ [ Thu Sep 8 07:11:13 2022 ] Mean training loss: 0.0214.
411
+ [ Thu Sep 8 07:11:13 2022 ] Time consumption: [Data]01%, [Network]99%
412
+ [ Thu Sep 8 07:11:13 2022 ] Eval epoch: 80
413
+ [ Thu Sep 8 07:19:01 2022 ] Epoch 80 Curr Acc: (35619/59477)59.89%
414
+ [ Thu Sep 8 07:19:01 2022 ] Epoch 53 Best Acc 60.58%
415
+ [ Thu Sep 8 07:19:01 2022 ] Training epoch: 81
416
+ [ Thu Sep 8 07:19:01 2022 ] Learning rate: 0.0015000000000000002
417
+ [ Thu Sep 8 07:23:22 2022 ] Mean training loss: 0.0198.
418
+ [ Thu Sep 8 07:23:22 2022 ] Time consumption: [Data]01%, [Network]99%
419
+ [ Thu Sep 8 07:23:22 2022 ] Eval epoch: 81
420
+ [ Thu Sep 8 07:31:10 2022 ] Epoch 81 Curr Acc: (35872/59477)60.31%
421
+ [ Thu Sep 8 07:31:10 2022 ] Epoch 53 Best Acc 60.58%
422
+ [ Thu Sep 8 07:31:10 2022 ] Training epoch: 82
423
+ [ Thu Sep 8 07:31:10 2022 ] Learning rate: 0.0015000000000000002
424
+ [ Thu Sep 8 07:35:31 2022 ] Mean training loss: 0.0198.
425
+ [ Thu Sep 8 07:35:31 2022 ] Time consumption: [Data]01%, [Network]99%
426
+ [ Thu Sep 8 07:35:31 2022 ] Eval epoch: 82
427
+ [ Thu Sep 8 07:43:19 2022 ] Epoch 82 Curr Acc: (35989/59477)60.51%
428
+ [ Thu Sep 8 07:43:19 2022 ] Epoch 53 Best Acc 60.58%
429
+ [ Thu Sep 8 07:43:19 2022 ] Training epoch: 83
430
+ [ Thu Sep 8 07:43:19 2022 ] Learning rate: 0.0015000000000000002
431
+ [ Thu Sep 8 07:47:41 2022 ] Mean training loss: 0.0192.
432
+ [ Thu Sep 8 07:47:41 2022 ] Time consumption: [Data]01%, [Network]99%
433
+ [ Thu Sep 8 07:47:41 2022 ] Eval epoch: 83
434
+ [ Thu Sep 8 07:55:29 2022 ] Epoch 83 Curr Acc: (35826/59477)60.24%
435
+ [ Thu Sep 8 07:55:29 2022 ] Epoch 53 Best Acc 60.58%
436
+ [ Thu Sep 8 07:55:29 2022 ] Training epoch: 84
437
+ [ Thu Sep 8 07:55:29 2022 ] Learning rate: 0.0015000000000000002
438
+ [ Thu Sep 8 07:59:49 2022 ] Mean training loss: 0.0196.
439
+ [ Thu Sep 8 07:59:49 2022 ] Time consumption: [Data]01%, [Network]99%
440
+ [ Thu Sep 8 07:59:49 2022 ] Eval epoch: 84
441
+ [ Thu Sep 8 08:07:37 2022 ] Epoch 84 Curr Acc: (35643/59477)59.93%
442
+ [ Thu Sep 8 08:07:37 2022 ] Epoch 53 Best Acc 60.58%
443
+ [ Thu Sep 8 08:07:37 2022 ] Training epoch: 85
444
+ [ Thu Sep 8 08:07:37 2022 ] Learning rate: 0.0015000000000000002
445
+ [ Thu Sep 8 08:11:57 2022 ] Mean training loss: 0.0183.
446
+ [ Thu Sep 8 08:11:57 2022 ] Time consumption: [Data]01%, [Network]99%
447
+ [ Thu Sep 8 08:11:57 2022 ] Eval epoch: 85
448
+ [ Thu Sep 8 08:19:45 2022 ] Epoch 85 Curr Acc: (35881/59477)60.33%
449
+ [ Thu Sep 8 08:19:45 2022 ] Epoch 53 Best Acc 60.58%
450
+ [ Thu Sep 8 08:19:45 2022 ] Training epoch: 86
451
+ [ Thu Sep 8 08:19:45 2022 ] Learning rate: 0.0015000000000000002
452
+ [ Thu Sep 8 08:24:05 2022 ] Mean training loss: 0.0176.
453
+ [ Thu Sep 8 08:24:05 2022 ] Time consumption: [Data]01%, [Network]99%
454
+ [ Thu Sep 8 08:24:05 2022 ] Eval epoch: 86
455
+ [ Thu Sep 8 08:31:53 2022 ] Epoch 86 Curr Acc: (35653/59477)59.94%
456
+ [ Thu Sep 8 08:31:53 2022 ] Epoch 53 Best Acc 60.58%
457
+ [ Thu Sep 8 08:31:53 2022 ] Training epoch: 87
458
+ [ Thu Sep 8 08:31:53 2022 ] Learning rate: 0.0015000000000000002
459
+ [ Thu Sep 8 08:36:13 2022 ] Mean training loss: 0.0165.
460
+ [ Thu Sep 8 08:36:13 2022 ] Time consumption: [Data]01%, [Network]99%
461
+ [ Thu Sep 8 08:36:13 2022 ] Eval epoch: 87
462
+ [ Thu Sep 8 08:44:01 2022 ] Epoch 87 Curr Acc: (35749/59477)60.11%
463
+ [ Thu Sep 8 08:44:01 2022 ] Epoch 53 Best Acc 60.58%
464
+ [ Thu Sep 8 08:44:01 2022 ] Training epoch: 88
465
+ [ Thu Sep 8 08:44:01 2022 ] Learning rate: 0.0015000000000000002
466
+ [ Thu Sep 8 08:48:21 2022 ] Mean training loss: 0.0172.
467
+ [ Thu Sep 8 08:48:21 2022 ] Time consumption: [Data]01%, [Network]99%
468
+ [ Thu Sep 8 08:48:21 2022 ] Eval epoch: 88
469
+ [ Thu Sep 8 08:56:09 2022 ] Epoch 88 Curr Acc: (35747/59477)60.10%
470
+ [ Thu Sep 8 08:56:09 2022 ] Epoch 53 Best Acc 60.58%
471
+ [ Thu Sep 8 08:56:09 2022 ] Training epoch: 89
472
+ [ Thu Sep 8 08:56:09 2022 ] Learning rate: 0.0015000000000000002
473
+ [ Thu Sep 8 09:00:29 2022 ] Mean training loss: 0.0170.
474
+ [ Thu Sep 8 09:00:29 2022 ] Time consumption: [Data]01%, [Network]99%
475
+ [ Thu Sep 8 09:00:29 2022 ] Eval epoch: 89
476
+ [ Thu Sep 8 09:08:17 2022 ] Epoch 89 Curr Acc: (35815/59477)60.22%
477
+ [ Thu Sep 8 09:08:17 2022 ] Epoch 53 Best Acc 60.58%
478
+ [ Thu Sep 8 09:08:17 2022 ] Training epoch: 90
479
+ [ Thu Sep 8 09:08:17 2022 ] Learning rate: 0.0015000000000000002
480
+ [ Thu Sep 8 09:12:38 2022 ] Mean training loss: 0.0171.
481
+ [ Thu Sep 8 09:12:38 2022 ] Time consumption: [Data]01%, [Network]99%
482
+ [ Thu Sep 8 09:12:38 2022 ] Eval epoch: 90
483
+ [ Thu Sep 8 09:20:26 2022 ] Epoch 90 Curr Acc: (35771/59477)60.14%
484
+ [ Thu Sep 8 09:20:26 2022 ] Epoch 53 Best Acc 60.58%
485
+ [ Thu Sep 8 09:20:26 2022 ] Training epoch: 91
486
+ [ Thu Sep 8 09:20:26 2022 ] Learning rate: 0.00015000000000000004
487
+ [ Thu Sep 8 09:24:47 2022 ] Mean training loss: 0.0170.
488
+ [ Thu Sep 8 09:24:47 2022 ] Time consumption: [Data]01%, [Network]99%
489
+ [ Thu Sep 8 09:24:47 2022 ] Eval epoch: 91
490
+ [ Thu Sep 8 09:32:35 2022 ] Epoch 91 Curr Acc: (35898/59477)60.36%
491
+ [ Thu Sep 8 09:32:35 2022 ] Epoch 53 Best Acc 60.58%
492
+ [ Thu Sep 8 09:32:35 2022 ] Training epoch: 92
493
+ [ Thu Sep 8 09:32:35 2022 ] Learning rate: 0.00015000000000000004
494
+ [ Thu Sep 8 09:36:56 2022 ] Mean training loss: 0.0163.
495
+ [ Thu Sep 8 09:36:56 2022 ] Time consumption: [Data]01%, [Network]99%
496
+ [ Thu Sep 8 09:36:56 2022 ] Eval epoch: 92
497
+ [ Thu Sep 8 09:44:44 2022 ] Epoch 92 Curr Acc: (35934/59477)60.42%
498
+ [ Thu Sep 8 09:44:44 2022 ] Epoch 53 Best Acc 60.58%
499
+ [ Thu Sep 8 09:44:44 2022 ] Training epoch: 93
500
+ [ Thu Sep 8 09:44:44 2022 ] Learning rate: 0.00015000000000000004
501
+ [ Thu Sep 8 09:49:06 2022 ] Mean training loss: 0.0174.
502
+ [ Thu Sep 8 09:49:06 2022 ] Time consumption: [Data]01%, [Network]99%
503
+ [ Thu Sep 8 09:49:06 2022 ] Eval epoch: 93
504
+ [ Thu Sep 8 09:56:54 2022 ] Epoch 93 Curr Acc: (35876/59477)60.32%
505
+ [ Thu Sep 8 09:56:54 2022 ] Epoch 53 Best Acc 60.58%
506
+ [ Thu Sep 8 09:56:54 2022 ] Training epoch: 94
507
+ [ Thu Sep 8 09:56:54 2022 ] Learning rate: 0.00015000000000000004
508
+ [ Thu Sep 8 10:01:16 2022 ] Mean training loss: 0.0173.
509
+ [ Thu Sep 8 10:01:16 2022 ] Time consumption: [Data]01%, [Network]99%
510
+ [ Thu Sep 8 10:01:16 2022 ] Eval epoch: 94
511
+ [ Thu Sep 8 10:09:04 2022 ] Epoch 94 Curr Acc: (35933/59477)60.41%
512
+ [ Thu Sep 8 10:09:04 2022 ] Epoch 53 Best Acc 60.58%
513
+ [ Thu Sep 8 10:09:04 2022 ] Training epoch: 95
514
+ [ Thu Sep 8 10:09:04 2022 ] Learning rate: 0.00015000000000000004
515
+ [ Thu Sep 8 10:13:25 2022 ] Mean training loss: 0.0171.
516
+ [ Thu Sep 8 10:13:25 2022 ] Time consumption: [Data]01%, [Network]99%
517
+ [ Thu Sep 8 10:13:26 2022 ] Eval epoch: 95
518
+ [ Thu Sep 8 10:21:13 2022 ] Epoch 95 Curr Acc: (35639/59477)59.92%
519
+ [ Thu Sep 8 10:21:13 2022 ] Epoch 53 Best Acc 60.58%
520
+ [ Thu Sep 8 10:21:13 2022 ] Training epoch: 96
521
+ [ Thu Sep 8 10:21:13 2022 ] Learning rate: 0.00015000000000000004
522
+ [ Thu Sep 8 10:25:34 2022 ] Mean training loss: 0.0186.
523
+ [ Thu Sep 8 10:25:34 2022 ] Time consumption: [Data]01%, [Network]99%
524
+ [ Thu Sep 8 10:25:34 2022 ] Eval epoch: 96
525
+ [ Thu Sep 8 10:33:22 2022 ] Epoch 96 Curr Acc: (35774/59477)60.15%
526
+ [ Thu Sep 8 10:33:22 2022 ] Epoch 53 Best Acc 60.58%
527
+ [ Thu Sep 8 10:33:22 2022 ] Training epoch: 97
528
+ [ Thu Sep 8 10:33:22 2022 ] Learning rate: 0.00015000000000000004
529
+ [ Thu Sep 8 10:37:44 2022 ] Mean training loss: 0.0168.
530
+ [ Thu Sep 8 10:37:44 2022 ] Time consumption: [Data]01%, [Network]99%
531
+ [ Thu Sep 8 10:37:44 2022 ] Eval epoch: 97
532
+ [ Thu Sep 8 10:45:32 2022 ] Epoch 97 Curr Acc: (35688/59477)60.00%
533
+ [ Thu Sep 8 10:45:32 2022 ] Epoch 53 Best Acc 60.58%
534
+ [ Thu Sep 8 10:45:32 2022 ] Training epoch: 98
535
+ [ Thu Sep 8 10:45:32 2022 ] Learning rate: 0.00015000000000000004
536
+ [ Thu Sep 8 10:49:54 2022 ] Mean training loss: 0.0166.
537
+ [ Thu Sep 8 10:49:54 2022 ] Time consumption: [Data]01%, [Network]99%
538
+ [ Thu Sep 8 10:49:54 2022 ] Eval epoch: 98
539
+ [ Thu Sep 8 10:57:43 2022 ] Epoch 98 Curr Acc: (35811/59477)60.21%
540
+ [ Thu Sep 8 10:57:43 2022 ] Epoch 53 Best Acc 60.58%
541
+ [ Thu Sep 8 10:57:43 2022 ] Training epoch: 99
542
+ [ Thu Sep 8 10:57:43 2022 ] Learning rate: 0.00015000000000000004
543
+ [ Thu Sep 8 11:02:05 2022 ] Mean training loss: 0.0165.
544
+ [ Thu Sep 8 11:02:05 2022 ] Time consumption: [Data]01%, [Network]99%
545
+ [ Thu Sep 8 11:02:05 2022 ] Eval epoch: 99
546
+ [ Thu Sep 8 11:09:54 2022 ] Epoch 99 Curr Acc: (36018/59477)60.56%
547
+ [ Thu Sep 8 11:09:54 2022 ] Epoch 53 Best Acc 60.58%
548
+ [ Thu Sep 8 11:09:54 2022 ] Training epoch: 100
549
+ [ Thu Sep 8 11:09:54 2022 ] Learning rate: 0.00015000000000000004
550
+ [ Thu Sep 8 11:14:15 2022 ] Mean training loss: 0.0160.
551
+ [ Thu Sep 8 11:14:15 2022 ] Time consumption: [Data]01%, [Network]99%
552
+ [ Thu Sep 8 11:14:15 2022 ] Eval epoch: 100
553
+ [ Thu Sep 8 11:22:04 2022 ] Epoch 100 Curr Acc: (35906/59477)60.37%
554
+ [ Thu Sep 8 11:22:04 2022 ] Epoch 53 Best Acc 60.58%
555
+ [ Thu Sep 8 11:22:04 2022 ] Training epoch: 101
556
+ [ Thu Sep 8 11:22:04 2022 ] Learning rate: 0.00015000000000000004
557
+ [ Thu Sep 8 11:26:26 2022 ] Mean training loss: 0.0159.
558
+ [ Thu Sep 8 11:26:26 2022 ] Time consumption: [Data]01%, [Network]99%
559
+ [ Thu Sep 8 11:26:26 2022 ] Eval epoch: 101
560
+ [ Thu Sep 8 11:34:15 2022 ] Epoch 101 Curr Acc: (35758/59477)60.12%
561
+ [ Thu Sep 8 11:34:15 2022 ] Epoch 53 Best Acc 60.58%
562
+ [ Thu Sep 8 11:34:15 2022 ] Training epoch: 102
563
+ [ Thu Sep 8 11:34:15 2022 ] Learning rate: 0.00015000000000000004
564
+ [ Thu Sep 8 11:38:38 2022 ] Mean training loss: 0.0157.
565
+ [ Thu Sep 8 11:38:38 2022 ] Time consumption: [Data]01%, [Network]99%
566
+ [ Thu Sep 8 11:38:38 2022 ] Eval epoch: 102
567
+ [ Thu Sep 8 11:46:27 2022 ] Epoch 102 Curr Acc: (35932/59477)60.41%
568
+ [ Thu Sep 8 11:46:27 2022 ] Epoch 53 Best Acc 60.58%
569
+ [ Thu Sep 8 11:46:27 2022 ] Training epoch: 103
570
+ [ Thu Sep 8 11:46:27 2022 ] Learning rate: 0.00015000000000000004
571
+ [ Thu Sep 8 11:50:49 2022 ] Mean training loss: 0.0176.
572
+ [ Thu Sep 8 11:50:49 2022 ] Time consumption: [Data]01%, [Network]99%
573
+ [ Thu Sep 8 11:50:49 2022 ] Eval epoch: 103
574
+ [ Thu Sep 8 11:58:38 2022 ] Epoch 103 Curr Acc: (35839/59477)60.26%
575
+ [ Thu Sep 8 11:58:38 2022 ] Epoch 53 Best Acc 60.58%
576
+ [ Thu Sep 8 11:58:38 2022 ] Training epoch: 104
577
+ [ Thu Sep 8 11:58:38 2022 ] Learning rate: 0.00015000000000000004
578
+ [ Thu Sep 8 12:03:00 2022 ] Mean training loss: 0.0184.
579
+ [ Thu Sep 8 12:03:00 2022 ] Time consumption: [Data]01%, [Network]99%
580
+ [ Thu Sep 8 12:03:00 2022 ] Eval epoch: 104
581
+ [ Thu Sep 8 12:10:49 2022 ] Epoch 104 Curr Acc: (35819/59477)60.22%
582
+ [ Thu Sep 8 12:10:49 2022 ] Epoch 53 Best Acc 60.58%
583
+ [ Thu Sep 8 12:10:49 2022 ] Training epoch: 105
584
+ [ Thu Sep 8 12:10:49 2022 ] Learning rate: 0.00015000000000000004
585
+ [ Thu Sep 8 12:15:11 2022 ] Mean training loss: 0.0157.
586
+ [ Thu Sep 8 12:15:11 2022 ] Time consumption: [Data]01%, [Network]99%
587
+ [ Thu Sep 8 12:15:11 2022 ] Eval epoch: 105
588
+ [ Thu Sep 8 12:23:00 2022 ] Epoch 105 Curr Acc: (35630/59477)59.91%
589
+ [ Thu Sep 8 12:23:00 2022 ] Epoch 53 Best Acc 60.58%
590
+ [ Thu Sep 8 12:23:00 2022 ] Training epoch: 106
591
+ [ Thu Sep 8 12:23:00 2022 ] Learning rate: 0.00015000000000000004
592
+ [ Thu Sep 8 12:27:23 2022 ] Mean training loss: 0.0172.
593
+ [ Thu Sep 8 12:27:23 2022 ] Time consumption: [Data]01%, [Network]99%
594
+ [ Thu Sep 8 12:27:23 2022 ] Eval epoch: 106
595
+ [ Thu Sep 8 12:35:11 2022 ] Epoch 106 Curr Acc: (35892/59477)60.35%
596
+ [ Thu Sep 8 12:35:11 2022 ] Epoch 53 Best Acc 60.58%
597
+ [ Thu Sep 8 12:35:12 2022 ] Training epoch: 107
598
+ [ Thu Sep 8 12:35:12 2022 ] Learning rate: 0.00015000000000000004
599
+ [ Thu Sep 8 12:39:34 2022 ] Mean training loss: 0.0157.
600
+ [ Thu Sep 8 12:39:34 2022 ] Time consumption: [Data]01%, [Network]99%
601
+ [ Thu Sep 8 12:39:34 2022 ] Eval epoch: 107
602
+ [ Thu Sep 8 12:47:23 2022 ] Epoch 107 Curr Acc: (35688/59477)60.00%
603
+ [ Thu Sep 8 12:47:23 2022 ] Epoch 53 Best Acc 60.58%
604
+ [ Thu Sep 8 12:47:23 2022 ] Training epoch: 108
605
+ [ Thu Sep 8 12:47:23 2022 ] Learning rate: 0.00015000000000000004
606
+ [ Thu Sep 8 12:51:46 2022 ] Mean training loss: 0.0163.
607
+ [ Thu Sep 8 12:51:46 2022 ] Time consumption: [Data]01%, [Network]99%
608
+ [ Thu Sep 8 12:51:46 2022 ] Eval epoch: 108
609
+ [ Thu Sep 8 12:59:34 2022 ] Epoch 108 Curr Acc: (35900/59477)60.36%
610
+ [ Thu Sep 8 12:59:34 2022 ] Epoch 53 Best Acc 60.58%
611
+ [ Thu Sep 8 12:59:34 2022 ] Training epoch: 109
612
+ [ Thu Sep 8 12:59:34 2022 ] Learning rate: 0.00015000000000000004
613
+ [ Thu Sep 8 13:03:57 2022 ] Mean training loss: 0.0162.
614
+ [ Thu Sep 8 13:03:57 2022 ] Time consumption: [Data]01%, [Network]99%
615
+ [ Thu Sep 8 13:03:57 2022 ] Eval epoch: 109
616
+ [ Thu Sep 8 13:11:46 2022 ] Epoch 109 Curr Acc: (35867/59477)60.30%
617
+ [ Thu Sep 8 13:11:46 2022 ] Epoch 53 Best Acc 60.58%
618
+ [ Thu Sep 8 13:11:46 2022 ] Training epoch: 110
619
+ [ Thu Sep 8 13:11:46 2022 ] Learning rate: 0.00015000000000000004
620
+ [ Thu Sep 8 13:16:09 2022 ] Mean training loss: 0.0158.
621
+ [ Thu Sep 8 13:16:09 2022 ] Time consumption: [Data]01%, [Network]99%
622
+ [ Thu Sep 8 13:16:09 2022 ] Eval epoch: 110
623
+ [ Thu Sep 8 13:23:58 2022 ] Epoch 110 Curr Acc: (35741/59477)60.09%
624
+ [ Thu Sep 8 13:23:58 2022 ] Epoch 53 Best Acc 60.58%
625
+ [ Thu Sep 8 13:23:58 2022 ] epoch: 53, best accuracy: 0.6057971989172285
626
+ [ Thu Sep 8 13:23:58 2022 ] Experiment: ./work_dir/ntu120/xset_b
627
+ [ Thu Sep 8 13:23:58 2022 ] # generator parameters: 2.922995 M.
628
+ [ Thu Sep 8 13:23:58 2022 ] Load weights from ./runs/ntu120/xset_b/runs-52-68741.pt.
629
+ [ Thu Sep 8 13:23:58 2022 ] Eval epoch: 1
630
+ [ Thu Sep 8 13:31:47 2022 ] Epoch 1 Curr Acc: (36031/59477)60.58%
631
+ [ Thu Sep 8 13:31:47 2022 ] Epoch 53 Best Acc 60.58%
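The log above records one "Mean training loss" line per epoch and, once evaluation starts at epoch 51, a "Curr Acc"/"Best Acc" pair per epoch; this bone-stream run on NTU120 X-Set peaks at 60.58% top-1 at epoch 53. A small sketch (not part of the upload) for extracting that accuracy curve from such a log, with the regex based on the line format shown:

import re

# Matches e.g. "[ Thu Sep 8 01:26:33 2022 ] Epoch 51 Curr Acc: (34810/59477)58.53%"
ACC = re.compile(r'Epoch (\d+) Curr Acc: \((\d+)/(\d+)\)([\d.]+)%')

accs = {}
with open('ckpt/Others/MST-GCN/ntu120_xset/xset_b/log.txt') as f:
    for line in f:
        m = ACC.search(line)
        if m:
            accs[int(m.group(1))] = float(m.group(4))

best_epoch = max(accs, key=accs.get)
print(best_epoch, accs[best_epoch])   # expected 53 60.58 for the log above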
ckpt/Others/MST-GCN/ntu120_xset/xset_bm/AEMST_GCN.py ADDED
@@ -0,0 +1,168 @@
import torch
import torch.nn as nn
import torch.nn.functional as F

import numpy as np
import math

import sys
sys.path.append('../')
from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \
    MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \
    MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer
from model.activations import Activations
from model.utils import import_class, conv_branch_init, conv_init, bn_init
from model.attentions import Attention_Layer

# import model.attentions

__block_type__ = {
    'basic': (Basic_GCN_layer, Basic_TCN_layer),
    'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer),
    'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer),
    'ms': (MS_GCN_layer, MS_TCN_layer),
    'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer),
}


class Model(nn.Module):
    def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten,
                 **kwargs):
        super(Model, self).__init__()
        kwargs['act'] = Activations(kwargs['act'])
        atten = None if atten == 'None' else atten
        if graph is None:
            raise ValueError()
        else:
            Graph = import_class(graph)
            self.graph = Graph(**graph_args)
            A = self.graph.A

        self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point)

        self.layers = nn.ModuleList()

        for i, block in enumerate(block_args):
            if i == 0:
                self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2],
                                                 kernel_size=kernel_size, stride=block[3], A=A, block_type='basic',
                                                 atten=None, **kwargs))
            else:
                self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2],
                                                 kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type,
                                                 atten=atten, **kwargs))

        self.gap = nn.AdaptiveAvgPool2d(1)
        self.fc = nn.Linear(block_args[-1][1], num_class)

        for m in self.modules():
            if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer):
                for mm in m.modules():
                    if isinstance(mm, nn.Conv2d):
                        conv_branch_init(mm, self.graph.A.shape[0])
                    if isinstance(mm, nn.BatchNorm2d):
                        bn_init(mm, 1)
            elif isinstance(m, nn.Conv2d):
                conv_init(m)
            elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d):
                bn_init(m, 1)
            elif isinstance(m, nn.Linear):
                nn.init.normal_(m.weight, 0, math.sqrt(2. / num_class))

    def forward(self, x):
        N, C, T, V, M = x.size()

        x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)  # N C T V M --> N M V C T
        x = self.data_bn(x)
        x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)

        for i, layer in enumerate(self.layers):
            x = layer(x)

        features = x

        x = self.gap(x).view(N, M, -1).mean(dim=1)
        x = self.fc(x)

        return features, x


class MST_GCN_block(nn.Module):
    def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs):
        super(MST_GCN_block, self).__init__()
        self.atten = atten
        self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A,
                                                   residual=residual, **kwargs)
        self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride,
                                                   residual=residual, **kwargs)
        if atten is not None:
            self.att = Attention_Layer(out_channels, atten, **kwargs)

    def forward(self, x):
        return self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x))


if __name__ == '__main__':
    import sys
    import time

    parts = [
        np.array([5, 6, 7, 8, 22, 23]) - 1,     # left_arm
        np.array([9, 10, 11, 12, 24, 25]) - 1,  # right_arm
        np.array([13, 14, 15, 16]) - 1,         # left_leg
        np.array([17, 18, 19, 20]) - 1,         # right_leg
        np.array([1, 2, 3, 4, 21]) - 1          # torso
    ]

    warmup_iter = 3
    test_iter = 10
    sys.path.append('/home/chenzhan/mywork/MST-GCN/')
    from thop import profile
    basic_channels = 112
    cfgs = {
        'num_class': 2,
        'num_point': 25,
        'num_person': 1,
        'block_args': [[2, basic_channels, False, 1],
                       [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1],
                       [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1],
                       [basic_channels*2, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]],
        'graph': 'graph.ntu_rgb_d.Graph',
        'graph_args': {'labeling_mode': 'spatial'},
        'kernel_size': 9,
133
+ 'block_type': 'ms',
134
+ 'reduct_ratio': 2,
135
+ 'expand_ratio': 0,
136
+ 't_scale': 4,
137
+ 'layer_type': 'sep',
138
+ 'act': 'relu',
139
+ 's_scale': 4,
140
+ 'atten': 'stcja',
141
+ 'bias': True,
142
+ 'parts': parts
143
+ }
144
+
145
+ model = Model(**cfgs)
146
+
147
+ N, C, T, V, M = 4, 2, 16, 25, 1
148
+ inputs = torch.rand(N, C, T, V, M)
149
+
150
+ for i in range(warmup_iter + test_iter):
151
+ if i == warmup_iter:
152
+ start_time = time.time()
153
+ outputs = model(inputs)
154
+ end_time = time.time()
155
+
156
+ total_time = end_time - start_time
157
+ print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format(
158
+ total_time, test_iter, total_time/test_iter/N))
159
+
160
+ print(outputs.size())
161
+
162
+ hereflops, params = profile(model, inputs=(inputs,), verbose=False)
163
+ print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N))
164
+ print('# Params is {} M'.format(sum(param.numel() for param in model.parameters()) / 10 ** 6))
165
+
166
+
167
+
168
+
ckpt/Others/MST-GCN/ntu120_xset/xset_bm/config.yaml ADDED
@@ -0,0 +1,107 @@
1
+ base_lr: 0.15
2
+ batch_size: 8
3
+ config: config/ntu120/xset_bm.yaml
4
+ device:
5
+ - 0
6
+ eval_interval: 5
7
+ feeder: feeders.feeder.Feeder
8
+ ignore_weights: []
9
+ local_rank: 0
10
+ log_interval: 100
11
+ model: model.AEMST_GCN.Model
12
+ model_args:
13
+ act: relu
14
+ atten: None
15
+ bias: true
16
+ block_args:
17
+ - - 3
18
+ - 112
19
+ - false
20
+ - 1
21
+ - - 112
22
+ - 112
23
+ - true
24
+ - 1
25
+ - - 112
26
+ - 112
27
+ - true
28
+ - 1
29
+ - - 112
30
+ - 112
31
+ - true
32
+ - 1
33
+ - - 112
34
+ - 224
35
+ - true
36
+ - 2
37
+ - - 224
38
+ - 224
39
+ - true
40
+ - 1
41
+ - - 224
42
+ - 224
43
+ - true
44
+ - 1
45
+ - - 224
46
+ - 448
47
+ - true
48
+ - 2
49
+ - - 448
50
+ - 448
51
+ - true
52
+ - 1
53
+ - - 448
54
+ - 448
55
+ - true
56
+ - 1
57
+ block_type: ms
58
+ expand_ratio: 0
59
+ graph: graph.ntu_rgb_d.Graph
60
+ graph_args:
61
+ labeling_mode: spatial
62
+ kernel_size: 9
63
+ layer_type: basic
64
+ num_class: 120
65
+ num_person: 2
66
+ num_point: 25
67
+ reduct_ratio: 2
68
+ s_scale: 4
69
+ t_scale: 4
70
+ model_path: ''
71
+ model_saved_name: ./runs/ntu120/xset_bm/runs
72
+ nesterov: true
73
+ num_epoch: 110
74
+ num_worker: 32
75
+ only_train_epoch: 0
76
+ only_train_part: false
77
+ optimizer: SGD
78
+ phase: train
79
+ print_log: true
80
+ save_interval: 1
81
+ save_score: true
82
+ seed: 1
83
+ show_topk:
84
+ - 1
85
+ - 5
86
+ start_epoch: 0
87
+ step:
88
+ - 50
89
+ - 70
90
+ - 90
91
+ test_batch_size: 64
92
+ test_feeder_args:
93
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_bone_motion.npy
94
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl
95
+ train_feeder_args:
96
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_bone_motion.npy
97
+ debug: false
98
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl
99
+ normalization: false
100
+ random_choose: false
101
+ random_move: false
102
+ random_shift: false
103
+ window_size: -1
104
+ warm_up_epoch: 10
105
+ weight_decay: 0.0001
106
+ weights: null
107
+ work_dir: ./work_dir/ntu120/xset_bm
ckpt/Others/MST-GCN/ntu120_xset/xset_bm/epoch1_test_score.pkl ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4623e5e955d6059c5eea10e3c6bbcc860f03e1765f0a7b768011e8bd6aada7a4
3
+ size 34946665
ckpt/Others/MST-GCN/ntu120_xset/xset_bm/log.txt ADDED
@@ -0,0 +1,631 @@
1
+ [ Wed Sep 7 21:35:43 2022 ] # generator parameters: 2.922995 M.
2
+ [ Wed Sep 7 21:35:43 2022 ] Parameters:
3
+ {'work_dir': './work_dir/ntu120/xset_bm', 'model_saved_name': './runs/ntu120/xset_bm/runs', 'config': 'config/ntu120/xset_bm.yaml', 'phase': 'train', 'save_score': True, 'seed': 1, 'log_interval': 100, 'save_interval': 1, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_bone_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_bone_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl'}, 'model': 'model.AEMST_GCN.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'block_args': [[3, 112, False, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 224, True, 2], [224, 224, True, 1], [224, 224, True, 1], [224, 448, True, 2], [448, 448, True, 1], [448, 448, True, 1]], 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}, 'kernel_size': 9, 'block_type': 'ms', 'reduct_ratio': 2, 'expand_ratio': 0, 's_scale': 4, 't_scale': 4, 'layer_type': 'basic', 'act': 'relu', 'atten': 'None', 'bias': True}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.15, 'step': [50, 70, 90], 'device': [0], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 8, 'test_batch_size': 64, 'start_epoch': 0, 'model_path': '', 'num_epoch': 110, 'weight_decay': 0.0001, 'only_train_part': False, 'only_train_epoch': 0, 'warm_up_epoch': 10, 'local_rank': 0}
4
+
5
+ [ Wed Sep 7 21:35:43 2022 ] Training epoch: 1
6
+ [ Wed Sep 7 21:35:43 2022 ] Learning rate: 0.015
7
+ [ Wed Sep 7 21:40:09 2022 ] Mean training loss: 3.7616.
8
+ [ Wed Sep 7 21:40:09 2022 ] Time consumption: [Data]01%, [Network]99%
9
+ [ Wed Sep 7 21:40:09 2022 ] Training epoch: 2
10
+ [ Wed Sep 7 21:40:09 2022 ] Learning rate: 0.03
11
+ [ Wed Sep 7 21:44:32 2022 ] Mean training loss: 2.8929.
12
+ [ Wed Sep 7 21:44:32 2022 ] Time consumption: [Data]01%, [Network]99%
13
+ [ Wed Sep 7 21:44:32 2022 ] Training epoch: 3
14
+ [ Wed Sep 7 21:44:32 2022 ] Learning rate: 0.045
15
+ [ Wed Sep 7 21:48:57 2022 ] Mean training loss: 2.3294.
16
+ [ Wed Sep 7 21:48:57 2022 ] Time consumption: [Data]01%, [Network]99%
17
+ [ Wed Sep 7 21:48:57 2022 ] Training epoch: 4
18
+ [ Wed Sep 7 21:48:57 2022 ] Learning rate: 0.06
19
+ [ Wed Sep 7 21:53:21 2022 ] Mean training loss: 1.9961.
20
+ [ Wed Sep 7 21:53:21 2022 ] Time consumption: [Data]01%, [Network]99%
21
+ [ Wed Sep 7 21:53:21 2022 ] Training epoch: 5
22
+ [ Wed Sep 7 21:53:21 2022 ] Learning rate: 0.075
23
+ [ Wed Sep 7 21:57:45 2022 ] Mean training loss: 1.7605.
24
+ [ Wed Sep 7 21:57:45 2022 ] Time consumption: [Data]01%, [Network]99%
25
+ [ Wed Sep 7 21:57:45 2022 ] Training epoch: 6
26
+ [ Wed Sep 7 21:57:45 2022 ] Learning rate: 0.09
27
+ [ Wed Sep 7 22:02:10 2022 ] Mean training loss: 1.6430.
28
+ [ Wed Sep 7 22:02:10 2022 ] Time consumption: [Data]01%, [Network]99%
29
+ [ Wed Sep 7 22:02:10 2022 ] Training epoch: 7
30
+ [ Wed Sep 7 22:02:10 2022 ] Learning rate: 0.10500000000000001
31
+ [ Wed Sep 7 22:06:34 2022 ] Mean training loss: 1.5372.
32
+ [ Wed Sep 7 22:06:34 2022 ] Time consumption: [Data]01%, [Network]99%
33
+ [ Wed Sep 7 22:06:34 2022 ] Training epoch: 8
34
+ [ Wed Sep 7 22:06:34 2022 ] Learning rate: 0.12
35
+ [ Wed Sep 7 22:10:58 2022 ] Mean training loss: 1.4878.
36
+ [ Wed Sep 7 22:10:58 2022 ] Time consumption: [Data]01%, [Network]99%
37
+ [ Wed Sep 7 22:10:58 2022 ] Training epoch: 9
38
+ [ Wed Sep 7 22:10:58 2022 ] Learning rate: 0.13499999999999998
39
+ [ Wed Sep 7 22:15:22 2022 ] Mean training loss: 1.4265.
40
+ [ Wed Sep 7 22:15:22 2022 ] Time consumption: [Data]01%, [Network]99%
41
+ [ Wed Sep 7 22:15:22 2022 ] Training epoch: 10
42
+ [ Wed Sep 7 22:15:22 2022 ] Learning rate: 0.15
43
+ [ Wed Sep 7 22:19:46 2022 ] Mean training loss: 1.4175.
44
+ [ Wed Sep 7 22:19:46 2022 ] Time consumption: [Data]01%, [Network]99%
45
+ [ Wed Sep 7 22:19:46 2022 ] Training epoch: 11
46
+ [ Wed Sep 7 22:19:46 2022 ] Learning rate: 0.15
47
+ [ Wed Sep 7 22:24:11 2022 ] Mean training loss: 1.3220.
48
+ [ Wed Sep 7 22:24:11 2022 ] Time consumption: [Data]01%, [Network]99%
49
+ [ Wed Sep 7 22:24:11 2022 ] Training epoch: 12
50
+ [ Wed Sep 7 22:24:11 2022 ] Learning rate: 0.15
51
+ [ Wed Sep 7 22:28:35 2022 ] Mean training loss: 1.3016.
52
+ [ Wed Sep 7 22:28:35 2022 ] Time consumption: [Data]01%, [Network]99%
53
+ [ Wed Sep 7 22:28:35 2022 ] Training epoch: 13
54
+ [ Wed Sep 7 22:28:35 2022 ] Learning rate: 0.15
55
+ [ Wed Sep 7 22:32:59 2022 ] Mean training loss: 1.2391.
56
+ [ Wed Sep 7 22:32:59 2022 ] Time consumption: [Data]01%, [Network]99%
57
+ [ Wed Sep 7 22:32:59 2022 ] Training epoch: 14
58
+ [ Wed Sep 7 22:32:59 2022 ] Learning rate: 0.15
59
+ [ Wed Sep 7 22:37:24 2022 ] Mean training loss: 1.1996.
60
+ [ Wed Sep 7 22:37:24 2022 ] Time consumption: [Data]01%, [Network]99%
61
+ [ Wed Sep 7 22:37:24 2022 ] Training epoch: 15
62
+ [ Wed Sep 7 22:37:24 2022 ] Learning rate: 0.15
63
+ [ Wed Sep 7 22:41:48 2022 ] Mean training loss: 1.1829.
64
+ [ Wed Sep 7 22:41:48 2022 ] Time consumption: [Data]01%, [Network]99%
65
+ [ Wed Sep 7 22:41:48 2022 ] Training epoch: 16
66
+ [ Wed Sep 7 22:41:48 2022 ] Learning rate: 0.15
67
+ [ Wed Sep 7 22:46:13 2022 ] Mean training loss: 1.1456.
68
+ [ Wed Sep 7 22:46:13 2022 ] Time consumption: [Data]01%, [Network]99%
69
+ [ Wed Sep 7 22:46:13 2022 ] Training epoch: 17
70
+ [ Wed Sep 7 22:46:13 2022 ] Learning rate: 0.15
71
+ [ Wed Sep 7 22:50:38 2022 ] Mean training loss: 1.1325.
72
+ [ Wed Sep 7 22:50:38 2022 ] Time consumption: [Data]01%, [Network]99%
73
+ [ Wed Sep 7 22:50:38 2022 ] Training epoch: 18
74
+ [ Wed Sep 7 22:50:38 2022 ] Learning rate: 0.15
75
+ [ Wed Sep 7 22:55:02 2022 ] Mean training loss: 1.1006.
76
+ [ Wed Sep 7 22:55:02 2022 ] Time consumption: [Data]01%, [Network]99%
77
+ [ Wed Sep 7 22:55:02 2022 ] Training epoch: 19
78
+ [ Wed Sep 7 22:55:02 2022 ] Learning rate: 0.15
79
+ [ Wed Sep 7 22:59:26 2022 ] Mean training loss: 1.0961.
80
+ [ Wed Sep 7 22:59:26 2022 ] Time consumption: [Data]01%, [Network]99%
81
+ [ Wed Sep 7 22:59:26 2022 ] Training epoch: 20
82
+ [ Wed Sep 7 22:59:26 2022 ] Learning rate: 0.15
83
+ [ Wed Sep 7 23:03:51 2022 ] Mean training loss: 1.0554.
84
+ [ Wed Sep 7 23:03:51 2022 ] Time consumption: [Data]01%, [Network]99%
85
+ [ Wed Sep 7 23:03:51 2022 ] Training epoch: 21
86
+ [ Wed Sep 7 23:03:51 2022 ] Learning rate: 0.15
87
+ [ Wed Sep 7 23:08:15 2022 ] Mean training loss: 1.0497.
88
+ [ Wed Sep 7 23:08:15 2022 ] Time consumption: [Data]01%, [Network]99%
89
+ [ Wed Sep 7 23:08:15 2022 ] Training epoch: 22
90
+ [ Wed Sep 7 23:08:15 2022 ] Learning rate: 0.15
91
+ [ Wed Sep 7 23:12:39 2022 ] Mean training loss: 1.0341.
92
+ [ Wed Sep 7 23:12:39 2022 ] Time consumption: [Data]01%, [Network]99%
93
+ [ Wed Sep 7 23:12:39 2022 ] Training epoch: 23
94
+ [ Wed Sep 7 23:12:39 2022 ] Learning rate: 0.15
95
+ [ Wed Sep 7 23:17:04 2022 ] Mean training loss: 1.0167.
96
+ [ Wed Sep 7 23:17:04 2022 ] Time consumption: [Data]01%, [Network]99%
97
+ [ Wed Sep 7 23:17:04 2022 ] Training epoch: 24
98
+ [ Wed Sep 7 23:17:04 2022 ] Learning rate: 0.15
99
+ [ Wed Sep 7 23:21:29 2022 ] Mean training loss: 1.0323.
100
+ [ Wed Sep 7 23:21:29 2022 ] Time consumption: [Data]01%, [Network]99%
101
+ [ Wed Sep 7 23:21:29 2022 ] Training epoch: 25
102
+ [ Wed Sep 7 23:21:29 2022 ] Learning rate: 0.15
103
+ [ Wed Sep 7 23:25:54 2022 ] Mean training loss: 1.0009.
104
+ [ Wed Sep 7 23:25:54 2022 ] Time consumption: [Data]01%, [Network]99%
105
+ [ Wed Sep 7 23:25:54 2022 ] Training epoch: 26
106
+ [ Wed Sep 7 23:25:54 2022 ] Learning rate: 0.15
107
+ [ Wed Sep 7 23:30:19 2022 ] Mean training loss: 1.0010.
108
+ [ Wed Sep 7 23:30:19 2022 ] Time consumption: [Data]01%, [Network]99%
109
+ [ Wed Sep 7 23:30:19 2022 ] Training epoch: 27
110
+ [ Wed Sep 7 23:30:19 2022 ] Learning rate: 0.15
111
+ [ Wed Sep 7 23:34:43 2022 ] Mean training loss: 0.9783.
112
+ [ Wed Sep 7 23:34:43 2022 ] Time consumption: [Data]01%, [Network]99%
113
+ [ Wed Sep 7 23:34:43 2022 ] Training epoch: 28
114
+ [ Wed Sep 7 23:34:43 2022 ] Learning rate: 0.15
115
+ [ Wed Sep 7 23:39:08 2022 ] Mean training loss: 0.9682.
116
+ [ Wed Sep 7 23:39:08 2022 ] Time consumption: [Data]01%, [Network]99%
117
+ [ Wed Sep 7 23:39:08 2022 ] Training epoch: 29
118
+ [ Wed Sep 7 23:39:08 2022 ] Learning rate: 0.15
119
+ [ Wed Sep 7 23:43:32 2022 ] Mean training loss: 0.9655.
120
+ [ Wed Sep 7 23:43:32 2022 ] Time consumption: [Data]01%, [Network]99%
121
+ [ Wed Sep 7 23:43:32 2022 ] Training epoch: 30
122
+ [ Wed Sep 7 23:43:32 2022 ] Learning rate: 0.15
123
+ [ Wed Sep 7 23:47:57 2022 ] Mean training loss: 0.9336.
124
+ [ Wed Sep 7 23:47:57 2022 ] Time consumption: [Data]01%, [Network]99%
125
+ [ Wed Sep 7 23:47:57 2022 ] Training epoch: 31
126
+ [ Wed Sep 7 23:47:57 2022 ] Learning rate: 0.15
127
+ [ Wed Sep 7 23:52:22 2022 ] Mean training loss: 0.9539.
128
+ [ Wed Sep 7 23:52:22 2022 ] Time consumption: [Data]01%, [Network]99%
129
+ [ Wed Sep 7 23:52:22 2022 ] Training epoch: 32
130
+ [ Wed Sep 7 23:52:22 2022 ] Learning rate: 0.15
131
+ [ Wed Sep 7 23:56:47 2022 ] Mean training loss: 0.9466.
132
+ [ Wed Sep 7 23:56:47 2022 ] Time consumption: [Data]01%, [Network]99%
133
+ [ Wed Sep 7 23:56:47 2022 ] Training epoch: 33
134
+ [ Wed Sep 7 23:56:47 2022 ] Learning rate: 0.15
135
+ [ Thu Sep 8 00:01:12 2022 ] Mean training loss: 0.9424.
136
+ [ Thu Sep 8 00:01:12 2022 ] Time consumption: [Data]01%, [Network]99%
137
+ [ Thu Sep 8 00:01:12 2022 ] Training epoch: 34
138
+ [ Thu Sep 8 00:01:12 2022 ] Learning rate: 0.15
139
+ [ Thu Sep 8 00:05:36 2022 ] Mean training loss: 0.9185.
140
+ [ Thu Sep 8 00:05:36 2022 ] Time consumption: [Data]01%, [Network]99%
141
+ [ Thu Sep 8 00:05:36 2022 ] Training epoch: 35
142
+ [ Thu Sep 8 00:05:36 2022 ] Learning rate: 0.15
143
+ [ Thu Sep 8 00:10:01 2022 ] Mean training loss: 0.9306.
144
+ [ Thu Sep 8 00:10:01 2022 ] Time consumption: [Data]01%, [Network]99%
145
+ [ Thu Sep 8 00:10:01 2022 ] Training epoch: 36
146
+ [ Thu Sep 8 00:10:01 2022 ] Learning rate: 0.15
147
+ [ Thu Sep 8 00:14:26 2022 ] Mean training loss: 0.9035.
148
+ [ Thu Sep 8 00:14:26 2022 ] Time consumption: [Data]01%, [Network]99%
149
+ [ Thu Sep 8 00:14:26 2022 ] Training epoch: 37
150
+ [ Thu Sep 8 00:14:26 2022 ] Learning rate: 0.15
151
+ [ Thu Sep 8 00:18:50 2022 ] Mean training loss: 0.9184.
152
+ [ Thu Sep 8 00:18:50 2022 ] Time consumption: [Data]01%, [Network]99%
153
+ [ Thu Sep 8 00:18:50 2022 ] Training epoch: 38
154
+ [ Thu Sep 8 00:18:50 2022 ] Learning rate: 0.15
155
+ [ Thu Sep 8 00:23:14 2022 ] Mean training loss: 0.9169.
156
+ [ Thu Sep 8 00:23:14 2022 ] Time consumption: [Data]01%, [Network]99%
157
+ [ Thu Sep 8 00:23:14 2022 ] Training epoch: 39
158
+ [ Thu Sep 8 00:23:14 2022 ] Learning rate: 0.15
159
+ [ Thu Sep 8 00:27:39 2022 ] Mean training loss: 0.9008.
160
+ [ Thu Sep 8 00:27:39 2022 ] Time consumption: [Data]01%, [Network]99%
161
+ [ Thu Sep 8 00:27:39 2022 ] Training epoch: 40
162
+ [ Thu Sep 8 00:27:39 2022 ] Learning rate: 0.15
163
+ [ Thu Sep 8 00:32:03 2022 ] Mean training loss: 0.9175.
164
+ [ Thu Sep 8 00:32:03 2022 ] Time consumption: [Data]01%, [Network]99%
165
+ [ Thu Sep 8 00:32:03 2022 ] Training epoch: 41
166
+ [ Thu Sep 8 00:32:03 2022 ] Learning rate: 0.15
167
+ [ Thu Sep 8 00:36:27 2022 ] Mean training loss: 0.8860.
168
+ [ Thu Sep 8 00:36:27 2022 ] Time consumption: [Data]01%, [Network]99%
169
+ [ Thu Sep 8 00:36:27 2022 ] Training epoch: 42
170
+ [ Thu Sep 8 00:36:27 2022 ] Learning rate: 0.15
171
+ [ Thu Sep 8 00:40:52 2022 ] Mean training loss: 0.9037.
172
+ [ Thu Sep 8 00:40:52 2022 ] Time consumption: [Data]01%, [Network]99%
173
+ [ Thu Sep 8 00:40:52 2022 ] Training epoch: 43
174
+ [ Thu Sep 8 00:40:52 2022 ] Learning rate: 0.15
175
+ [ Thu Sep 8 00:45:15 2022 ] Mean training loss: 0.8643.
176
+ [ Thu Sep 8 00:45:15 2022 ] Time consumption: [Data]01%, [Network]99%
177
+ [ Thu Sep 8 00:45:15 2022 ] Training epoch: 44
178
+ [ Thu Sep 8 00:45:15 2022 ] Learning rate: 0.15
179
+ [ Thu Sep 8 00:49:39 2022 ] Mean training loss: 0.8888.
180
+ [ Thu Sep 8 00:49:39 2022 ] Time consumption: [Data]01%, [Network]99%
181
+ [ Thu Sep 8 00:49:39 2022 ] Training epoch: 45
182
+ [ Thu Sep 8 00:49:39 2022 ] Learning rate: 0.15
183
+ [ Thu Sep 8 00:54:03 2022 ] Mean training loss: 0.8861.
184
+ [ Thu Sep 8 00:54:03 2022 ] Time consumption: [Data]01%, [Network]99%
185
+ [ Thu Sep 8 00:54:03 2022 ] Training epoch: 46
186
+ [ Thu Sep 8 00:54:03 2022 ] Learning rate: 0.15
187
+ [ Thu Sep 8 00:58:25 2022 ] Mean training loss: 0.9013.
188
+ [ Thu Sep 8 00:58:25 2022 ] Time consumption: [Data]01%, [Network]99%
189
+ [ Thu Sep 8 00:58:25 2022 ] Training epoch: 47
190
+ [ Thu Sep 8 00:58:25 2022 ] Learning rate: 0.15
191
+ [ Thu Sep 8 01:02:49 2022 ] Mean training loss: 0.8633.
192
+ [ Thu Sep 8 01:02:49 2022 ] Time consumption: [Data]01%, [Network]99%
193
+ [ Thu Sep 8 01:02:49 2022 ] Training epoch: 48
194
+ [ Thu Sep 8 01:02:49 2022 ] Learning rate: 0.15
195
+ [ Thu Sep 8 01:07:12 2022 ] Mean training loss: 0.8594.
196
+ [ Thu Sep 8 01:07:12 2022 ] Time consumption: [Data]01%, [Network]99%
197
+ [ Thu Sep 8 01:07:12 2022 ] Training epoch: 49
198
+ [ Thu Sep 8 01:07:12 2022 ] Learning rate: 0.15
199
+ [ Thu Sep 8 01:11:37 2022 ] Mean training loss: 0.8595.
200
+ [ Thu Sep 8 01:11:37 2022 ] Time consumption: [Data]01%, [Network]99%
201
+ [ Thu Sep 8 01:11:37 2022 ] Training epoch: 50
202
+ [ Thu Sep 8 01:11:37 2022 ] Learning rate: 0.15
203
+ [ Thu Sep 8 01:16:01 2022 ] Mean training loss: 0.8746.
204
+ [ Thu Sep 8 01:16:01 2022 ] Time consumption: [Data]01%, [Network]99%
205
+ [ Thu Sep 8 01:16:01 2022 ] Training epoch: 51
206
+ [ Thu Sep 8 01:16:01 2022 ] Learning rate: 0.015
207
+ [ Thu Sep 8 01:20:24 2022 ] Mean training loss: 0.4132.
208
+ [ Thu Sep 8 01:20:24 2022 ] Time consumption: [Data]01%, [Network]99%
209
+ [ Thu Sep 8 01:20:24 2022 ] Eval epoch: 51
210
+ [ Thu Sep 8 01:28:23 2022 ] Epoch 51 Curr Acc: (32764/59477)55.09%
211
+ [ Thu Sep 8 01:28:23 2022 ] Epoch 51 Best Acc 55.09%
212
+ [ Thu Sep 8 01:28:23 2022 ] Training epoch: 52
213
+ [ Thu Sep 8 01:28:23 2022 ] Learning rate: 0.015
214
+ [ Thu Sep 8 01:32:48 2022 ] Mean training loss: 0.2703.
215
+ [ Thu Sep 8 01:32:48 2022 ] Time consumption: [Data]01%, [Network]99%
216
+ [ Thu Sep 8 01:32:48 2022 ] Eval epoch: 52
217
+ [ Thu Sep 8 01:40:37 2022 ] Epoch 52 Curr Acc: (33744/59477)56.73%
218
+ [ Thu Sep 8 01:40:37 2022 ] Epoch 52 Best Acc 56.73%
219
+ [ Thu Sep 8 01:40:37 2022 ] Training epoch: 53
220
+ [ Thu Sep 8 01:40:37 2022 ] Learning rate: 0.015
221
+ [ Thu Sep 8 01:45:02 2022 ] Mean training loss: 0.2134.
222
+ [ Thu Sep 8 01:45:02 2022 ] Time consumption: [Data]01%, [Network]99%
223
+ [ Thu Sep 8 01:45:02 2022 ] Eval epoch: 53
224
+ [ Thu Sep 8 01:52:50 2022 ] Epoch 53 Curr Acc: (34759/59477)58.44%
225
+ [ Thu Sep 8 01:52:50 2022 ] Epoch 53 Best Acc 58.44%
226
+ [ Thu Sep 8 01:52:50 2022 ] Training epoch: 54
227
+ [ Thu Sep 8 01:52:50 2022 ] Learning rate: 0.015
228
+ [ Thu Sep 8 01:57:15 2022 ] Mean training loss: 0.1782.
229
+ [ Thu Sep 8 01:57:15 2022 ] Time consumption: [Data]01%, [Network]99%
230
+ [ Thu Sep 8 01:57:15 2022 ] Eval epoch: 54
231
+ [ Thu Sep 8 02:05:04 2022 ] Epoch 54 Curr Acc: (34526/59477)58.05%
232
+ [ Thu Sep 8 02:05:04 2022 ] Epoch 53 Best Acc 58.44%
233
+ [ Thu Sep 8 02:05:04 2022 ] Training epoch: 55
234
+ [ Thu Sep 8 02:05:04 2022 ] Learning rate: 0.015
235
+ [ Thu Sep 8 02:09:28 2022 ] Mean training loss: 0.1372.
236
+ [ Thu Sep 8 02:09:28 2022 ] Time consumption: [Data]01%, [Network]99%
237
+ [ Thu Sep 8 02:09:28 2022 ] Eval epoch: 55
238
+ [ Thu Sep 8 02:17:17 2022 ] Epoch 55 Curr Acc: (34180/59477)57.47%
239
+ [ Thu Sep 8 02:17:17 2022 ] Epoch 53 Best Acc 58.44%
240
+ [ Thu Sep 8 02:17:17 2022 ] Training epoch: 56
241
+ [ Thu Sep 8 02:17:17 2022 ] Learning rate: 0.015
242
+ [ Thu Sep 8 02:21:41 2022 ] Mean training loss: 0.1177.
243
+ [ Thu Sep 8 02:21:41 2022 ] Time consumption: [Data]01%, [Network]99%
244
+ [ Thu Sep 8 02:21:41 2022 ] Eval epoch: 56
245
+ [ Thu Sep 8 02:29:30 2022 ] Epoch 56 Curr Acc: (33575/59477)56.45%
246
+ [ Thu Sep 8 02:29:30 2022 ] Epoch 53 Best Acc 58.44%
247
+ [ Thu Sep 8 02:29:30 2022 ] Training epoch: 57
248
+ [ Thu Sep 8 02:29:30 2022 ] Learning rate: 0.015
249
+ [ Thu Sep 8 02:33:55 2022 ] Mean training loss: 0.0980.
250
+ [ Thu Sep 8 02:33:55 2022 ] Time consumption: [Data]01%, [Network]99%
251
+ [ Thu Sep 8 02:33:55 2022 ] Eval epoch: 57
252
+ [ Thu Sep 8 02:41:44 2022 ] Epoch 57 Curr Acc: (34946/59477)58.76%
253
+ [ Thu Sep 8 02:41:44 2022 ] Epoch 57 Best Acc 58.76%
254
+ [ Thu Sep 8 02:41:44 2022 ] Training epoch: 58
255
+ [ Thu Sep 8 02:41:44 2022 ] Learning rate: 0.015
256
+ [ Thu Sep 8 02:46:07 2022 ] Mean training loss: 0.0823.
257
+ [ Thu Sep 8 02:46:07 2022 ] Time consumption: [Data]01%, [Network]99%
258
+ [ Thu Sep 8 02:46:07 2022 ] Eval epoch: 58
259
+ [ Thu Sep 8 02:53:56 2022 ] Epoch 58 Curr Acc: (34670/59477)58.29%
260
+ [ Thu Sep 8 02:53:56 2022 ] Epoch 57 Best Acc 58.76%
261
+ [ Thu Sep 8 02:53:56 2022 ] Training epoch: 59
262
+ [ Thu Sep 8 02:53:56 2022 ] Learning rate: 0.015
263
+ [ Thu Sep 8 02:58:20 2022 ] Mean training loss: 0.0653.
264
+ [ Thu Sep 8 02:58:20 2022 ] Time consumption: [Data]01%, [Network]99%
265
+ [ Thu Sep 8 02:58:20 2022 ] Eval epoch: 59
266
+ [ Thu Sep 8 03:06:08 2022 ] Epoch 59 Curr Acc: (34472/59477)57.96%
267
+ [ Thu Sep 8 03:06:08 2022 ] Epoch 57 Best Acc 58.76%
268
+ [ Thu Sep 8 03:06:08 2022 ] Training epoch: 60
269
+ [ Thu Sep 8 03:06:08 2022 ] Learning rate: 0.015
270
+ [ Thu Sep 8 03:10:31 2022 ] Mean training loss: 0.0597.
271
+ [ Thu Sep 8 03:10:31 2022 ] Time consumption: [Data]01%, [Network]99%
272
+ [ Thu Sep 8 03:10:31 2022 ] Eval epoch: 60
273
+ [ Thu Sep 8 03:18:19 2022 ] Epoch 60 Curr Acc: (34501/59477)58.01%
274
+ [ Thu Sep 8 03:18:19 2022 ] Epoch 57 Best Acc 58.76%
275
+ [ Thu Sep 8 03:18:19 2022 ] Training epoch: 61
276
+ [ Thu Sep 8 03:18:19 2022 ] Learning rate: 0.015
277
+ [ Thu Sep 8 03:22:42 2022 ] Mean training loss: 0.0531.
278
+ [ Thu Sep 8 03:22:42 2022 ] Time consumption: [Data]01%, [Network]99%
279
+ [ Thu Sep 8 03:22:42 2022 ] Eval epoch: 61
280
+ [ Thu Sep 8 03:30:30 2022 ] Epoch 61 Curr Acc: (33065/59477)55.59%
281
+ [ Thu Sep 8 03:30:30 2022 ] Epoch 57 Best Acc 58.76%
282
+ [ Thu Sep 8 03:30:30 2022 ] Training epoch: 62
283
+ [ Thu Sep 8 03:30:30 2022 ] Learning rate: 0.015
284
+ [ Thu Sep 8 03:34:52 2022 ] Mean training loss: 0.0472.
285
+ [ Thu Sep 8 03:34:52 2022 ] Time consumption: [Data]01%, [Network]99%
286
+ [ Thu Sep 8 03:34:52 2022 ] Eval epoch: 62
287
+ [ Thu Sep 8 03:42:40 2022 ] Epoch 62 Curr Acc: (34525/59477)58.05%
288
+ [ Thu Sep 8 03:42:40 2022 ] Epoch 57 Best Acc 58.76%
289
+ [ Thu Sep 8 03:42:40 2022 ] Training epoch: 63
290
+ [ Thu Sep 8 03:42:40 2022 ] Learning rate: 0.015
291
+ [ Thu Sep 8 03:47:01 2022 ] Mean training loss: 0.0389.
292
+ [ Thu Sep 8 03:47:01 2022 ] Time consumption: [Data]01%, [Network]99%
293
+ [ Thu Sep 8 03:47:02 2022 ] Eval epoch: 63
294
+ [ Thu Sep 8 03:54:50 2022 ] Epoch 63 Curr Acc: (34550/59477)58.09%
295
+ [ Thu Sep 8 03:54:50 2022 ] Epoch 57 Best Acc 58.76%
296
+ [ Thu Sep 8 03:54:50 2022 ] Training epoch: 64
297
+ [ Thu Sep 8 03:54:50 2022 ] Learning rate: 0.015
298
+ [ Thu Sep 8 03:59:11 2022 ] Mean training loss: 0.0394.
299
+ [ Thu Sep 8 03:59:11 2022 ] Time consumption: [Data]01%, [Network]99%
300
+ [ Thu Sep 8 03:59:11 2022 ] Eval epoch: 64
301
+ [ Thu Sep 8 04:06:59 2022 ] Epoch 64 Curr Acc: (34375/59477)57.80%
302
+ [ Thu Sep 8 04:06:59 2022 ] Epoch 57 Best Acc 58.76%
303
+ [ Thu Sep 8 04:06:59 2022 ] Training epoch: 65
304
+ [ Thu Sep 8 04:06:59 2022 ] Learning rate: 0.015
305
+ [ Thu Sep 8 04:11:20 2022 ] Mean training loss: 0.0407.
306
+ [ Thu Sep 8 04:11:20 2022 ] Time consumption: [Data]01%, [Network]99%
307
+ [ Thu Sep 8 04:11:20 2022 ] Eval epoch: 65
308
+ [ Thu Sep 8 04:19:08 2022 ] Epoch 65 Curr Acc: (34053/59477)57.25%
309
+ [ Thu Sep 8 04:19:08 2022 ] Epoch 57 Best Acc 58.76%
310
+ [ Thu Sep 8 04:19:08 2022 ] Training epoch: 66
311
+ [ Thu Sep 8 04:19:08 2022 ] Learning rate: 0.015
312
+ [ Thu Sep 8 04:23:30 2022 ] Mean training loss: 0.0376.
313
+ [ Thu Sep 8 04:23:30 2022 ] Time consumption: [Data]01%, [Network]99%
314
+ [ Thu Sep 8 04:23:30 2022 ] Eval epoch: 66
315
+ [ Thu Sep 8 04:31:18 2022 ] Epoch 66 Curr Acc: (34359/59477)57.77%
316
+ [ Thu Sep 8 04:31:18 2022 ] Epoch 57 Best Acc 58.76%
317
+ [ Thu Sep 8 04:31:18 2022 ] Training epoch: 67
318
+ [ Thu Sep 8 04:31:18 2022 ] Learning rate: 0.015
319
+ [ Thu Sep 8 04:35:40 2022 ] Mean training loss: 0.0326.
320
+ [ Thu Sep 8 04:35:40 2022 ] Time consumption: [Data]01%, [Network]99%
321
+ [ Thu Sep 8 04:35:40 2022 ] Eval epoch: 67
322
+ [ Thu Sep 8 04:43:29 2022 ] Epoch 67 Curr Acc: (34243/59477)57.57%
323
+ [ Thu Sep 8 04:43:29 2022 ] Epoch 57 Best Acc 58.76%
324
+ [ Thu Sep 8 04:43:29 2022 ] Training epoch: 68
325
+ [ Thu Sep 8 04:43:29 2022 ] Learning rate: 0.015
326
+ [ Thu Sep 8 04:47:50 2022 ] Mean training loss: 0.0375.
327
+ [ Thu Sep 8 04:47:50 2022 ] Time consumption: [Data]01%, [Network]99%
328
+ [ Thu Sep 8 04:47:50 2022 ] Eval epoch: 68
329
+ [ Thu Sep 8 04:55:38 2022 ] Epoch 68 Curr Acc: (33569/59477)56.44%
330
+ [ Thu Sep 8 04:55:38 2022 ] Epoch 57 Best Acc 58.76%
331
+ [ Thu Sep 8 04:55:38 2022 ] Training epoch: 69
332
+ [ Thu Sep 8 04:55:38 2022 ] Learning rate: 0.015
333
+ [ Thu Sep 8 05:00:00 2022 ] Mean training loss: 0.0333.
334
+ [ Thu Sep 8 05:00:00 2022 ] Time consumption: [Data]01%, [Network]99%
335
+ [ Thu Sep 8 05:00:00 2022 ] Eval epoch: 69
336
+ [ Thu Sep 8 05:07:48 2022 ] Epoch 69 Curr Acc: (34417/59477)57.87%
337
+ [ Thu Sep 8 05:07:48 2022 ] Epoch 57 Best Acc 58.76%
338
+ [ Thu Sep 8 05:07:48 2022 ] Training epoch: 70
339
+ [ Thu Sep 8 05:07:48 2022 ] Learning rate: 0.015
340
+ [ Thu Sep 8 05:12:11 2022 ] Mean training loss: 0.0329.
341
+ [ Thu Sep 8 05:12:11 2022 ] Time consumption: [Data]01%, [Network]99%
342
+ [ Thu Sep 8 05:12:11 2022 ] Eval epoch: 70
343
+ [ Thu Sep 8 05:19:58 2022 ] Epoch 70 Curr Acc: (33863/59477)56.93%
344
+ [ Thu Sep 8 05:19:58 2022 ] Epoch 57 Best Acc 58.76%
345
+ [ Thu Sep 8 05:19:58 2022 ] Training epoch: 71
346
+ [ Thu Sep 8 05:19:58 2022 ] Learning rate: 0.0015000000000000002
347
+ [ Thu Sep 8 05:24:20 2022 ] Mean training loss: 0.0224.
348
+ [ Thu Sep 8 05:24:20 2022 ] Time consumption: [Data]01%, [Network]99%
349
+ [ Thu Sep 8 05:24:20 2022 ] Eval epoch: 71
350
+ [ Thu Sep 8 05:32:08 2022 ] Epoch 71 Curr Acc: (34442/59477)57.91%
351
+ [ Thu Sep 8 05:32:08 2022 ] Epoch 57 Best Acc 58.76%
352
+ [ Thu Sep 8 05:32:08 2022 ] Training epoch: 72
353
+ [ Thu Sep 8 05:32:08 2022 ] Learning rate: 0.0015000000000000002
354
+ [ Thu Sep 8 05:36:30 2022 ] Mean training loss: 0.0190.
355
+ [ Thu Sep 8 05:36:30 2022 ] Time consumption: [Data]01%, [Network]99%
356
+ [ Thu Sep 8 05:36:30 2022 ] Eval epoch: 72
357
+ [ Thu Sep 8 05:44:17 2022 ] Epoch 72 Curr Acc: (34665/59477)58.28%
358
+ [ Thu Sep 8 05:44:17 2022 ] Epoch 57 Best Acc 58.76%
359
+ [ Thu Sep 8 05:44:17 2022 ] Training epoch: 73
360
+ [ Thu Sep 8 05:44:17 2022 ] Learning rate: 0.0015000000000000002
361
+ [ Thu Sep 8 05:48:39 2022 ] Mean training loss: 0.0169.
362
+ [ Thu Sep 8 05:48:39 2022 ] Time consumption: [Data]01%, [Network]99%
363
+ [ Thu Sep 8 05:48:39 2022 ] Eval epoch: 73
364
+ [ Thu Sep 8 05:56:27 2022 ] Epoch 73 Curr Acc: (34484/59477)57.98%
365
+ [ Thu Sep 8 05:56:27 2022 ] Epoch 57 Best Acc 58.76%
366
+ [ Thu Sep 8 05:56:27 2022 ] Training epoch: 74
367
+ [ Thu Sep 8 05:56:27 2022 ] Learning rate: 0.0015000000000000002
368
+ [ Thu Sep 8 06:00:48 2022 ] Mean training loss: 0.0175.
369
+ [ Thu Sep 8 06:00:48 2022 ] Time consumption: [Data]01%, [Network]99%
370
+ [ Thu Sep 8 06:00:48 2022 ] Eval epoch: 74
371
+ [ Thu Sep 8 06:08:36 2022 ] Epoch 74 Curr Acc: (34740/59477)58.41%
372
+ [ Thu Sep 8 06:08:36 2022 ] Epoch 57 Best Acc 58.76%
373
+ [ Thu Sep 8 06:08:36 2022 ] Training epoch: 75
374
+ [ Thu Sep 8 06:08:36 2022 ] Learning rate: 0.0015000000000000002
375
+ [ Thu Sep 8 06:12:58 2022 ] Mean training loss: 0.0159.
376
+ [ Thu Sep 8 06:12:58 2022 ] Time consumption: [Data]01%, [Network]99%
377
+ [ Thu Sep 8 06:12:58 2022 ] Eval epoch: 75
378
+ [ Thu Sep 8 06:20:45 2022 ] Epoch 75 Curr Acc: (34399/59477)57.84%
379
+ [ Thu Sep 8 06:20:45 2022 ] Epoch 57 Best Acc 58.76%
380
+ [ Thu Sep 8 06:20:45 2022 ] Training epoch: 76
381
+ [ Thu Sep 8 06:20:45 2022 ] Learning rate: 0.0015000000000000002
382
+ [ Thu Sep 8 06:25:07 2022 ] Mean training loss: 0.0149.
383
+ [ Thu Sep 8 06:25:07 2022 ] Time consumption: [Data]01%, [Network]99%
384
+ [ Thu Sep 8 06:25:07 2022 ] Eval epoch: 76
385
+ [ Thu Sep 8 06:32:54 2022 ] Epoch 76 Curr Acc: (34653/59477)58.26%
386
+ [ Thu Sep 8 06:32:54 2022 ] Epoch 57 Best Acc 58.76%
387
+ [ Thu Sep 8 06:32:54 2022 ] Training epoch: 77
388
+ [ Thu Sep 8 06:32:54 2022 ] Learning rate: 0.0015000000000000002
389
+ [ Thu Sep 8 06:37:16 2022 ] Mean training loss: 0.0167.
390
+ [ Thu Sep 8 06:37:16 2022 ] Time consumption: [Data]01%, [Network]99%
391
+ [ Thu Sep 8 06:37:16 2022 ] Eval epoch: 77
392
+ [ Thu Sep 8 06:45:03 2022 ] Epoch 77 Curr Acc: (34762/59477)58.45%
393
+ [ Thu Sep 8 06:45:03 2022 ] Epoch 57 Best Acc 58.76%
394
+ [ Thu Sep 8 06:45:03 2022 ] Training epoch: 78
395
+ [ Thu Sep 8 06:45:03 2022 ] Learning rate: 0.0015000000000000002
396
+ [ Thu Sep 8 06:49:25 2022 ] Mean training loss: 0.0148.
397
+ [ Thu Sep 8 06:49:25 2022 ] Time consumption: [Data]01%, [Network]99%
398
+ [ Thu Sep 8 06:49:25 2022 ] Eval epoch: 78
399
+ [ Thu Sep 8 06:57:13 2022 ] Epoch 78 Curr Acc: (34756/59477)58.44%
400
+ [ Thu Sep 8 06:57:13 2022 ] Epoch 57 Best Acc 58.76%
401
+ [ Thu Sep 8 06:57:13 2022 ] Training epoch: 79
402
+ [ Thu Sep 8 06:57:13 2022 ] Learning rate: 0.0015000000000000002
403
+ [ Thu Sep 8 07:01:34 2022 ] Mean training loss: 0.0150.
404
+ [ Thu Sep 8 07:01:34 2022 ] Time consumption: [Data]01%, [Network]99%
405
+ [ Thu Sep 8 07:01:34 2022 ] Eval epoch: 79
406
+ [ Thu Sep 8 07:09:21 2022 ] Epoch 79 Curr Acc: (33861/59477)56.93%
407
+ [ Thu Sep 8 07:09:21 2022 ] Epoch 57 Best Acc 58.76%
408
+ [ Thu Sep 8 07:09:21 2022 ] Training epoch: 80
409
+ [ Thu Sep 8 07:09:21 2022 ] Learning rate: 0.0015000000000000002
410
+ [ Thu Sep 8 07:13:44 2022 ] Mean training loss: 0.0138.
411
+ [ Thu Sep 8 07:13:44 2022 ] Time consumption: [Data]01%, [Network]99%
412
+ [ Thu Sep 8 07:13:44 2022 ] Eval epoch: 80
413
+ [ Thu Sep 8 07:21:31 2022 ] Epoch 80 Curr Acc: (34681/59477)58.31%
414
+ [ Thu Sep 8 07:21:31 2022 ] Epoch 57 Best Acc 58.76%
415
+ [ Thu Sep 8 07:21:31 2022 ] Training epoch: 81
416
+ [ Thu Sep 8 07:21:31 2022 ] Learning rate: 0.0015000000000000002
417
+ [ Thu Sep 8 07:25:54 2022 ] Mean training loss: 0.0137.
418
+ [ Thu Sep 8 07:25:54 2022 ] Time consumption: [Data]01%, [Network]99%
419
+ [ Thu Sep 8 07:25:54 2022 ] Eval epoch: 81
420
+ [ Thu Sep 8 07:33:41 2022 ] Epoch 81 Curr Acc: (34671/59477)58.29%
421
+ [ Thu Sep 8 07:33:41 2022 ] Epoch 57 Best Acc 58.76%
422
+ [ Thu Sep 8 07:33:41 2022 ] Training epoch: 82
423
+ [ Thu Sep 8 07:33:41 2022 ] Learning rate: 0.0015000000000000002
424
+ [ Thu Sep 8 07:38:03 2022 ] Mean training loss: 0.0154.
425
+ [ Thu Sep 8 07:38:03 2022 ] Time consumption: [Data]01%, [Network]99%
426
+ [ Thu Sep 8 07:38:03 2022 ] Eval epoch: 82
427
+ [ Thu Sep 8 07:45:51 2022 ] Epoch 82 Curr Acc: (34766/59477)58.45%
428
+ [ Thu Sep 8 07:45:51 2022 ] Epoch 57 Best Acc 58.76%
429
+ [ Thu Sep 8 07:45:51 2022 ] Training epoch: 83
430
+ [ Thu Sep 8 07:45:51 2022 ] Learning rate: 0.0015000000000000002
431
+ [ Thu Sep 8 07:50:13 2022 ] Mean training loss: 0.0134.
432
+ [ Thu Sep 8 07:50:13 2022 ] Time consumption: [Data]01%, [Network]99%
433
+ [ Thu Sep 8 07:50:13 2022 ] Eval epoch: 83
434
+ [ Thu Sep 8 07:58:00 2022 ] Epoch 83 Curr Acc: (34705/59477)58.35%
435
+ [ Thu Sep 8 07:58:00 2022 ] Epoch 57 Best Acc 58.76%
436
+ [ Thu Sep 8 07:58:00 2022 ] Training epoch: 84
437
+ [ Thu Sep 8 07:58:00 2022 ] Learning rate: 0.0015000000000000002
438
+ [ Thu Sep 8 08:02:22 2022 ] Mean training loss: 0.0133.
439
+ [ Thu Sep 8 08:02:22 2022 ] Time consumption: [Data]01%, [Network]99%
440
+ [ Thu Sep 8 08:02:22 2022 ] Eval epoch: 84
441
+ [ Thu Sep 8 08:10:09 2022 ] Epoch 84 Curr Acc: (34637/59477)58.24%
442
+ [ Thu Sep 8 08:10:09 2022 ] Epoch 57 Best Acc 58.76%
443
+ [ Thu Sep 8 08:10:10 2022 ] Training epoch: 85
444
+ [ Thu Sep 8 08:10:10 2022 ] Learning rate: 0.0015000000000000002
445
+ [ Thu Sep 8 08:14:31 2022 ] Mean training loss: 0.0128.
446
+ [ Thu Sep 8 08:14:31 2022 ] Time consumption: [Data]01%, [Network]99%
447
+ [ Thu Sep 8 08:14:31 2022 ] Eval epoch: 85
448
+ [ Thu Sep 8 08:22:19 2022 ] Epoch 85 Curr Acc: (34649/59477)58.26%
449
+ [ Thu Sep 8 08:22:19 2022 ] Epoch 57 Best Acc 58.76%
450
+ [ Thu Sep 8 08:22:19 2022 ] Training epoch: 86
451
+ [ Thu Sep 8 08:22:19 2022 ] Learning rate: 0.0015000000000000002
452
+ [ Thu Sep 8 08:26:40 2022 ] Mean training loss: 0.0135.
453
+ [ Thu Sep 8 08:26:40 2022 ] Time consumption: [Data]01%, [Network]99%
454
+ [ Thu Sep 8 08:26:40 2022 ] Eval epoch: 86
455
+ [ Thu Sep 8 08:34:28 2022 ] Epoch 86 Curr Acc: (34534/59477)58.06%
456
+ [ Thu Sep 8 08:34:28 2022 ] Epoch 57 Best Acc 58.76%
457
+ [ Thu Sep 8 08:34:28 2022 ] Training epoch: 87
458
+ [ Thu Sep 8 08:34:28 2022 ] Learning rate: 0.0015000000000000002
459
+ [ Thu Sep 8 08:38:49 2022 ] Mean training loss: 0.0114.
460
+ [ Thu Sep 8 08:38:49 2022 ] Time consumption: [Data]01%, [Network]99%
461
+ [ Thu Sep 8 08:38:50 2022 ] Eval epoch: 87
462
+ [ Thu Sep 8 08:46:37 2022 ] Epoch 87 Curr Acc: (34684/59477)58.31%
463
+ [ Thu Sep 8 08:46:37 2022 ] Epoch 57 Best Acc 58.76%
464
+ [ Thu Sep 8 08:46:37 2022 ] Training epoch: 88
465
+ [ Thu Sep 8 08:46:37 2022 ] Learning rate: 0.0015000000000000002
466
+ [ Thu Sep 8 08:50:59 2022 ] Mean training loss: 0.0127.
467
+ [ Thu Sep 8 08:50:59 2022 ] Time consumption: [Data]01%, [Network]99%
468
+ [ Thu Sep 8 08:50:59 2022 ] Eval epoch: 88
469
+ [ Thu Sep 8 08:58:46 2022 ] Epoch 88 Curr Acc: (33591/59477)56.48%
470
+ [ Thu Sep 8 08:58:46 2022 ] Epoch 57 Best Acc 58.76%
471
+ [ Thu Sep 8 08:58:47 2022 ] Training epoch: 89
472
+ [ Thu Sep 8 08:58:47 2022 ] Learning rate: 0.0015000000000000002
473
+ [ Thu Sep 8 09:03:09 2022 ] Mean training loss: 0.0123.
474
+ [ Thu Sep 8 09:03:09 2022 ] Time consumption: [Data]01%, [Network]99%
475
+ [ Thu Sep 8 09:03:09 2022 ] Eval epoch: 89
476
+ [ Thu Sep 8 09:10:56 2022 ] Epoch 89 Curr Acc: (34079/59477)57.30%
477
+ [ Thu Sep 8 09:10:56 2022 ] Epoch 57 Best Acc 58.76%
478
+ [ Thu Sep 8 09:10:56 2022 ] Training epoch: 90
479
+ [ Thu Sep 8 09:10:56 2022 ] Learning rate: 0.0015000000000000002
480
+ [ Thu Sep 8 09:15:18 2022 ] Mean training loss: 0.0137.
481
+ [ Thu Sep 8 09:15:18 2022 ] Time consumption: [Data]01%, [Network]99%
482
+ [ Thu Sep 8 09:15:18 2022 ] Eval epoch: 90
483
+ [ Thu Sep 8 09:23:05 2022 ] Epoch 90 Curr Acc: (34595/59477)58.17%
484
+ [ Thu Sep 8 09:23:05 2022 ] Epoch 57 Best Acc 58.76%
485
+ [ Thu Sep 8 09:23:05 2022 ] Training epoch: 91
486
+ [ Thu Sep 8 09:23:05 2022 ] Learning rate: 0.00015000000000000004
487
+ [ Thu Sep 8 09:27:27 2022 ] Mean training loss: 0.0133.
488
+ [ Thu Sep 8 09:27:27 2022 ] Time consumption: [Data]01%, [Network]99%
489
+ [ Thu Sep 8 09:27:27 2022 ] Eval epoch: 91
490
+ [ Thu Sep 8 09:35:14 2022 ] Epoch 91 Curr Acc: (34765/59477)58.45%
491
+ [ Thu Sep 8 09:35:14 2022 ] Epoch 57 Best Acc 58.76%
492
+ [ Thu Sep 8 09:35:14 2022 ] Training epoch: 92
493
+ [ Thu Sep 8 09:35:14 2022 ] Learning rate: 0.00015000000000000004
494
+ [ Thu Sep 8 09:39:36 2022 ] Mean training loss: 0.0131.
495
+ [ Thu Sep 8 09:39:36 2022 ] Time consumption: [Data]01%, [Network]99%
496
+ [ Thu Sep 8 09:39:36 2022 ] Eval epoch: 92
497
+ [ Thu Sep 8 09:47:24 2022 ] Epoch 92 Curr Acc: (34956/59477)58.77%
498
+ [ Thu Sep 8 09:47:24 2022 ] Epoch 92 Best Acc 58.77%
499
+ [ Thu Sep 8 09:47:24 2022 ] Training epoch: 93
500
+ [ Thu Sep 8 09:47:24 2022 ] Learning rate: 0.00015000000000000004
501
+ [ Thu Sep 8 09:51:45 2022 ] Mean training loss: 0.0126.
502
+ [ Thu Sep 8 09:51:45 2022 ] Time consumption: [Data]01%, [Network]99%
503
+ [ Thu Sep 8 09:51:45 2022 ] Eval epoch: 93
504
+ [ Thu Sep 8 09:59:33 2022 ] Epoch 93 Curr Acc: (34953/59477)58.77%
505
+ [ Thu Sep 8 09:59:33 2022 ] Epoch 92 Best Acc 58.77%
506
+ [ Thu Sep 8 09:59:33 2022 ] Training epoch: 94
507
+ [ Thu Sep 8 09:59:33 2022 ] Learning rate: 0.00015000000000000004
508
+ [ Thu Sep 8 10:03:54 2022 ] Mean training loss: 0.0135.
509
+ [ Thu Sep 8 10:03:54 2022 ] Time consumption: [Data]01%, [Network]99%
510
+ [ Thu Sep 8 10:03:54 2022 ] Eval epoch: 94
511
+ [ Thu Sep 8 10:11:42 2022 ] Epoch 94 Curr Acc: (35044/59477)58.92%
512
+ [ Thu Sep 8 10:11:42 2022 ] Epoch 94 Best Acc 58.92%
513
+ [ Thu Sep 8 10:11:42 2022 ] Training epoch: 95
514
+ [ Thu Sep 8 10:11:42 2022 ] Learning rate: 0.00015000000000000004
515
+ [ Thu Sep 8 10:16:03 2022 ] Mean training loss: 0.0117.
516
+ [ Thu Sep 8 10:16:03 2022 ] Time consumption: [Data]01%, [Network]99%
517
+ [ Thu Sep 8 10:16:03 2022 ] Eval epoch: 95
518
+ [ Thu Sep 8 10:23:51 2022 ] Epoch 95 Curr Acc: (33790/59477)56.81%
519
+ [ Thu Sep 8 10:23:51 2022 ] Epoch 94 Best Acc 58.92%
520
+ [ Thu Sep 8 10:23:51 2022 ] Training epoch: 96
521
+ [ Thu Sep 8 10:23:51 2022 ] Learning rate: 0.00015000000000000004
522
+ [ Thu Sep 8 10:28:13 2022 ] Mean training loss: 0.0124.
523
+ [ Thu Sep 8 10:28:13 2022 ] Time consumption: [Data]01%, [Network]99%
524
+ [ Thu Sep 8 10:28:13 2022 ] Eval epoch: 96
525
+ [ Thu Sep 8 10:36:00 2022 ] Epoch 96 Curr Acc: (34878/59477)58.64%
526
+ [ Thu Sep 8 10:36:00 2022 ] Epoch 94 Best Acc 58.92%
527
+ [ Thu Sep 8 10:36:00 2022 ] Training epoch: 97
528
+ [ Thu Sep 8 10:36:00 2022 ] Learning rate: 0.00015000000000000004
529
+ [ Thu Sep 8 10:40:24 2022 ] Mean training loss: 0.0122.
530
+ [ Thu Sep 8 10:40:24 2022 ] Time consumption: [Data]01%, [Network]99%
531
+ [ Thu Sep 8 10:40:24 2022 ] Eval epoch: 97
532
+ [ Thu Sep 8 10:48:12 2022 ] Epoch 97 Curr Acc: (34601/59477)58.18%
533
+ [ Thu Sep 8 10:48:12 2022 ] Epoch 94 Best Acc 58.92%
534
+ [ Thu Sep 8 10:48:12 2022 ] Training epoch: 98
535
+ [ Thu Sep 8 10:48:12 2022 ] Learning rate: 0.00015000000000000004
536
+ [ Thu Sep 8 10:52:34 2022 ] Mean training loss: 0.0127.
537
+ [ Thu Sep 8 10:52:34 2022 ] Time consumption: [Data]01%, [Network]99%
538
+ [ Thu Sep 8 10:52:34 2022 ] Eval epoch: 98
539
+ [ Thu Sep 8 11:00:22 2022 ] Epoch 98 Curr Acc: (34640/59477)58.24%
540
+ [ Thu Sep 8 11:00:22 2022 ] Epoch 94 Best Acc 58.92%
541
+ [ Thu Sep 8 11:00:22 2022 ] Training epoch: 99
542
+ [ Thu Sep 8 11:00:22 2022 ] Learning rate: 0.00015000000000000004
543
+ [ Thu Sep 8 11:04:45 2022 ] Mean training loss: 0.0119.
544
+ [ Thu Sep 8 11:04:45 2022 ] Time consumption: [Data]01%, [Network]99%
545
+ [ Thu Sep 8 11:04:45 2022 ] Eval epoch: 99
546
+ [ Thu Sep 8 11:12:32 2022 ] Epoch 99 Curr Acc: (34937/59477)58.74%
547
+ [ Thu Sep 8 11:12:32 2022 ] Epoch 94 Best Acc 58.92%
548
+ [ Thu Sep 8 11:12:33 2022 ] Training epoch: 100
549
+ [ Thu Sep 8 11:12:33 2022 ] Learning rate: 0.00015000000000000004
550
+ [ Thu Sep 8 11:16:55 2022 ] Mean training loss: 0.0120.
551
+ [ Thu Sep 8 11:16:55 2022 ] Time consumption: [Data]01%, [Network]99%
552
+ [ Thu Sep 8 11:16:55 2022 ] Eval epoch: 100
553
+ [ Thu Sep 8 11:24:43 2022 ] Epoch 100 Curr Acc: (34501/59477)58.01%
554
+ [ Thu Sep 8 11:24:43 2022 ] Epoch 94 Best Acc 58.92%
555
+ [ Thu Sep 8 11:24:43 2022 ] Training epoch: 101
556
+ [ Thu Sep 8 11:24:43 2022 ] Learning rate: 0.00015000000000000004
557
+ [ Thu Sep 8 11:29:05 2022 ] Mean training loss: 0.0122.
558
+ [ Thu Sep 8 11:29:05 2022 ] Time consumption: [Data]01%, [Network]99%
559
+ [ Thu Sep 8 11:29:05 2022 ] Eval epoch: 101
560
+ [ Thu Sep 8 11:36:54 2022 ] Epoch 101 Curr Acc: (34881/59477)58.65%
561
+ [ Thu Sep 8 11:36:54 2022 ] Epoch 94 Best Acc 58.92%
562
+ [ Thu Sep 8 11:36:54 2022 ] Training epoch: 102
563
+ [ Thu Sep 8 11:36:54 2022 ] Learning rate: 0.00015000000000000004
564
+ [ Thu Sep 8 11:41:16 2022 ] Mean training loss: 0.0118.
565
+ [ Thu Sep 8 11:41:16 2022 ] Time consumption: [Data]01%, [Network]99%
566
+ [ Thu Sep 8 11:41:16 2022 ] Eval epoch: 102
567
+ [ Thu Sep 8 11:49:04 2022 ] Epoch 102 Curr Acc: (35078/59477)58.98%
568
+ [ Thu Sep 8 11:49:04 2022 ] Epoch 102 Best Acc 58.98%
569
+ [ Thu Sep 8 11:49:04 2022 ] Training epoch: 103
570
+ [ Thu Sep 8 11:49:04 2022 ] Learning rate: 0.00015000000000000004
571
+ [ Thu Sep 8 11:53:27 2022 ] Mean training loss: 0.0119.
572
+ [ Thu Sep 8 11:53:27 2022 ] Time consumption: [Data]01%, [Network]99%
573
+ [ Thu Sep 8 11:53:27 2022 ] Eval epoch: 103
574
+ [ Thu Sep 8 12:01:15 2022 ] Epoch 103 Curr Acc: (34416/59477)57.86%
575
+ [ Thu Sep 8 12:01:15 2022 ] Epoch 102 Best Acc 58.98%
576
+ [ Thu Sep 8 12:01:15 2022 ] Training epoch: 104
577
+ [ Thu Sep 8 12:01:15 2022 ] Learning rate: 0.00015000000000000004
578
+ [ Thu Sep 8 12:05:38 2022 ] Mean training loss: 0.0125.
579
+ [ Thu Sep 8 12:05:38 2022 ] Time consumption: [Data]01%, [Network]99%
580
+ [ Thu Sep 8 12:05:38 2022 ] Eval epoch: 104
581
+ [ Thu Sep 8 12:13:26 2022 ] Epoch 104 Curr Acc: (34796/59477)58.50%
582
+ [ Thu Sep 8 12:13:26 2022 ] Epoch 102 Best Acc 58.98%
583
+ [ Thu Sep 8 12:13:26 2022 ] Training epoch: 105
584
+ [ Thu Sep 8 12:13:26 2022 ] Learning rate: 0.00015000000000000004
585
+ [ Thu Sep 8 12:17:48 2022 ] Mean training loss: 0.0115.
586
+ [ Thu Sep 8 12:17:48 2022 ] Time consumption: [Data]01%, [Network]99%
587
+ [ Thu Sep 8 12:17:49 2022 ] Eval epoch: 105
588
+ [ Thu Sep 8 12:25:37 2022 ] Epoch 105 Curr Acc: (34704/59477)58.35%
589
+ [ Thu Sep 8 12:25:37 2022 ] Epoch 102 Best Acc 58.98%
590
+ [ Thu Sep 8 12:25:37 2022 ] Training epoch: 106
591
+ [ Thu Sep 8 12:25:37 2022 ] Learning rate: 0.00015000000000000004
592
+ [ Thu Sep 8 12:29:59 2022 ] Mean training loss: 0.0128.
593
+ [ Thu Sep 8 12:29:59 2022 ] Time consumption: [Data]01%, [Network]99%
594
+ [ Thu Sep 8 12:29:59 2022 ] Eval epoch: 106
595
+ [ Thu Sep 8 12:37:47 2022 ] Epoch 106 Curr Acc: (34773/59477)58.46%
596
+ [ Thu Sep 8 12:37:47 2022 ] Epoch 102 Best Acc 58.98%
597
+ [ Thu Sep 8 12:37:47 2022 ] Training epoch: 107
598
+ [ Thu Sep 8 12:37:47 2022 ] Learning rate: 0.00015000000000000004
599
+ [ Thu Sep 8 12:42:09 2022 ] Mean training loss: 0.0120.
600
+ [ Thu Sep 8 12:42:09 2022 ] Time consumption: [Data]01%, [Network]99%
601
+ [ Thu Sep 8 12:42:09 2022 ] Eval epoch: 107
602
+ [ Thu Sep 8 12:49:58 2022 ] Epoch 107 Curr Acc: (34658/59477)58.27%
603
+ [ Thu Sep 8 12:49:58 2022 ] Epoch 102 Best Acc 58.98%
604
+ [ Thu Sep 8 12:49:58 2022 ] Training epoch: 108
605
+ [ Thu Sep 8 12:49:58 2022 ] Learning rate: 0.00015000000000000004
606
+ [ Thu Sep 8 12:54:19 2022 ] Mean training loss: 0.0108.
607
+ [ Thu Sep 8 12:54:19 2022 ] Time consumption: [Data]01%, [Network]99%
608
+ [ Thu Sep 8 12:54:19 2022 ] Eval epoch: 108
609
+ [ Thu Sep 8 13:02:07 2022 ] Epoch 108 Curr Acc: (34883/59477)58.65%
610
+ [ Thu Sep 8 13:02:07 2022 ] Epoch 102 Best Acc 58.98%
611
+ [ Thu Sep 8 13:02:07 2022 ] Training epoch: 109
612
+ [ Thu Sep 8 13:02:07 2022 ] Learning rate: 0.00015000000000000004
613
+ [ Thu Sep 8 13:06:29 2022 ] Mean training loss: 0.0124.
614
+ [ Thu Sep 8 13:06:29 2022 ] Time consumption: [Data]01%, [Network]99%
615
+ [ Thu Sep 8 13:06:29 2022 ] Eval epoch: 109
616
+ [ Thu Sep 8 13:14:17 2022 ] Epoch 109 Curr Acc: (34702/59477)58.35%
617
+ [ Thu Sep 8 13:14:17 2022 ] Epoch 102 Best Acc 58.98%
618
+ [ Thu Sep 8 13:14:17 2022 ] Training epoch: 110
619
+ [ Thu Sep 8 13:14:17 2022 ] Learning rate: 0.00015000000000000004
620
+ [ Thu Sep 8 13:18:38 2022 ] Mean training loss: 0.0120.
621
+ [ Thu Sep 8 13:18:38 2022 ] Time consumption: [Data]01%, [Network]99%
622
+ [ Thu Sep 8 13:18:38 2022 ] Eval epoch: 110
623
+ [ Thu Sep 8 13:26:27 2022 ] Epoch 110 Curr Acc: (34520/59477)58.04%
624
+ [ Thu Sep 8 13:26:27 2022 ] Epoch 102 Best Acc 58.98%
625
+ [ Thu Sep 8 13:26:27 2022 ] epoch: 102, best accuracy: 0.5897741984296451
626
+ [ Thu Sep 8 13:26:27 2022 ] Experiment: ./work_dir/ntu120/xset_bm
627
+ [ Thu Sep 8 13:26:27 2022 ] # generator parameters: 2.922995 M.
628
+ [ Thu Sep 8 13:26:27 2022 ] Load weights from ./runs/ntu120/xset_bm/runs-101-132294.pt.
629
+ [ Thu Sep 8 13:26:27 2022 ] Eval epoch: 1
630
+ [ Thu Sep 8 13:34:15 2022 ] Epoch 1 Curr Acc: (35078/59477)58.98%
631
+ [ Thu Sep 8 13:34:15 2022 ] Epoch 102 Best Acc 58.98%
ckpt/Others/MST-GCN/ntu120_xset/xset_j/AEMST_GCN.py ADDED
@@ -0,0 +1,168 @@
1
+ import torch
2
+ import torch.nn as nn
3
+ import torch.nn.functional as F
4
+
5
+ import numpy as np
6
+ import math
7
+
8
+ import sys
9
+ sys.path.append('../')
10
+ from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \
11
+ MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \
12
+ MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer
13
+ from model.activations import Activations
14
+ from model.utils import import_class, conv_branch_init, conv_init, bn_init
15
+ from model.attentions import Attention_Layer
16
+
17
+ # import model.attentions
18
+
19
+ __block_type__ = {
20
+ 'basic': (Basic_GCN_layer, Basic_TCN_layer),
21
+ 'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer),
22
+ 'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer),
23
+ 'ms': (MS_GCN_layer, MS_TCN_layer),
24
+ 'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer),
25
+ }
26
+
27
+
28
+ class Model(nn.Module):
29
+ def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten,
30
+ **kwargs):
31
+ super(Model, self).__init__()
32
+ kwargs['act'] = Activations(kwargs['act'])
33
+ atten = None if atten == 'None' else atten
34
+ if graph is None:
35
+ raise ValueError()
36
+ else:
37
+ Graph = import_class(graph)
38
+ self.graph = Graph(**graph_args)
39
+ A = self.graph.A
40
+
41
+ self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point)
42
+
43
+ self.layers = nn.ModuleList()
44
+
45
+ for i, block in enumerate(block_args):
46
+ if i == 0:
47
+ self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2],
48
+ kernel_size=kernel_size, stride=block[3], A=A, block_type='basic',
49
+ atten=None, **kwargs))
50
+ else:
51
+ self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2],
52
+ kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type,
53
+ atten=atten, **kwargs))
54
+
55
+ self.gap = nn.AdaptiveAvgPool2d(1)
56
+ self.fc = nn.Linear(block_args[-1][1], num_class)
57
+
58
+ for m in self.modules():
59
+ if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer):
60
+ for mm in m.modules():
61
+ if isinstance(mm, nn.Conv2d):
62
+ conv_branch_init(mm, self.graph.A.shape[0])
63
+ if isinstance(mm, nn.BatchNorm2d):
64
+ bn_init(mm, 1)
65
+ elif isinstance(m, nn.Conv2d):
66
+ conv_init(m)
67
+ elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d):
68
+ bn_init(m, 1)
69
+ elif isinstance(m, nn.Linear):
70
+ nn.init.normal_(m.weight, 0, math.sqrt(2. / num_class))
71
+
72
+ def forward(self, x):
73
+ N, C, T, V, M = x.size()
74
+
75
+ x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T) # N C T V M --> N M V C T
76
+ x = self.data_bn(x)
77
+ x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
78
+
79
+ for i, layer in enumerate(self.layers):
80
+ x = layer(x)
81
+
82
+ features = x
83
+
84
+ x = self.gap(x).view(N, M, -1).mean(dim=1)
85
+ x = self.fc(x)
86
+
87
+ return features, x
88
+
89
+
90
+ class MST_GCN_block(nn.Module):
91
+ def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs):
92
+ super(MST_GCN_block, self).__init__()
93
+ self.atten = atten
94
+ self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A,
95
+ residual=residual, **kwargs)
96
+ self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride,
97
+ residual=residual, **kwargs)
98
+ if atten is not None:
99
+ self.att = Attention_Layer(out_channels, atten, **kwargs)
100
+
101
+ def forward(self, x):
102
+ return self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x))
103
+
104
+
105
+ if __name__ == '__main__':
106
+ import sys
107
+ import time
108
+
109
+ parts = [
110
+ np.array([5, 6, 7, 8, 22, 23]) - 1, # left_arm
111
+ np.array([9, 10, 11, 12, 24, 25]) - 1, # right_arm
112
+ np.array([13, 14, 15, 16]) - 1, # left_leg
113
+ np.array([17, 18, 19, 20]) - 1, # right_leg
114
+ np.array([1, 2, 3, 4, 21]) - 1 # torso
115
+ ]
116
+
117
+ warmup_iter = 3
118
+ test_iter = 10
119
+ sys.path.append('/home/chenzhan/mywork/MST-GCN/')
120
+ from thop import profile
121
+ basic_channels = 112
122
+ cfgs = {
123
+ 'num_class': 2,
124
+ 'num_point': 25,
125
+ 'num_person': 1,
126
+ 'block_args': [[2, basic_channels, False, 1],
127
+ [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1],
128
+ [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1],
129
+ [basic_channels*2, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]],
130
+ 'graph': 'graph.ntu_rgb_d.Graph',
131
+ 'graph_args': {'labeling_mode': 'spatial'},
132
+ 'kernel_size': 9,
133
+ 'block_type': 'ms',
134
+ 'reduct_ratio': 2,
135
+ 'expand_ratio': 0,
136
+ 't_scale': 4,
137
+ 'layer_type': 'sep',
138
+ 'act': 'relu',
139
+ 's_scale': 4,
140
+ 'atten': 'stcja',
141
+ 'bias': True,
142
+ 'parts': parts
143
+ }
144
+
145
+ model = Model(**cfgs)
146
+
147
+ N, C, T, V, M = 4, 2, 16, 25, 1
148
+ inputs = torch.rand(N, C, T, V, M)
149
+
150
+ for i in range(warmup_iter + test_iter):
151
+ if i == warmup_iter:
152
+ start_time = time.time()
153
+ outputs = model(inputs)
154
+ end_time = time.time()
155
+
156
+ total_time = end_time - start_time
157
+ print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format(
158
+ total_time, test_iter, total_time/test_iter/N))
159
+
160
+ print(outputs.size())
161
+
162
+ hereflops, params = profile(model, inputs=(inputs,), verbose=False)
163
+ print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N))
164
+ print('# Params is {} M'.format(sum(param.numel() for param in model.parameters()) / 10 ** 6))
165
+
166
+
167
+
168
+
ckpt/Others/MST-GCN/ntu120_xset/xset_j/config.yaml ADDED
@@ -0,0 +1,107 @@
1
+ base_lr: 0.15
2
+ batch_size: 8
3
+ config: config/ntu120/xset_j.yaml
4
+ device:
5
+ - 0
6
+ eval_interval: 5
7
+ feeder: feeders.feeder.Feeder
8
+ ignore_weights: []
9
+ local_rank: 0
10
+ log_interval: 100
11
+ model: model.AEMST_GCN.Model
12
+ model_args:
13
+ act: relu
14
+ atten: None
15
+ bias: true
16
+ block_args:
17
+ - - 3
18
+ - 112
19
+ - false
20
+ - 1
21
+ - - 112
22
+ - 112
23
+ - true
24
+ - 1
25
+ - - 112
26
+ - 112
27
+ - true
28
+ - 1
29
+ - - 112
30
+ - 112
31
+ - true
32
+ - 1
33
+ - - 112
34
+ - 224
35
+ - true
36
+ - 2
37
+ - - 224
38
+ - 224
39
+ - true
40
+ - 1
41
+ - - 224
42
+ - 224
43
+ - true
44
+ - 1
45
+ - - 224
46
+ - 448
47
+ - true
48
+ - 2
49
+ - - 448
50
+ - 448
51
+ - true
52
+ - 1
53
+ - - 448
54
+ - 448
55
+ - true
56
+ - 1
57
+ block_type: ms
58
+ expand_ratio: 0
59
+ graph: graph.ntu_rgb_d.Graph
60
+ graph_args:
61
+ labeling_mode: spatial
62
+ kernel_size: 9
63
+ layer_type: basic
64
+ num_class: 120
65
+ num_person: 2
66
+ num_point: 25
67
+ reduct_ratio: 2
68
+ s_scale: 4
69
+ t_scale: 4
70
+ model_path: ''
71
+ model_saved_name: ./runs/ntu120/xset_j/runs
72
+ nesterov: true
73
+ num_epoch: 110
74
+ num_worker: 32
75
+ only_train_epoch: 0
76
+ only_train_part: false
77
+ optimizer: SGD
78
+ phase: train
79
+ print_log: true
80
+ save_interval: 1
81
+ save_score: true
82
+ seed: 1
83
+ show_topk:
84
+ - 1
85
+ - 5
86
+ start_epoch: 0
87
+ step:
88
+ - 50
89
+ - 70
90
+ - 90
91
+ test_batch_size: 64
92
+ test_feeder_args:
93
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_joint.npy
94
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl
95
+ train_feeder_args:
96
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_joint.npy
97
+ debug: false
98
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl
99
+ normalization: false
100
+ random_choose: false
101
+ random_move: false
102
+ random_shift: false
103
+ window_size: -1
104
+ warm_up_epoch: 10
105
+ weight_decay: 0.0001
106
+ weights: null
107
+ work_dir: ./work_dir/ntu120/xset_j
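
For reference, `model_args` in this YAML maps one-to-one onto the `Model(...)` constructor in AEMST_GCN.py. A minimal, hypothetical loading sketch, assuming PyYAML is installed and the MST-GCN repo root is on `sys.path` (the file path is simply the value of the `config` field above):

    import yaml
    from model.AEMST_GCN import Model  # assumes the repo layout used by these checkpoints

    with open('config/ntu120/xset_j.yaml') as f:
        cfg = yaml.safe_load(f)

    model = Model(**cfg['model_args'])  # the graph string is resolved internally via import_class
    # Weights for this run were saved as e.g. ./runs/ntu120/xset_j/runs-99-129700.pt (see log.txt);
    # they can be restored with model.load_state_dict(torch.load(path, map_location='cpu')).
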
ckpt/Others/MST-GCN/ntu120_xset/xset_j/epoch1_test_score.pkl ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:07508146e0acf0d9152cf573420ce523856f97de6dad49a42ab2bd14d86cebbe
3
+ size 34946665
ckpt/Others/MST-GCN/ntu120_xset/xset_j/log.txt ADDED
@@ -0,0 +1,631 @@
1
+ [ Wed Sep 7 21:35:51 2022 ] # generator parameters: 2.922995 M.
2
+ [ Wed Sep 7 21:35:51 2022 ] Parameters:
3
+ {'work_dir': './work_dir/ntu120/xset_j', 'model_saved_name': './runs/ntu120/xset_j/runs', 'config': 'config/ntu120/xset_j.yaml', 'phase': 'train', 'save_score': True, 'seed': 1, 'log_interval': 100, 'save_interval': 1, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl'}, 'model': 'model.AEMST_GCN.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'block_args': [[3, 112, False, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 224, True, 2], [224, 224, True, 1], [224, 224, True, 1], [224, 448, True, 2], [448, 448, True, 1], [448, 448, True, 1]], 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}, 'kernel_size': 9, 'block_type': 'ms', 'reduct_ratio': 2, 'expand_ratio': 0, 's_scale': 4, 't_scale': 4, 'layer_type': 'basic', 'act': 'relu', 'atten': 'None', 'bias': True}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.15, 'step': [50, 70, 90], 'device': [0], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 8, 'test_batch_size': 64, 'start_epoch': 0, 'model_path': '', 'num_epoch': 110, 'weight_decay': 0.0001, 'only_train_part': False, 'only_train_epoch': 0, 'warm_up_epoch': 10, 'local_rank': 0}
4
+
5
+ [ Wed Sep 7 21:35:51 2022 ] Training epoch: 1
6
+ [ Wed Sep 7 21:35:51 2022 ] Learning rate: 0.015
7
+ [ Wed Sep 7 21:40:17 2022 ] Mean training loss: 3.6800.
8
+ [ Wed Sep 7 21:40:17 2022 ] Time consumption: [Data]01%, [Network]98%
9
+ [ Wed Sep 7 21:40:17 2022 ] Training epoch: 2
10
+ [ Wed Sep 7 21:40:17 2022 ] Learning rate: 0.03
11
+ [ Wed Sep 7 21:44:42 2022 ] Mean training loss: 2.8223.
12
+ [ Wed Sep 7 21:44:42 2022 ] Time consumption: [Data]01%, [Network]99%
13
+ [ Wed Sep 7 21:44:42 2022 ] Training epoch: 3
14
+ [ Wed Sep 7 21:44:42 2022 ] Learning rate: 0.045
15
+ [ Wed Sep 7 21:49:05 2022 ] Mean training loss: 2.3654.
16
+ [ Wed Sep 7 21:49:05 2022 ] Time consumption: [Data]01%, [Network]99%
17
+ [ Wed Sep 7 21:49:05 2022 ] Training epoch: 4
18
+ [ Wed Sep 7 21:49:05 2022 ] Learning rate: 0.06
19
+ [ Wed Sep 7 21:53:27 2022 ] Mean training loss: 2.0752.
20
+ [ Wed Sep 7 21:53:27 2022 ] Time consumption: [Data]01%, [Network]99%
21
+ [ Wed Sep 7 21:53:27 2022 ] Training epoch: 5
22
+ [ Wed Sep 7 21:53:27 2022 ] Learning rate: 0.075
23
+ [ Wed Sep 7 21:57:50 2022 ] Mean training loss: 1.8883.
24
+ [ Wed Sep 7 21:57:50 2022 ] Time consumption: [Data]01%, [Network]99%
25
+ [ Wed Sep 7 21:57:50 2022 ] Training epoch: 6
26
+ [ Wed Sep 7 21:57:50 2022 ] Learning rate: 0.09
27
+ [ Wed Sep 7 22:02:13 2022 ] Mean training loss: 1.7515.
28
+ [ Wed Sep 7 22:02:13 2022 ] Time consumption: [Data]01%, [Network]99%
29
+ [ Wed Sep 7 22:02:13 2022 ] Training epoch: 7
30
+ [ Wed Sep 7 22:02:13 2022 ] Learning rate: 0.10500000000000001
31
+ [ Wed Sep 7 22:06:35 2022 ] Mean training loss: 1.6382.
32
+ [ Wed Sep 7 22:06:35 2022 ] Time consumption: [Data]01%, [Network]99%
33
+ [ Wed Sep 7 22:06:35 2022 ] Training epoch: 8
34
+ [ Wed Sep 7 22:06:35 2022 ] Learning rate: 0.12
35
+ [ Wed Sep 7 22:10:58 2022 ] Mean training loss: 1.5736.
36
+ [ Wed Sep 7 22:10:58 2022 ] Time consumption: [Data]01%, [Network]99%
37
+ [ Wed Sep 7 22:10:58 2022 ] Training epoch: 9
38
+ [ Wed Sep 7 22:10:58 2022 ] Learning rate: 0.13499999999999998
39
+ [ Wed Sep 7 22:15:20 2022 ] Mean training loss: 1.5032.
40
+ [ Wed Sep 7 22:15:20 2022 ] Time consumption: [Data]01%, [Network]99%
41
+ [ Wed Sep 7 22:15:20 2022 ] Training epoch: 10
42
+ [ Wed Sep 7 22:15:20 2022 ] Learning rate: 0.15
43
+ [ Wed Sep 7 22:19:43 2022 ] Mean training loss: 1.4871.
44
+ [ Wed Sep 7 22:19:43 2022 ] Time consumption: [Data]01%, [Network]99%
45
+ [ Wed Sep 7 22:19:43 2022 ] Training epoch: 11
46
+ [ Wed Sep 7 22:19:43 2022 ] Learning rate: 0.15
47
+ [ Wed Sep 7 22:24:05 2022 ] Mean training loss: 1.3952.
48
+ [ Wed Sep 7 22:24:05 2022 ] Time consumption: [Data]01%, [Network]99%
49
+ [ Wed Sep 7 22:24:05 2022 ] Training epoch: 12
50
+ [ Wed Sep 7 22:24:05 2022 ] Learning rate: 0.15
51
+ [ Wed Sep 7 22:28:28 2022 ] Mean training loss: 1.3432.
52
+ [ Wed Sep 7 22:28:28 2022 ] Time consumption: [Data]01%, [Network]99%
53
+ [ Wed Sep 7 22:28:28 2022 ] Training epoch: 13
54
+ [ Wed Sep 7 22:28:28 2022 ] Learning rate: 0.15
55
+ [ Wed Sep 7 22:32:51 2022 ] Mean training loss: 1.2907.
56
+ [ Wed Sep 7 22:32:51 2022 ] Time consumption: [Data]01%, [Network]99%
57
+ [ Wed Sep 7 22:32:51 2022 ] Training epoch: 14
58
+ [ Wed Sep 7 22:32:51 2022 ] Learning rate: 0.15
59
+ [ Wed Sep 7 22:37:13 2022 ] Mean training loss: 1.2527.
60
+ [ Wed Sep 7 22:37:13 2022 ] Time consumption: [Data]01%, [Network]99%
61
+ [ Wed Sep 7 22:37:13 2022 ] Training epoch: 15
62
+ [ Wed Sep 7 22:37:13 2022 ] Learning rate: 0.15
63
+ [ Wed Sep 7 22:41:36 2022 ] Mean training loss: 1.2237.
64
+ [ Wed Sep 7 22:41:36 2022 ] Time consumption: [Data]01%, [Network]99%
65
+ [ Wed Sep 7 22:41:36 2022 ] Training epoch: 16
66
+ [ Wed Sep 7 22:41:36 2022 ] Learning rate: 0.15
67
+ [ Wed Sep 7 22:45:59 2022 ] Mean training loss: 1.1655.
68
+ [ Wed Sep 7 22:45:59 2022 ] Time consumption: [Data]01%, [Network]99%
69
+ [ Wed Sep 7 22:45:59 2022 ] Training epoch: 17
70
+ [ Wed Sep 7 22:45:59 2022 ] Learning rate: 0.15
71
+ [ Wed Sep 7 22:50:23 2022 ] Mean training loss: 1.1655.
72
+ [ Wed Sep 7 22:50:23 2022 ] Time consumption: [Data]01%, [Network]99%
73
+ [ Wed Sep 7 22:50:23 2022 ] Training epoch: 18
74
+ [ Wed Sep 7 22:50:23 2022 ] Learning rate: 0.15
75
+ [ Wed Sep 7 22:54:46 2022 ] Mean training loss: 1.1225.
76
+ [ Wed Sep 7 22:54:46 2022 ] Time consumption: [Data]01%, [Network]99%
77
+ [ Wed Sep 7 22:54:46 2022 ] Training epoch: 19
78
+ [ Wed Sep 7 22:54:46 2022 ] Learning rate: 0.15
79
+ [ Wed Sep 7 22:59:09 2022 ] Mean training loss: 1.1175.
80
+ [ Wed Sep 7 22:59:09 2022 ] Time consumption: [Data]01%, [Network]99%
81
+ [ Wed Sep 7 22:59:09 2022 ] Training epoch: 20
82
+ [ Wed Sep 7 22:59:09 2022 ] Learning rate: 0.15
83
+ [ Wed Sep 7 23:03:32 2022 ] Mean training loss: 1.0823.
84
+ [ Wed Sep 7 23:03:32 2022 ] Time consumption: [Data]01%, [Network]99%
85
+ [ Wed Sep 7 23:03:32 2022 ] Training epoch: 21
86
+ [ Wed Sep 7 23:03:32 2022 ] Learning rate: 0.15
87
+ [ Wed Sep 7 23:07:55 2022 ] Mean training loss: 1.0720.
88
+ [ Wed Sep 7 23:07:55 2022 ] Time consumption: [Data]01%, [Network]99%
89
+ [ Wed Sep 7 23:07:55 2022 ] Training epoch: 22
90
+ [ Wed Sep 7 23:07:55 2022 ] Learning rate: 0.15
91
+ [ Wed Sep 7 23:12:18 2022 ] Mean training loss: 1.0578.
92
+ [ Wed Sep 7 23:12:18 2022 ] Time consumption: [Data]01%, [Network]99%
93
+ [ Wed Sep 7 23:12:18 2022 ] Training epoch: 23
94
+ [ Wed Sep 7 23:12:18 2022 ] Learning rate: 0.15
95
+ [ Wed Sep 7 23:16:42 2022 ] Mean training loss: 1.0478.
96
+ [ Wed Sep 7 23:16:42 2022 ] Time consumption: [Data]01%, [Network]99%
97
+ [ Wed Sep 7 23:16:42 2022 ] Training epoch: 24
98
+ [ Wed Sep 7 23:16:42 2022 ] Learning rate: 0.15
99
+ [ Wed Sep 7 23:21:06 2022 ] Mean training loss: 1.0217.
100
+ [ Wed Sep 7 23:21:06 2022 ] Time consumption: [Data]01%, [Network]99%
101
+ [ Wed Sep 7 23:21:06 2022 ] Training epoch: 25
102
+ [ Wed Sep 7 23:21:06 2022 ] Learning rate: 0.15
103
+ [ Wed Sep 7 23:25:29 2022 ] Mean training loss: 1.0131.
104
+ [ Wed Sep 7 23:25:29 2022 ] Time consumption: [Data]01%, [Network]99%
105
+ [ Wed Sep 7 23:25:29 2022 ] Training epoch: 26
106
+ [ Wed Sep 7 23:25:29 2022 ] Learning rate: 0.15
107
+ [ Wed Sep 7 23:29:53 2022 ] Mean training loss: 0.9915.
108
+ [ Wed Sep 7 23:29:53 2022 ] Time consumption: [Data]01%, [Network]99%
109
+ [ Wed Sep 7 23:29:53 2022 ] Training epoch: 27
110
+ [ Wed Sep 7 23:29:53 2022 ] Learning rate: 0.15
111
+ [ Wed Sep 7 23:34:17 2022 ] Mean training loss: 0.9798.
112
+ [ Wed Sep 7 23:34:17 2022 ] Time consumption: [Data]01%, [Network]99%
113
+ [ Wed Sep 7 23:34:17 2022 ] Training epoch: 28
114
+ [ Wed Sep 7 23:34:17 2022 ] Learning rate: 0.15
115
+ [ Wed Sep 7 23:38:40 2022 ] Mean training loss: 0.9867.
116
+ [ Wed Sep 7 23:38:40 2022 ] Time consumption: [Data]01%, [Network]99%
117
+ [ Wed Sep 7 23:38:40 2022 ] Training epoch: 29
118
+ [ Wed Sep 7 23:38:40 2022 ] Learning rate: 0.15
119
+ [ Wed Sep 7 23:43:03 2022 ] Mean training loss: 0.9692.
120
+ [ Wed Sep 7 23:43:03 2022 ] Time consumption: [Data]01%, [Network]99%
121
+ [ Wed Sep 7 23:43:03 2022 ] Training epoch: 30
122
+ [ Wed Sep 7 23:43:03 2022 ] Learning rate: 0.15
123
+ [ Wed Sep 7 23:47:27 2022 ] Mean training loss: 0.9718.
124
+ [ Wed Sep 7 23:47:27 2022 ] Time consumption: [Data]01%, [Network]99%
125
+ [ Wed Sep 7 23:47:27 2022 ] Training epoch: 31
126
+ [ Wed Sep 7 23:47:27 2022 ] Learning rate: 0.15
127
+ [ Wed Sep 7 23:51:50 2022 ] Mean training loss: 0.9417.
128
+ [ Wed Sep 7 23:51:50 2022 ] Time consumption: [Data]01%, [Network]99%
129
+ [ Wed Sep 7 23:51:50 2022 ] Training epoch: 32
130
+ [ Wed Sep 7 23:51:50 2022 ] Learning rate: 0.15
131
+ [ Wed Sep 7 23:56:14 2022 ] Mean training loss: 0.9465.
132
+ [ Wed Sep 7 23:56:14 2022 ] Time consumption: [Data]01%, [Network]99%
133
+ [ Wed Sep 7 23:56:14 2022 ] Training epoch: 33
134
+ [ Wed Sep 7 23:56:14 2022 ] Learning rate: 0.15
135
+ [ Thu Sep 8 00:00:37 2022 ] Mean training loss: 0.9442.
136
+ [ Thu Sep 8 00:00:37 2022 ] Time consumption: [Data]01%, [Network]99%
137
+ [ Thu Sep 8 00:00:37 2022 ] Training epoch: 34
138
+ [ Thu Sep 8 00:00:37 2022 ] Learning rate: 0.15
139
+ [ Thu Sep 8 00:05:00 2022 ] Mean training loss: 0.9245.
140
+ [ Thu Sep 8 00:05:00 2022 ] Time consumption: [Data]01%, [Network]99%
141
+ [ Thu Sep 8 00:05:00 2022 ] Training epoch: 35
142
+ [ Thu Sep 8 00:05:00 2022 ] Learning rate: 0.15
143
+ [ Thu Sep 8 00:09:24 2022 ] Mean training loss: 0.9371.
144
+ [ Thu Sep 8 00:09:24 2022 ] Time consumption: [Data]01%, [Network]99%
145
+ [ Thu Sep 8 00:09:24 2022 ] Training epoch: 36
146
+ [ Thu Sep 8 00:09:24 2022 ] Learning rate: 0.15
147
+ [ Thu Sep 8 00:13:47 2022 ] Mean training loss: 0.9328.
148
+ [ Thu Sep 8 00:13:47 2022 ] Time consumption: [Data]01%, [Network]99%
149
+ [ Thu Sep 8 00:13:47 2022 ] Training epoch: 37
150
+ [ Thu Sep 8 00:13:47 2022 ] Learning rate: 0.15
151
+ [ Thu Sep 8 00:18:11 2022 ] Mean training loss: 0.9232.
152
+ [ Thu Sep 8 00:18:11 2022 ] Time consumption: [Data]01%, [Network]99%
153
+ [ Thu Sep 8 00:18:11 2022 ] Training epoch: 38
154
+ [ Thu Sep 8 00:18:11 2022 ] Learning rate: 0.15
155
+ [ Thu Sep 8 00:22:35 2022 ] Mean training loss: 0.9113.
156
+ [ Thu Sep 8 00:22:35 2022 ] Time consumption: [Data]01%, [Network]99%
157
+ [ Thu Sep 8 00:22:35 2022 ] Training epoch: 39
158
+ [ Thu Sep 8 00:22:35 2022 ] Learning rate: 0.15
159
+ [ Thu Sep 8 00:26:58 2022 ] Mean training loss: 0.9030.
160
+ [ Thu Sep 8 00:26:58 2022 ] Time consumption: [Data]01%, [Network]99%
161
+ [ Thu Sep 8 00:26:58 2022 ] Training epoch: 40
162
+ [ Thu Sep 8 00:26:58 2022 ] Learning rate: 0.15
163
+ [ Thu Sep 8 00:31:22 2022 ] Mean training loss: 0.9017.
164
+ [ Thu Sep 8 00:31:22 2022 ] Time consumption: [Data]01%, [Network]99%
165
+ [ Thu Sep 8 00:31:22 2022 ] Training epoch: 41
166
+ [ Thu Sep 8 00:31:22 2022 ] Learning rate: 0.15
167
+ [ Thu Sep 8 00:35:45 2022 ] Mean training loss: 0.8869.
168
+ [ Thu Sep 8 00:35:45 2022 ] Time consumption: [Data]01%, [Network]99%
169
+ [ Thu Sep 8 00:35:45 2022 ] Training epoch: 42
170
+ [ Thu Sep 8 00:35:45 2022 ] Learning rate: 0.15
171
+ [ Thu Sep 8 00:40:08 2022 ] Mean training loss: 0.8835.
172
+ [ Thu Sep 8 00:40:08 2022 ] Time consumption: [Data]01%, [Network]99%
173
+ [ Thu Sep 8 00:40:08 2022 ] Training epoch: 43
174
+ [ Thu Sep 8 00:40:08 2022 ] Learning rate: 0.15
175
+ [ Thu Sep 8 00:44:31 2022 ] Mean training loss: 0.8969.
176
+ [ Thu Sep 8 00:44:31 2022 ] Time consumption: [Data]01%, [Network]99%
177
+ [ Thu Sep 8 00:44:31 2022 ] Training epoch: 44
178
+ [ Thu Sep 8 00:44:31 2022 ] Learning rate: 0.15
179
+ [ Thu Sep 8 00:48:55 2022 ] Mean training loss: 0.8744.
180
+ [ Thu Sep 8 00:48:55 2022 ] Time consumption: [Data]01%, [Network]99%
181
+ [ Thu Sep 8 00:48:55 2022 ] Training epoch: 45
182
+ [ Thu Sep 8 00:48:55 2022 ] Learning rate: 0.15
183
+ [ Thu Sep 8 00:53:18 2022 ] Mean training loss: 0.8795.
184
+ [ Thu Sep 8 00:53:18 2022 ] Time consumption: [Data]01%, [Network]99%
185
+ [ Thu Sep 8 00:53:18 2022 ] Training epoch: 46
186
+ [ Thu Sep 8 00:53:18 2022 ] Learning rate: 0.15
187
+ [ Thu Sep 8 00:57:42 2022 ] Mean training loss: 0.8780.
188
+ [ Thu Sep 8 00:57:42 2022 ] Time consumption: [Data]01%, [Network]99%
189
+ [ Thu Sep 8 00:57:42 2022 ] Training epoch: 47
190
+ [ Thu Sep 8 00:57:42 2022 ] Learning rate: 0.15
191
+ [ Thu Sep 8 01:02:05 2022 ] Mean training loss: 0.8804.
192
+ [ Thu Sep 8 01:02:05 2022 ] Time consumption: [Data]01%, [Network]99%
193
+ [ Thu Sep 8 01:02:05 2022 ] Training epoch: 48
194
+ [ Thu Sep 8 01:02:05 2022 ] Learning rate: 0.15
195
+ [ Thu Sep 8 01:06:28 2022 ] Mean training loss: 0.8462.
196
+ [ Thu Sep 8 01:06:28 2022 ] Time consumption: [Data]01%, [Network]99%
197
+ [ Thu Sep 8 01:06:28 2022 ] Training epoch: 49
198
+ [ Thu Sep 8 01:06:28 2022 ] Learning rate: 0.15
199
+ [ Thu Sep 8 01:10:51 2022 ] Mean training loss: 0.8762.
200
+ [ Thu Sep 8 01:10:51 2022 ] Time consumption: [Data]01%, [Network]99%
201
+ [ Thu Sep 8 01:10:51 2022 ] Training epoch: 50
202
+ [ Thu Sep 8 01:10:51 2022 ] Learning rate: 0.15
203
+ [ Thu Sep 8 01:15:14 2022 ] Mean training loss: 0.8819.
204
+ [ Thu Sep 8 01:15:14 2022 ] Time consumption: [Data]01%, [Network]99%
205
+ [ Thu Sep 8 01:15:14 2022 ] Training epoch: 51
206
+ [ Thu Sep 8 01:15:14 2022 ] Learning rate: 0.015
207
+ [ Thu Sep 8 01:19:37 2022 ] Mean training loss: 0.4295.
208
+ [ Thu Sep 8 01:19:37 2022 ] Time consumption: [Data]01%, [Network]99%
209
+ [ Thu Sep 8 01:19:37 2022 ] Eval epoch: 51
210
+ [ Thu Sep 8 01:27:35 2022 ] Epoch 51 Curr Acc: (34662/59477)58.28%
211
+ [ Thu Sep 8 01:27:35 2022 ] Epoch 51 Best Acc 58.28%
212
+ [ Thu Sep 8 01:27:36 2022 ] Training epoch: 52
213
+ [ Thu Sep 8 01:27:36 2022 ] Learning rate: 0.015
214
+ [ Thu Sep 8 01:31:58 2022 ] Mean training loss: 0.2936.
215
+ [ Thu Sep 8 01:31:58 2022 ] Time consumption: [Data]01%, [Network]99%
216
+ [ Thu Sep 8 01:31:58 2022 ] Eval epoch: 52
217
+ [ Thu Sep 8 01:39:49 2022 ] Epoch 52 Curr Acc: (36032/59477)60.58%
218
+ [ Thu Sep 8 01:39:49 2022 ] Epoch 52 Best Acc 60.58%
219
+ [ Thu Sep 8 01:39:49 2022 ] Training epoch: 53
220
+ [ Thu Sep 8 01:39:49 2022 ] Learning rate: 0.015
221
+ [ Thu Sep 8 01:44:12 2022 ] Mean training loss: 0.2383.
222
+ [ Thu Sep 8 01:44:12 2022 ] Time consumption: [Data]01%, [Network]99%
223
+ [ Thu Sep 8 01:44:12 2022 ] Eval epoch: 53
224
+ [ Thu Sep 8 01:52:03 2022 ] Epoch 53 Curr Acc: (36164/59477)60.80%
225
+ [ Thu Sep 8 01:52:03 2022 ] Epoch 53 Best Acc 60.80%
226
+ [ Thu Sep 8 01:52:03 2022 ] Training epoch: 54
227
+ [ Thu Sep 8 01:52:03 2022 ] Learning rate: 0.015
228
+ [ Thu Sep 8 01:56:25 2022 ] Mean training loss: 0.2054.
229
+ [ Thu Sep 8 01:56:25 2022 ] Time consumption: [Data]01%, [Network]99%
230
+ [ Thu Sep 8 01:56:25 2022 ] Eval epoch: 54
231
+ [ Thu Sep 8 02:04:16 2022 ] Epoch 54 Curr Acc: (36324/59477)61.07%
232
+ [ Thu Sep 8 02:04:16 2022 ] Epoch 54 Best Acc 61.07%
233
+ [ Thu Sep 8 02:04:16 2022 ] Training epoch: 55
234
+ [ Thu Sep 8 02:04:16 2022 ] Learning rate: 0.015
235
+ [ Thu Sep 8 02:08:40 2022 ] Mean training loss: 0.1660.
236
+ [ Thu Sep 8 02:08:40 2022 ] Time consumption: [Data]01%, [Network]99%
237
+ [ Thu Sep 8 02:08:40 2022 ] Eval epoch: 55
238
+ [ Thu Sep 8 02:16:31 2022 ] Epoch 55 Curr Acc: (36200/59477)60.86%
239
+ [ Thu Sep 8 02:16:31 2022 ] Epoch 54 Best Acc 61.07%
240
+ [ Thu Sep 8 02:16:31 2022 ] Training epoch: 56
241
+ [ Thu Sep 8 02:16:31 2022 ] Learning rate: 0.015
242
+ [ Thu Sep 8 02:20:54 2022 ] Mean training loss: 0.1457.
243
+ [ Thu Sep 8 02:20:54 2022 ] Time consumption: [Data]01%, [Network]99%
244
+ [ Thu Sep 8 02:20:54 2022 ] Eval epoch: 56
245
+ [ Thu Sep 8 02:28:45 2022 ] Epoch 56 Curr Acc: (36422/59477)61.24%
246
+ [ Thu Sep 8 02:28:45 2022 ] Epoch 56 Best Acc 61.24%
247
+ [ Thu Sep 8 02:28:45 2022 ] Training epoch: 57
248
+ [ Thu Sep 8 02:28:45 2022 ] Learning rate: 0.015
249
+ [ Thu Sep 8 02:33:08 2022 ] Mean training loss: 0.1336.
250
+ [ Thu Sep 8 02:33:08 2022 ] Time consumption: [Data]01%, [Network]99%
251
+ [ Thu Sep 8 02:33:08 2022 ] Eval epoch: 57
252
+ [ Thu Sep 8 02:40:59 2022 ] Epoch 57 Curr Acc: (36565/59477)61.48%
253
+ [ Thu Sep 8 02:40:59 2022 ] Epoch 57 Best Acc 61.48%
254
+ [ Thu Sep 8 02:40:59 2022 ] Training epoch: 58
255
+ [ Thu Sep 8 02:40:59 2022 ] Learning rate: 0.015
256
+ [ Thu Sep 8 02:45:22 2022 ] Mean training loss: 0.1069.
257
+ [ Thu Sep 8 02:45:22 2022 ] Time consumption: [Data]01%, [Network]99%
258
+ [ Thu Sep 8 02:45:23 2022 ] Eval epoch: 58
259
+ [ Thu Sep 8 02:53:13 2022 ] Epoch 58 Curr Acc: (35997/59477)60.52%
260
+ [ Thu Sep 8 02:53:13 2022 ] Epoch 57 Best Acc 61.48%
261
+ [ Thu Sep 8 02:53:13 2022 ] Training epoch: 59
262
+ [ Thu Sep 8 02:53:13 2022 ] Learning rate: 0.015
263
+ [ Thu Sep 8 02:57:36 2022 ] Mean training loss: 0.0936.
264
+ [ Thu Sep 8 02:57:36 2022 ] Time consumption: [Data]01%, [Network]99%
265
+ [ Thu Sep 8 02:57:36 2022 ] Eval epoch: 59
266
+ [ Thu Sep 8 03:05:27 2022 ] Epoch 59 Curr Acc: (36102/59477)60.70%
267
+ [ Thu Sep 8 03:05:27 2022 ] Epoch 57 Best Acc 61.48%
268
+ [ Thu Sep 8 03:05:27 2022 ] Training epoch: 60
269
+ [ Thu Sep 8 03:05:27 2022 ] Learning rate: 0.015
270
+ [ Thu Sep 8 03:09:50 2022 ] Mean training loss: 0.0901.
271
+ [ Thu Sep 8 03:09:50 2022 ] Time consumption: [Data]01%, [Network]99%
272
+ [ Thu Sep 8 03:09:50 2022 ] Eval epoch: 60
273
+ [ Thu Sep 8 03:17:41 2022 ] Epoch 60 Curr Acc: (36186/59477)60.84%
274
+ [ Thu Sep 8 03:17:41 2022 ] Epoch 57 Best Acc 61.48%
275
+ [ Thu Sep 8 03:17:41 2022 ] Training epoch: 61
276
+ [ Thu Sep 8 03:17:41 2022 ] Learning rate: 0.015
277
+ [ Thu Sep 8 03:22:05 2022 ] Mean training loss: 0.0733.
278
+ [ Thu Sep 8 03:22:05 2022 ] Time consumption: [Data]01%, [Network]99%
279
+ [ Thu Sep 8 03:22:05 2022 ] Eval epoch: 61
280
+ [ Thu Sep 8 03:29:55 2022 ] Epoch 61 Curr Acc: (35863/59477)60.30%
281
+ [ Thu Sep 8 03:29:55 2022 ] Epoch 57 Best Acc 61.48%
282
+ [ Thu Sep 8 03:29:55 2022 ] Training epoch: 62
283
+ [ Thu Sep 8 03:29:55 2022 ] Learning rate: 0.015
284
+ [ Thu Sep 8 03:34:18 2022 ] Mean training loss: 0.0733.
285
+ [ Thu Sep 8 03:34:18 2022 ] Time consumption: [Data]01%, [Network]99%
286
+ [ Thu Sep 8 03:34:19 2022 ] Eval epoch: 62
287
+ [ Thu Sep 8 03:42:09 2022 ] Epoch 62 Curr Acc: (36012/59477)60.55%
288
+ [ Thu Sep 8 03:42:09 2022 ] Epoch 57 Best Acc 61.48%
289
+ [ Thu Sep 8 03:42:09 2022 ] Training epoch: 63
290
+ [ Thu Sep 8 03:42:09 2022 ] Learning rate: 0.015
291
+ [ Thu Sep 8 03:46:31 2022 ] Mean training loss: 0.0674.
292
+ [ Thu Sep 8 03:46:31 2022 ] Time consumption: [Data]01%, [Network]99%
293
+ [ Thu Sep 8 03:46:31 2022 ] Eval epoch: 63
294
+ [ Thu Sep 8 03:54:22 2022 ] Epoch 63 Curr Acc: (35929/59477)60.41%
295
+ [ Thu Sep 8 03:54:22 2022 ] Epoch 57 Best Acc 61.48%
296
+ [ Thu Sep 8 03:54:22 2022 ] Training epoch: 64
297
+ [ Thu Sep 8 03:54:22 2022 ] Learning rate: 0.015
298
+ [ Thu Sep 8 03:58:45 2022 ] Mean training loss: 0.0624.
299
+ [ Thu Sep 8 03:58:45 2022 ] Time consumption: [Data]01%, [Network]99%
300
+ [ Thu Sep 8 03:58:45 2022 ] Eval epoch: 64
301
+ [ Thu Sep 8 04:06:36 2022 ] Epoch 64 Curr Acc: (35778/59477)60.15%
302
+ [ Thu Sep 8 04:06:36 2022 ] Epoch 57 Best Acc 61.48%
303
+ [ Thu Sep 8 04:06:36 2022 ] Training epoch: 65
304
+ [ Thu Sep 8 04:06:36 2022 ] Learning rate: 0.015
305
+ [ Thu Sep 8 04:11:00 2022 ] Mean training loss: 0.0549.
306
+ [ Thu Sep 8 04:11:00 2022 ] Time consumption: [Data]01%, [Network]99%
307
+ [ Thu Sep 8 04:11:00 2022 ] Eval epoch: 65
308
+ [ Thu Sep 8 04:18:50 2022 ] Epoch 65 Curr Acc: (36043/59477)60.60%
309
+ [ Thu Sep 8 04:18:50 2022 ] Epoch 57 Best Acc 61.48%
310
+ [ Thu Sep 8 04:18:50 2022 ] Training epoch: 66
311
+ [ Thu Sep 8 04:18:50 2022 ] Learning rate: 0.015
312
+ [ Thu Sep 8 04:23:13 2022 ] Mean training loss: 0.0514.
313
+ [ Thu Sep 8 04:23:13 2022 ] Time consumption: [Data]01%, [Network]99%
314
+ [ Thu Sep 8 04:23:13 2022 ] Eval epoch: 66
315
+ [ Thu Sep 8 04:31:04 2022 ] Epoch 66 Curr Acc: (35771/59477)60.14%
316
+ [ Thu Sep 8 04:31:04 2022 ] Epoch 57 Best Acc 61.48%
317
+ [ Thu Sep 8 04:31:04 2022 ] Training epoch: 67
318
+ [ Thu Sep 8 04:31:04 2022 ] Learning rate: 0.015
319
+ [ Thu Sep 8 04:35:28 2022 ] Mean training loss: 0.0475.
320
+ [ Thu Sep 8 04:35:28 2022 ] Time consumption: [Data]01%, [Network]99%
321
+ [ Thu Sep 8 04:35:28 2022 ] Eval epoch: 67
322
+ [ Thu Sep 8 04:43:19 2022 ] Epoch 67 Curr Acc: (35898/59477)60.36%
323
+ [ Thu Sep 8 04:43:19 2022 ] Epoch 57 Best Acc 61.48%
324
+ [ Thu Sep 8 04:43:19 2022 ] Training epoch: 68
325
+ [ Thu Sep 8 04:43:19 2022 ] Learning rate: 0.015
326
+ [ Thu Sep 8 04:47:42 2022 ] Mean training loss: 0.0568.
327
+ [ Thu Sep 8 04:47:42 2022 ] Time consumption: [Data]01%, [Network]99%
328
+ [ Thu Sep 8 04:47:42 2022 ] Eval epoch: 68
329
+ [ Thu Sep 8 04:55:33 2022 ] Epoch 68 Curr Acc: (35158/59477)59.11%
330
+ [ Thu Sep 8 04:55:33 2022 ] Epoch 57 Best Acc 61.48%
331
+ [ Thu Sep 8 04:55:33 2022 ] Training epoch: 69
332
+ [ Thu Sep 8 04:55:33 2022 ] Learning rate: 0.015
333
+ [ Thu Sep 8 04:59:55 2022 ] Mean training loss: 0.0533.
334
+ [ Thu Sep 8 04:59:55 2022 ] Time consumption: [Data]01%, [Network]99%
335
+ [ Thu Sep 8 04:59:55 2022 ] Eval epoch: 69
336
+ [ Thu Sep 8 05:07:46 2022 ] Epoch 69 Curr Acc: (35877/59477)60.32%
337
+ [ Thu Sep 8 05:07:46 2022 ] Epoch 57 Best Acc 61.48%
338
+ [ Thu Sep 8 05:07:46 2022 ] Training epoch: 70
339
+ [ Thu Sep 8 05:07:46 2022 ] Learning rate: 0.015
340
+ [ Thu Sep 8 05:12:07 2022 ] Mean training loss: 0.0444.
341
+ [ Thu Sep 8 05:12:07 2022 ] Time consumption: [Data]01%, [Network]99%
342
+ [ Thu Sep 8 05:12:07 2022 ] Eval epoch: 70
343
+ [ Thu Sep 8 05:19:58 2022 ] Epoch 70 Curr Acc: (35739/59477)60.09%
344
+ [ Thu Sep 8 05:19:58 2022 ] Epoch 57 Best Acc 61.48%
345
+ [ Thu Sep 8 05:19:58 2022 ] Training epoch: 71
346
+ [ Thu Sep 8 05:19:58 2022 ] Learning rate: 0.0015000000000000002
347
+ [ Thu Sep 8 05:24:20 2022 ] Mean training loss: 0.0331.
348
+ [ Thu Sep 8 05:24:20 2022 ] Time consumption: [Data]01%, [Network]99%
349
+ [ Thu Sep 8 05:24:20 2022 ] Eval epoch: 71
350
+ [ Thu Sep 8 05:32:11 2022 ] Epoch 71 Curr Acc: (36238/59477)60.93%
351
+ [ Thu Sep 8 05:32:11 2022 ] Epoch 57 Best Acc 61.48%
352
+ [ Thu Sep 8 05:32:11 2022 ] Training epoch: 72
353
+ [ Thu Sep 8 05:32:11 2022 ] Learning rate: 0.0015000000000000002
354
+ [ Thu Sep 8 05:36:32 2022 ] Mean training loss: 0.0255.
355
+ [ Thu Sep 8 05:36:32 2022 ] Time consumption: [Data]01%, [Network]99%
356
+ [ Thu Sep 8 05:36:32 2022 ] Eval epoch: 72
357
+ [ Thu Sep 8 05:44:23 2022 ] Epoch 72 Curr Acc: (36186/59477)60.84%
358
+ [ Thu Sep 8 05:44:23 2022 ] Epoch 57 Best Acc 61.48%
359
+ [ Thu Sep 8 05:44:23 2022 ] Training epoch: 73
360
+ [ Thu Sep 8 05:44:23 2022 ] Learning rate: 0.0015000000000000002
361
+ [ Thu Sep 8 05:48:44 2022 ] Mean training loss: 0.0223.
362
+ [ Thu Sep 8 05:48:44 2022 ] Time consumption: [Data]01%, [Network]99%
363
+ [ Thu Sep 8 05:48:45 2022 ] Eval epoch: 73
364
+ [ Thu Sep 8 05:56:35 2022 ] Epoch 73 Curr Acc: (36274/59477)60.99%
365
+ [ Thu Sep 8 05:56:35 2022 ] Epoch 57 Best Acc 61.48%
366
+ [ Thu Sep 8 05:56:35 2022 ] Training epoch: 74
367
+ [ Thu Sep 8 05:56:35 2022 ] Learning rate: 0.0015000000000000002
368
+ [ Thu Sep 8 06:00:58 2022 ] Mean training loss: 0.0225.
369
+ [ Thu Sep 8 06:00:58 2022 ] Time consumption: [Data]01%, [Network]99%
370
+ [ Thu Sep 8 06:00:58 2022 ] Eval epoch: 74
371
+ [ Thu Sep 8 06:08:48 2022 ] Epoch 74 Curr Acc: (36436/59477)61.26%
372
+ [ Thu Sep 8 06:08:48 2022 ] Epoch 57 Best Acc 61.48%
373
+ [ Thu Sep 8 06:08:48 2022 ] Training epoch: 75
374
+ [ Thu Sep 8 06:08:48 2022 ] Learning rate: 0.0015000000000000002
375
+ [ Thu Sep 8 06:13:09 2022 ] Mean training loss: 0.0216.
376
+ [ Thu Sep 8 06:13:09 2022 ] Time consumption: [Data]01%, [Network]99%
377
+ [ Thu Sep 8 06:13:10 2022 ] Eval epoch: 75
378
+ [ Thu Sep 8 06:21:00 2022 ] Epoch 75 Curr Acc: (35925/59477)60.40%
379
+ [ Thu Sep 8 06:21:00 2022 ] Epoch 57 Best Acc 61.48%
380
+ [ Thu Sep 8 06:21:00 2022 ] Training epoch: 76
381
+ [ Thu Sep 8 06:21:00 2022 ] Learning rate: 0.0015000000000000002
382
+ [ Thu Sep 8 06:25:23 2022 ] Mean training loss: 0.0199.
383
+ [ Thu Sep 8 06:25:23 2022 ] Time consumption: [Data]01%, [Network]99%
384
+ [ Thu Sep 8 06:25:23 2022 ] Eval epoch: 76
385
+ [ Thu Sep 8 06:33:13 2022 ] Epoch 76 Curr Acc: (36147/59477)60.77%
386
+ [ Thu Sep 8 06:33:13 2022 ] Epoch 57 Best Acc 61.48%
387
+ [ Thu Sep 8 06:33:13 2022 ] Training epoch: 77
388
+ [ Thu Sep 8 06:33:13 2022 ] Learning rate: 0.0015000000000000002
389
+ [ Thu Sep 8 06:37:36 2022 ] Mean training loss: 0.0186.
390
+ [ Thu Sep 8 06:37:36 2022 ] Time consumption: [Data]01%, [Network]99%
391
+ [ Thu Sep 8 06:37:36 2022 ] Eval epoch: 77
392
+ [ Thu Sep 8 06:45:27 2022 ] Epoch 77 Curr Acc: (36519/59477)61.40%
393
+ [ Thu Sep 8 06:45:27 2022 ] Epoch 57 Best Acc 61.48%
394
+ [ Thu Sep 8 06:45:27 2022 ] Training epoch: 78
395
+ [ Thu Sep 8 06:45:27 2022 ] Learning rate: 0.0015000000000000002
396
+ [ Thu Sep 8 06:49:50 2022 ] Mean training loss: 0.0201.
397
+ [ Thu Sep 8 06:49:50 2022 ] Time consumption: [Data]01%, [Network]99%
398
+ [ Thu Sep 8 06:49:50 2022 ] Eval epoch: 78
399
+ [ Thu Sep 8 06:57:41 2022 ] Epoch 78 Curr Acc: (36282/59477)61.00%
400
+ [ Thu Sep 8 06:57:41 2022 ] Epoch 57 Best Acc 61.48%
401
+ [ Thu Sep 8 06:57:41 2022 ] Training epoch: 79
402
+ [ Thu Sep 8 06:57:41 2022 ] Learning rate: 0.0015000000000000002
403
+ [ Thu Sep 8 07:02:04 2022 ] Mean training loss: 0.0191.
404
+ [ Thu Sep 8 07:02:04 2022 ] Time consumption: [Data]01%, [Network]99%
405
+ [ Thu Sep 8 07:02:04 2022 ] Eval epoch: 79
406
+ [ Thu Sep 8 07:09:55 2022 ] Epoch 79 Curr Acc: (35872/59477)60.31%
407
+ [ Thu Sep 8 07:09:55 2022 ] Epoch 57 Best Acc 61.48%
408
+ [ Thu Sep 8 07:09:55 2022 ] Training epoch: 80
409
+ [ Thu Sep 8 07:09:55 2022 ] Learning rate: 0.0015000000000000002
410
+ [ Thu Sep 8 07:14:17 2022 ] Mean training loss: 0.0175.
411
+ [ Thu Sep 8 07:14:17 2022 ] Time consumption: [Data]01%, [Network]99%
412
+ [ Thu Sep 8 07:14:17 2022 ] Eval epoch: 80
413
+ [ Thu Sep 8 07:22:08 2022 ] Epoch 80 Curr Acc: (36153/59477)60.78%
414
+ [ Thu Sep 8 07:22:08 2022 ] Epoch 57 Best Acc 61.48%
415
+ [ Thu Sep 8 07:22:08 2022 ] Training epoch: 81
416
+ [ Thu Sep 8 07:22:08 2022 ] Learning rate: 0.0015000000000000002
417
+ [ Thu Sep 8 07:26:32 2022 ] Mean training loss: 0.0167.
418
+ [ Thu Sep 8 07:26:32 2022 ] Time consumption: [Data]01%, [Network]99%
419
+ [ Thu Sep 8 07:26:32 2022 ] Eval epoch: 81
420
+ [ Thu Sep 8 07:34:22 2022 ] Epoch 81 Curr Acc: (36225/59477)60.91%
421
+ [ Thu Sep 8 07:34:22 2022 ] Epoch 57 Best Acc 61.48%
422
+ [ Thu Sep 8 07:34:22 2022 ] Training epoch: 82
423
+ [ Thu Sep 8 07:34:22 2022 ] Learning rate: 0.0015000000000000002
424
+ [ Thu Sep 8 07:38:45 2022 ] Mean training loss: 0.0176.
425
+ [ Thu Sep 8 07:38:45 2022 ] Time consumption: [Data]01%, [Network]99%
426
+ [ Thu Sep 8 07:38:45 2022 ] Eval epoch: 82
427
+ [ Thu Sep 8 07:46:35 2022 ] Epoch 82 Curr Acc: (36389/59477)61.18%
428
+ [ Thu Sep 8 07:46:35 2022 ] Epoch 57 Best Acc 61.48%
429
+ [ Thu Sep 8 07:46:35 2022 ] Training epoch: 83
430
+ [ Thu Sep 8 07:46:35 2022 ] Learning rate: 0.0015000000000000002
431
+ [ Thu Sep 8 07:50:58 2022 ] Mean training loss: 0.0177.
432
+ [ Thu Sep 8 07:50:58 2022 ] Time consumption: [Data]01%, [Network]99%
433
+ [ Thu Sep 8 07:50:58 2022 ] Eval epoch: 83
434
+ [ Thu Sep 8 07:58:48 2022 ] Epoch 83 Curr Acc: (35930/59477)60.41%
435
+ [ Thu Sep 8 07:58:48 2022 ] Epoch 57 Best Acc 61.48%
436
+ [ Thu Sep 8 07:58:48 2022 ] Training epoch: 84
437
+ [ Thu Sep 8 07:58:48 2022 ] Learning rate: 0.0015000000000000002
438
+ [ Thu Sep 8 08:03:11 2022 ] Mean training loss: 0.0180.
439
+ [ Thu Sep 8 08:03:11 2022 ] Time consumption: [Data]01%, [Network]99%
440
+ [ Thu Sep 8 08:03:11 2022 ] Eval epoch: 84
441
+ [ Thu Sep 8 08:11:00 2022 ] Epoch 84 Curr Acc: (36213/59477)60.89%
442
+ [ Thu Sep 8 08:11:00 2022 ] Epoch 57 Best Acc 61.48%
443
+ [ Thu Sep 8 08:11:00 2022 ] Training epoch: 85
444
+ [ Thu Sep 8 08:11:00 2022 ] Learning rate: 0.0015000000000000002
445
+ [ Thu Sep 8 08:15:23 2022 ] Mean training loss: 0.0158.
446
+ [ Thu Sep 8 08:15:23 2022 ] Time consumption: [Data]01%, [Network]99%
447
+ [ Thu Sep 8 08:15:23 2022 ] Eval epoch: 85
448
+ [ Thu Sep 8 08:23:13 2022 ] Epoch 85 Curr Acc: (36517/59477)61.40%
449
+ [ Thu Sep 8 08:23:13 2022 ] Epoch 57 Best Acc 61.48%
450
+ [ Thu Sep 8 08:23:13 2022 ] Training epoch: 86
451
+ [ Thu Sep 8 08:23:13 2022 ] Learning rate: 0.0015000000000000002
452
+ [ Thu Sep 8 08:27:36 2022 ] Mean training loss: 0.0171.
453
+ [ Thu Sep 8 08:27:36 2022 ] Time consumption: [Data]01%, [Network]99%
454
+ [ Thu Sep 8 08:27:36 2022 ] Eval epoch: 86
455
+ [ Thu Sep 8 08:35:25 2022 ] Epoch 86 Curr Acc: (36145/59477)60.77%
456
+ [ Thu Sep 8 08:35:25 2022 ] Epoch 57 Best Acc 61.48%
457
+ [ Thu Sep 8 08:35:25 2022 ] Training epoch: 87
458
+ [ Thu Sep 8 08:35:25 2022 ] Learning rate: 0.0015000000000000002
459
+ [ Thu Sep 8 08:39:48 2022 ] Mean training loss: 0.0155.
460
+ [ Thu Sep 8 08:39:48 2022 ] Time consumption: [Data]01%, [Network]99%
461
+ [ Thu Sep 8 08:39:48 2022 ] Eval epoch: 87
462
+ [ Thu Sep 8 08:47:38 2022 ] Epoch 87 Curr Acc: (36236/59477)60.92%
463
+ [ Thu Sep 8 08:47:38 2022 ] Epoch 57 Best Acc 61.48%
464
+ [ Thu Sep 8 08:47:38 2022 ] Training epoch: 88
465
+ [ Thu Sep 8 08:47:38 2022 ] Learning rate: 0.0015000000000000002
466
+ [ Thu Sep 8 08:52:01 2022 ] Mean training loss: 0.0173.
467
+ [ Thu Sep 8 08:52:01 2022 ] Time consumption: [Data]01%, [Network]99%
468
+ [ Thu Sep 8 08:52:01 2022 ] Eval epoch: 88
469
+ [ Thu Sep 8 08:59:51 2022 ] Epoch 88 Curr Acc: (36280/59477)61.00%
470
+ [ Thu Sep 8 08:59:51 2022 ] Epoch 57 Best Acc 61.48%
471
+ [ Thu Sep 8 08:59:51 2022 ] Training epoch: 89
472
+ [ Thu Sep 8 08:59:51 2022 ] Learning rate: 0.0015000000000000002
473
+ [ Thu Sep 8 09:04:13 2022 ] Mean training loss: 0.0153.
474
+ [ Thu Sep 8 09:04:13 2022 ] Time consumption: [Data]01%, [Network]99%
475
+ [ Thu Sep 8 09:04:13 2022 ] Eval epoch: 89
476
+ [ Thu Sep 8 09:12:03 2022 ] Epoch 89 Curr Acc: (36354/59477)61.12%
477
+ [ Thu Sep 8 09:12:03 2022 ] Epoch 57 Best Acc 61.48%
478
+ [ Thu Sep 8 09:12:03 2022 ] Training epoch: 90
479
+ [ Thu Sep 8 09:12:03 2022 ] Learning rate: 0.0015000000000000002
480
+ [ Thu Sep 8 09:16:26 2022 ] Mean training loss: 0.0148.
481
+ [ Thu Sep 8 09:16:26 2022 ] Time consumption: [Data]01%, [Network]99%
482
+ [ Thu Sep 8 09:16:26 2022 ] Eval epoch: 90
483
+ [ Thu Sep 8 09:24:16 2022 ] Epoch 90 Curr Acc: (36149/59477)60.78%
484
+ [ Thu Sep 8 09:24:16 2022 ] Epoch 57 Best Acc 61.48%
485
+ [ Thu Sep 8 09:24:16 2022 ] Training epoch: 91
486
+ [ Thu Sep 8 09:24:16 2022 ] Learning rate: 0.00015000000000000004
487
+ [ Thu Sep 8 09:28:39 2022 ] Mean training loss: 0.0165.
488
+ [ Thu Sep 8 09:28:39 2022 ] Time consumption: [Data]01%, [Network]99%
489
+ [ Thu Sep 8 09:28:39 2022 ] Eval epoch: 91
490
+ [ Thu Sep 8 09:36:29 2022 ] Epoch 91 Curr Acc: (36326/59477)61.08%
491
+ [ Thu Sep 8 09:36:29 2022 ] Epoch 57 Best Acc 61.48%
492
+ [ Thu Sep 8 09:36:29 2022 ] Training epoch: 92
493
+ [ Thu Sep 8 09:36:29 2022 ] Learning rate: 0.00015000000000000004
494
+ [ Thu Sep 8 09:40:52 2022 ] Mean training loss: 0.0171.
495
+ [ Thu Sep 8 09:40:52 2022 ] Time consumption: [Data]01%, [Network]99%
496
+ [ Thu Sep 8 09:40:52 2022 ] Eval epoch: 92
497
+ [ Thu Sep 8 09:48:43 2022 ] Epoch 92 Curr Acc: (36436/59477)61.26%
498
+ [ Thu Sep 8 09:48:43 2022 ] Epoch 57 Best Acc 61.48%
499
+ [ Thu Sep 8 09:48:43 2022 ] Training epoch: 93
500
+ [ Thu Sep 8 09:48:43 2022 ] Learning rate: 0.00015000000000000004
501
+ [ Thu Sep 8 09:53:06 2022 ] Mean training loss: 0.0143.
502
+ [ Thu Sep 8 09:53:06 2022 ] Time consumption: [Data]01%, [Network]99%
503
+ [ Thu Sep 8 09:53:06 2022 ] Eval epoch: 93
504
+ [ Thu Sep 8 10:00:57 2022 ] Epoch 93 Curr Acc: (36333/59477)61.09%
505
+ [ Thu Sep 8 10:00:57 2022 ] Epoch 57 Best Acc 61.48%
506
+ [ Thu Sep 8 10:00:57 2022 ] Training epoch: 94
507
+ [ Thu Sep 8 10:00:57 2022 ] Learning rate: 0.00015000000000000004
508
+ [ Thu Sep 8 10:05:19 2022 ] Mean training loss: 0.0159.
509
+ [ Thu Sep 8 10:05:19 2022 ] Time consumption: [Data]01%, [Network]99%
510
+ [ Thu Sep 8 10:05:19 2022 ] Eval epoch: 94
511
+ [ Thu Sep 8 10:13:10 2022 ] Epoch 94 Curr Acc: (36458/59477)61.30%
512
+ [ Thu Sep 8 10:13:10 2022 ] Epoch 57 Best Acc 61.48%
513
+ [ Thu Sep 8 10:13:10 2022 ] Training epoch: 95
514
+ [ Thu Sep 8 10:13:10 2022 ] Learning rate: 0.00015000000000000004
515
+ [ Thu Sep 8 10:17:32 2022 ] Mean training loss: 0.0151.
516
+ [ Thu Sep 8 10:17:32 2022 ] Time consumption: [Data]01%, [Network]99%
517
+ [ Thu Sep 8 10:17:32 2022 ] Eval epoch: 95
518
+ [ Thu Sep 8 10:25:23 2022 ] Epoch 95 Curr Acc: (36264/59477)60.97%
519
+ [ Thu Sep 8 10:25:23 2022 ] Epoch 57 Best Acc 61.48%
520
+ [ Thu Sep 8 10:25:23 2022 ] Training epoch: 96
521
+ [ Thu Sep 8 10:25:23 2022 ] Learning rate: 0.00015000000000000004
522
+ [ Thu Sep 8 10:29:46 2022 ] Mean training loss: 0.0157.
523
+ [ Thu Sep 8 10:29:46 2022 ] Time consumption: [Data]01%, [Network]99%
524
+ [ Thu Sep 8 10:29:46 2022 ] Eval epoch: 96
525
+ [ Thu Sep 8 10:37:37 2022 ] Epoch 96 Curr Acc: (36347/59477)61.11%
526
+ [ Thu Sep 8 10:37:37 2022 ] Epoch 57 Best Acc 61.48%
527
+ [ Thu Sep 8 10:37:37 2022 ] Training epoch: 97
528
+ [ Thu Sep 8 10:37:37 2022 ] Learning rate: 0.00015000000000000004
529
+ [ Thu Sep 8 10:42:00 2022 ] Mean training loss: 0.0149.
530
+ [ Thu Sep 8 10:42:00 2022 ] Time consumption: [Data]01%, [Network]99%
531
+ [ Thu Sep 8 10:42:00 2022 ] Eval epoch: 97
532
+ [ Thu Sep 8 10:49:51 2022 ] Epoch 97 Curr Acc: (36200/59477)60.86%
533
+ [ Thu Sep 8 10:49:51 2022 ] Epoch 57 Best Acc 61.48%
534
+ [ Thu Sep 8 10:49:51 2022 ] Training epoch: 98
535
+ [ Thu Sep 8 10:49:51 2022 ] Learning rate: 0.00015000000000000004
536
+ [ Thu Sep 8 10:54:14 2022 ] Mean training loss: 0.0158.
537
+ [ Thu Sep 8 10:54:14 2022 ] Time consumption: [Data]01%, [Network]99%
538
+ [ Thu Sep 8 10:54:14 2022 ] Eval epoch: 98
539
+ [ Thu Sep 8 11:02:05 2022 ] Epoch 98 Curr Acc: (36354/59477)61.12%
540
+ [ Thu Sep 8 11:02:05 2022 ] Epoch 57 Best Acc 61.48%
541
+ [ Thu Sep 8 11:02:05 2022 ] Training epoch: 99
542
+ [ Thu Sep 8 11:02:05 2022 ] Learning rate: 0.00015000000000000004
543
+ [ Thu Sep 8 11:06:29 2022 ] Mean training loss: 0.0161.
544
+ [ Thu Sep 8 11:06:29 2022 ] Time consumption: [Data]01%, [Network]99%
545
+ [ Thu Sep 8 11:06:29 2022 ] Eval epoch: 99
546
+ [ Thu Sep 8 11:14:20 2022 ] Epoch 99 Curr Acc: (36426/59477)61.24%
547
+ [ Thu Sep 8 11:14:20 2022 ] Epoch 57 Best Acc 61.48%
548
+ [ Thu Sep 8 11:14:20 2022 ] Training epoch: 100
549
+ [ Thu Sep 8 11:14:20 2022 ] Learning rate: 0.00015000000000000004
550
+ [ Thu Sep 8 11:18:42 2022 ] Mean training loss: 0.0150.
551
+ [ Thu Sep 8 11:18:42 2022 ] Time consumption: [Data]01%, [Network]99%
552
+ [ Thu Sep 8 11:18:43 2022 ] Eval epoch: 100
553
+ [ Thu Sep 8 11:26:34 2022 ] Epoch 100 Curr Acc: (36627/59477)61.58%
554
+ [ Thu Sep 8 11:26:34 2022 ] Epoch 100 Best Acc 61.58%
555
+ [ Thu Sep 8 11:26:34 2022 ] Training epoch: 101
556
+ [ Thu Sep 8 11:26:34 2022 ] Learning rate: 0.00015000000000000004
557
+ [ Thu Sep 8 11:30:57 2022 ] Mean training loss: 0.0143.
558
+ [ Thu Sep 8 11:30:57 2022 ] Time consumption: [Data]01%, [Network]99%
559
+ [ Thu Sep 8 11:30:57 2022 ] Eval epoch: 101
560
+ [ Thu Sep 8 11:38:48 2022 ] Epoch 101 Curr Acc: (36316/59477)61.06%
561
+ [ Thu Sep 8 11:38:48 2022 ] Epoch 100 Best Acc 61.58%
562
+ [ Thu Sep 8 11:38:48 2022 ] Training epoch: 102
563
+ [ Thu Sep 8 11:38:48 2022 ] Learning rate: 0.00015000000000000004
564
+ [ Thu Sep 8 11:43:11 2022 ] Mean training loss: 0.0143.
565
+ [ Thu Sep 8 11:43:11 2022 ] Time consumption: [Data]01%, [Network]99%
566
+ [ Thu Sep 8 11:43:11 2022 ] Eval epoch: 102
567
+ [ Thu Sep 8 11:51:02 2022 ] Epoch 102 Curr Acc: (36425/59477)61.24%
568
+ [ Thu Sep 8 11:51:02 2022 ] Epoch 100 Best Acc 61.58%
569
+ [ Thu Sep 8 11:51:02 2022 ] Training epoch: 103
570
+ [ Thu Sep 8 11:51:02 2022 ] Learning rate: 0.00015000000000000004
571
+ [ Thu Sep 8 11:55:25 2022 ] Mean training loss: 0.0154.
572
+ [ Thu Sep 8 11:55:25 2022 ] Time consumption: [Data]01%, [Network]99%
573
+ [ Thu Sep 8 11:55:25 2022 ] Eval epoch: 103
574
+ [ Thu Sep 8 12:03:16 2022 ] Epoch 103 Curr Acc: (36413/59477)61.22%
575
+ [ Thu Sep 8 12:03:16 2022 ] Epoch 100 Best Acc 61.58%
576
+ [ Thu Sep 8 12:03:16 2022 ] Training epoch: 104
577
+ [ Thu Sep 8 12:03:16 2022 ] Learning rate: 0.00015000000000000004
578
+ [ Thu Sep 8 12:07:39 2022 ] Mean training loss: 0.0147.
579
+ [ Thu Sep 8 12:07:39 2022 ] Time consumption: [Data]01%, [Network]99%
580
+ [ Thu Sep 8 12:07:39 2022 ] Eval epoch: 104
581
+ [ Thu Sep 8 12:15:31 2022 ] Epoch 104 Curr Acc: (36372/59477)61.15%
582
+ [ Thu Sep 8 12:15:31 2022 ] Epoch 100 Best Acc 61.58%
583
+ [ Thu Sep 8 12:15:31 2022 ] Training epoch: 105
584
+ [ Thu Sep 8 12:15:31 2022 ] Learning rate: 0.00015000000000000004
585
+ [ Thu Sep 8 12:19:52 2022 ] Mean training loss: 0.0152.
586
+ [ Thu Sep 8 12:19:52 2022 ] Time consumption: [Data]01%, [Network]99%
587
+ [ Thu Sep 8 12:19:52 2022 ] Eval epoch: 105
588
+ [ Thu Sep 8 12:27:43 2022 ] Epoch 105 Curr Acc: (36226/59477)60.91%
589
+ [ Thu Sep 8 12:27:43 2022 ] Epoch 100 Best Acc 61.58%
590
+ [ Thu Sep 8 12:27:43 2022 ] Training epoch: 106
591
+ [ Thu Sep 8 12:27:43 2022 ] Learning rate: 0.00015000000000000004
592
+ [ Thu Sep 8 12:32:06 2022 ] Mean training loss: 0.0142.
593
+ [ Thu Sep 8 12:32:06 2022 ] Time consumption: [Data]01%, [Network]99%
594
+ [ Thu Sep 8 12:32:06 2022 ] Eval epoch: 106
595
+ [ Thu Sep 8 12:39:58 2022 ] Epoch 106 Curr Acc: (36352/59477)61.12%
596
+ [ Thu Sep 8 12:39:58 2022 ] Epoch 100 Best Acc 61.58%
597
+ [ Thu Sep 8 12:39:58 2022 ] Training epoch: 107
598
+ [ Thu Sep 8 12:39:58 2022 ] Learning rate: 0.00015000000000000004
599
+ [ Thu Sep 8 12:44:21 2022 ] Mean training loss: 0.0143.
600
+ [ Thu Sep 8 12:44:21 2022 ] Time consumption: [Data]01%, [Network]99%
601
+ [ Thu Sep 8 12:44:21 2022 ] Eval epoch: 107
602
+ [ Thu Sep 8 12:52:12 2022 ] Epoch 107 Curr Acc: (36122/59477)60.73%
603
+ [ Thu Sep 8 12:52:12 2022 ] Epoch 100 Best Acc 61.58%
604
+ [ Thu Sep 8 12:52:12 2022 ] Training epoch: 108
605
+ [ Thu Sep 8 12:52:12 2022 ] Learning rate: 0.00015000000000000004
606
+ [ Thu Sep 8 12:56:35 2022 ] Mean training loss: 0.0145.
607
+ [ Thu Sep 8 12:56:35 2022 ] Time consumption: [Data]01%, [Network]99%
608
+ [ Thu Sep 8 12:56:36 2022 ] Eval epoch: 108
609
+ [ Thu Sep 8 13:04:27 2022 ] Epoch 108 Curr Acc: (36278/59477)61.00%
610
+ [ Thu Sep 8 13:04:27 2022 ] Epoch 100 Best Acc 61.58%
611
+ [ Thu Sep 8 13:04:27 2022 ] Training epoch: 109
612
+ [ Thu Sep 8 13:04:27 2022 ] Learning rate: 0.00015000000000000004
613
+ [ Thu Sep 8 13:08:50 2022 ] Mean training loss: 0.0153.
614
+ [ Thu Sep 8 13:08:50 2022 ] Time consumption: [Data]01%, [Network]99%
615
+ [ Thu Sep 8 13:08:50 2022 ] Eval epoch: 109
616
+ [ Thu Sep 8 13:16:41 2022 ] Epoch 109 Curr Acc: (36406/59477)61.21%
617
+ [ Thu Sep 8 13:16:41 2022 ] Epoch 100 Best Acc 61.58%
618
+ [ Thu Sep 8 13:16:41 2022 ] Training epoch: 110
619
+ [ Thu Sep 8 13:16:41 2022 ] Learning rate: 0.00015000000000000004
620
+ [ Thu Sep 8 13:21:05 2022 ] Mean training loss: 0.0155.
621
+ [ Thu Sep 8 13:21:05 2022 ] Time consumption: [Data]01%, [Network]99%
622
+ [ Thu Sep 8 13:21:05 2022 ] Eval epoch: 110
623
+ [ Thu Sep 8 13:28:56 2022 ] Epoch 110 Curr Acc: (36290/59477)61.02%
624
+ [ Thu Sep 8 13:28:56 2022 ] Epoch 100 Best Acc 61.58%
625
+ [ Thu Sep 8 13:28:56 2022 ] epoch: 100, best accuracy: 0.6158178791801873
626
+ [ Thu Sep 8 13:28:56 2022 ] Experiment: ./work_dir/ntu120/xset_j
627
+ [ Thu Sep 8 13:28:56 2022 ] # generator parameters: 2.922995 M.
628
+ [ Thu Sep 8 13:28:56 2022 ] Load weights from ./runs/ntu120/xset_j/runs-99-129700.pt.
629
+ [ Thu Sep 8 13:28:57 2022 ] Eval epoch: 1
630
+ [ Thu Sep 8 13:36:47 2022 ] Epoch 1 Curr Acc: (36627/59477)61.58%
631
+ [ Thu Sep 8 13:36:47 2022 ] Epoch 100 Best Acc 61.58%
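
The learning-rate values printed in this log follow a linear warm-up plus step-decay schedule: the rate ramps to `base_lr` over `warm_up_epoch` epochs, then is multiplied by 0.1 at each milestone in `step`. A small sketch consistent with the logged values (an illustration, not code from this repo):

    def lr_at(epoch, base_lr=0.15, warm_up_epoch=10, step=(50, 70, 90)):
        """Learning rate for a 1-indexed epoch, matching the values in log.txt."""
        if epoch <= warm_up_epoch:
            return base_lr * epoch / warm_up_epoch              # 0.015 at epoch 1, 0.15 at epoch 10
        return base_lr * (0.1 ** sum(epoch > s for s in step))  # 0.015 after 50, 0.0015 after 70, ...

    assert abs(lr_at(1) - 0.015) < 1e-9
    assert abs(lr_at(51) - 0.015) < 1e-9
    assert abs(lr_at(71) - 0.0015) < 1e-9
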
ckpt/Others/MST-GCN/ntu120_xset/xset_jm/AEMST_GCN.py ADDED
@@ -0,0 +1,168 @@
1
+ import torch
2
+ import torch.nn as nn
3
+ import torch.nn.functional as F
4
+
5
+ import numpy as np
6
+ import math
7
+
8
+ import sys
9
+ sys.path.append('../')
10
+ from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \
11
+ MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \
12
+ MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer
13
+ from model.activations import Activations
14
+ from model.utils import import_class, conv_branch_init, conv_init, bn_init
15
+ from model.attentions import Attention_Layer
16
+
17
+ # import model.attentions
18
+
19
+ __block_type__ = {
20
+ 'basic': (Basic_GCN_layer, Basic_TCN_layer),
21
+ 'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer),
22
+ 'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer),
23
+ 'ms': (MS_GCN_layer, MS_TCN_layer),
24
+ 'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer),
25
+ }
26
+
27
+
28
+ class Model(nn.Module):
29
+ def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten,
30
+ **kwargs):
31
+ super(Model, self).__init__()
32
+ kwargs['act'] = Activations(kwargs['act'])
33
+ atten = None if atten == 'None' else atten
34
+ if graph is None:
35
+ raise ValueError()
36
+ else:
37
+ Graph = import_class(graph)
38
+ self.graph = Graph(**graph_args)
39
+ A = self.graph.A
40
+
41
+ self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point)
42
+
43
+ self.layers = nn.ModuleList()
44
+
45
+ for i, block in enumerate(block_args):
46
+ if i == 0:
47
+ self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2],
48
+ kernel_size=kernel_size, stride=block[3], A=A, block_type='basic',
49
+ atten=None, **kwargs))
50
+ else:
51
+ self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2],
52
+ kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type,
53
+ atten=atten, **kwargs))
54
+
55
+ self.gap = nn.AdaptiveAvgPool2d(1)
56
+ self.fc = nn.Linear(block_args[-1][1], num_class)
57
+
58
+ for m in self.modules():
59
+ if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer):
60
+ for mm in m.modules():
61
+ if isinstance(mm, nn.Conv2d):
62
+ conv_branch_init(mm, self.graph.A.shape[0])
63
+ if isinstance(mm, nn.BatchNorm2d):
64
+ bn_init(mm, 1)
65
+ elif isinstance(m, nn.Conv2d):
66
+ conv_init(m)
67
+ elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d):
68
+ bn_init(m, 1)
69
+ elif isinstance(m, nn.Linear):
70
+ nn.init.normal_(m.weight, 0, math.sqrt(2. / num_class))
71
+
72
+ def forward(self, x):
73
+ N, C, T, V, M = x.size()
74
+
75
+ x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T) # N C T V M --> N M V C T
76
+ x = self.data_bn(x)
77
+ x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
78
+
79
+ for i, layer in enumerate(self.layers):
80
+ x = layer(x)
81
+
82
+ features = x
83
+
84
+ x = self.gap(x).view(N, M, -1).mean(dim=1)
85
+ x = self.fc(x)
86
+
87
+ return features, x
88
+
89
+
90
+ class MST_GCN_block(nn.Module):
91
+ def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs):
92
+ super(MST_GCN_block, self).__init__()
93
+ self.atten = atten
94
+ self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A,
95
+ residual=residual, **kwargs)
96
+ self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride,
97
+ residual=residual, **kwargs)
98
+ if atten is not None:
99
+ self.att = Attention_Layer(out_channels, atten, **kwargs)
100
+
101
+ def forward(self, x):
102
+ return self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x))
103
+
104
+
105
+ if __name__ == '__main__':
106
+ import sys
107
+ import time
108
+
109
+ parts = [
110
+ np.array([5, 6, 7, 8, 22, 23]) - 1, # left_arm
111
+ np.array([9, 10, 11, 12, 24, 25]) - 1, # right_arm
112
+ np.array([13, 14, 15, 16]) - 1, # left_leg
113
+ np.array([17, 18, 19, 20]) - 1, # right_leg
114
+ np.array([1, 2, 3, 4, 21]) - 1 # torso
115
+ ]
116
+
117
+ warmup_iter = 3
118
+ test_iter = 10
119
+ sys.path.append('/home/chenzhan/mywork/MST-GCN/')
120
+ from thop import profile
121
+ basic_channels = 112
122
+ cfgs = {
123
+ 'num_class': 2,
124
+ 'num_point': 25,
125
+ 'num_person': 1,
126
+ 'block_args': [[2, basic_channels, False, 1],
127
+ [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1],
128
+ [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1],
129
+ [basic_channels*2, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]],
130
+ 'graph': 'graph.ntu_rgb_d.Graph',
131
+ 'graph_args': {'labeling_mode': 'spatial'},
132
+ 'kernel_size': 9,
133
+ 'block_type': 'ms',
134
+ 'reduct_ratio': 2,
135
+ 'expand_ratio': 0,
136
+ 't_scale': 4,
137
+ 'layer_type': 'sep',
138
+ 'act': 'relu',
139
+ 's_scale': 4,
140
+ 'atten': 'stcja',
141
+ 'bias': True,
142
+ 'parts': parts
143
+ }
144
+
145
+ model = Model(**cfgs)
146
+
147
+ N, C, T, V, M = 4, 2, 16, 25, 1
148
+ inputs = torch.rand(N, C, T, V, M)
149
+
150
+ for i in range(warmup_iter + test_iter):
151
+ if i == warmup_iter:
152
+ start_time = time.time()
153
+ outputs = model(inputs)
154
+ end_time = time.time()
155
+
156
+ total_time = end_time - start_time
157
+ print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format(
158
+ total_time, test_iter, total_time/test_iter/N))
159
+
160
+ print(outputs.size())
161
+
162
+ hereflops, params = profile(model, inputs=(inputs,), verbose=False)
163
+ print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N))
164
+ print('# Params is {} M'.format(sum(param.numel() for param in model.parameters()) / 10 ** 6))
165
+
166
+
167
+
168
+
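
A note on the input layout used by `Model.forward` above: skeleton clips arrive as (N, C, T, V, M) = (batch, channels, frames, joints, persons). They are flattened to (N, M*V*C, T) so that `data_bn` normalizes every person/joint/channel combination over time, then reshaped to (N*M, C, T, V) so each person is processed as an independent sample. A small self-contained sketch of that round trip (illustration only; the 300-frame length is an assumption):

    import torch

    N, C, T, V, M = 4, 3, 300, 25, 2  # joint modality: 3 channels, 25 NTU joints, 2 persons
    x = torch.rand(N, C, T, V, M)

    # N C T V M -> N M V C T -> N (M*V*C) T, the shape expected by nn.BatchNorm1d(M*C*V)
    flat = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)

    # back to per-person samples: N M V C T -> N M C T V -> (N*M) C T V
    per_person = flat.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)

    print(flat.shape, per_person.shape)  # torch.Size([4, 150, 300]) torch.Size([8, 3, 300, 25])
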
ckpt/Others/MST-GCN/ntu120_xset/xset_jm/config.yaml ADDED
@@ -0,0 +1,107 @@
1
+ base_lr: 0.15
2
+ batch_size: 8
3
+ config: config/ntu120/xset_jm.yaml
4
+ device:
5
+ - 0
6
+ eval_interval: 5
7
+ feeder: feeders.feeder.Feeder
8
+ ignore_weights: []
9
+ local_rank: 0
10
+ log_interval: 100
11
+ model: model.AEMST_GCN.Model
12
+ model_args:
13
+ act: relu
14
+ atten: None
15
+ bias: true
16
+ block_args:
17
+ - - 3
18
+ - 112
19
+ - false
20
+ - 1
21
+ - - 112
22
+ - 112
23
+ - true
24
+ - 1
25
+ - - 112
26
+ - 112
27
+ - true
28
+ - 1
29
+ - - 112
30
+ - 112
31
+ - true
32
+ - 1
33
+ - - 112
34
+ - 224
35
+ - true
36
+ - 2
37
+ - - 224
38
+ - 224
39
+ - true
40
+ - 1
41
+ - - 224
42
+ - 224
43
+ - true
44
+ - 1
45
+ - - 224
46
+ - 448
47
+ - true
48
+ - 2
49
+ - - 448
50
+ - 448
51
+ - true
52
+ - 1
53
+ - - 448
54
+ - 448
55
+ - true
56
+ - 1
57
+ block_type: ms
58
+ expand_ratio: 0
59
+ graph: graph.ntu_rgb_d.Graph
60
+ graph_args:
61
+ labeling_mode: spatial
62
+ kernel_size: 9
63
+ layer_type: basic
64
+ num_class: 120
65
+ num_person: 2
66
+ num_point: 25
67
+ reduct_ratio: 2
68
+ s_scale: 4
69
+ t_scale: 4
70
+ model_path: ''
71
+ model_saved_name: ./runs/ntu120/xset_jm/runs
72
+ nesterov: true
73
+ num_epoch: 110
74
+ num_worker: 32
75
+ only_train_epoch: 0
76
+ only_train_part: false
77
+ optimizer: SGD
78
+ phase: train
79
+ print_log: true
80
+ save_interval: 1
81
+ save_score: true
82
+ seed: 1
83
+ show_topk:
84
+ - 1
85
+ - 5
86
+ start_epoch: 0
87
+ step:
88
+ - 50
89
+ - 70
90
+ - 90
91
+ test_batch_size: 64
92
+ test_feeder_args:
93
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_joint_motion.npy
94
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl
95
+ train_feeder_args:
96
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_joint_motion.npy
97
+ debug: false
98
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl
99
+ normalization: false
100
+ random_choose: false
101
+ random_move: false
102
+ random_shift: false
103
+ window_size: -1
104
+ warm_up_epoch: 10
105
+ weight_decay: 0.0001
106
+ weights: null
107
+ work_dir: ./work_dir/ntu120/xset_jm
ckpt/Others/MST-GCN/ntu120_xset/xset_jm/epoch1_test_score.pkl ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f7933a777dd27a03db074d25e7aa8607a961bc65883dbcfcfae7fb523d2ac29f
3
+ size 34946665
ckpt/Others/MST-GCN/ntu120_xset/xset_jm/log.txt ADDED
@@ -0,0 +1,631 @@
1
+ [ Wed Sep 7 21:35:56 2022 ] # generator parameters: 2.922995 M.
2
+ [ Wed Sep 7 21:35:57 2022 ] Parameters:
3
+ {'work_dir': './work_dir/ntu120/xset_jm', 'model_saved_name': './runs/ntu120/xset_jm/runs', 'config': 'config/ntu120/xset_jm.yaml', 'phase': 'train', 'save_score': True, 'seed': 1, 'log_interval': 100, 'save_interval': 1, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_data_joint_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_data_joint_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xset/val_label.pkl'}, 'model': 'model.AEMST_GCN.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'block_args': [[3, 112, False, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 224, True, 2], [224, 224, True, 1], [224, 224, True, 1], [224, 448, True, 2], [448, 448, True, 1], [448, 448, True, 1]], 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}, 'kernel_size': 9, 'block_type': 'ms', 'reduct_ratio': 2, 'expand_ratio': 0, 's_scale': 4, 't_scale': 4, 'layer_type': 'basic', 'act': 'relu', 'atten': 'None', 'bias': True}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.15, 'step': [50, 70, 90], 'device': [0], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 8, 'test_batch_size': 64, 'start_epoch': 0, 'model_path': '', 'num_epoch': 110, 'weight_decay': 0.0001, 'only_train_part': False, 'only_train_epoch': 0, 'warm_up_epoch': 10, 'local_rank': 0}
4
+
5
+ [ Wed Sep 7 21:35:57 2022 ] Training epoch: 1
6
+ [ Wed Sep 7 21:35:57 2022 ] Learning rate: 0.015
7
+ [ Wed Sep 7 21:40:23 2022 ] Mean training loss: 3.7301.
8
+ [ Wed Sep 7 21:40:23 2022 ] Time consumption: [Data]01%, [Network]99%
9
+ [ Wed Sep 7 21:40:23 2022 ] Training epoch: 2
10
+ [ Wed Sep 7 21:40:23 2022 ] Learning rate: 0.03
11
+ [ Wed Sep 7 21:44:51 2022 ] Mean training loss: 2.7842.
12
+ [ Wed Sep 7 21:44:51 2022 ] Time consumption: [Data]01%, [Network]99%
13
+ [ Wed Sep 7 21:44:51 2022 ] Training epoch: 3
14
+ [ Wed Sep 7 21:44:51 2022 ] Learning rate: 0.045
15
+ [ Wed Sep 7 21:49:19 2022 ] Mean training loss: 2.2637.
16
+ [ Wed Sep 7 21:49:19 2022 ] Time consumption: [Data]01%, [Network]99%
17
+ [ Wed Sep 7 21:49:19 2022 ] Training epoch: 4
18
+ [ Wed Sep 7 21:49:19 2022 ] Learning rate: 0.06
19
+ [ Wed Sep 7 21:53:47 2022 ] Mean training loss: 1.9632.
20
+ [ Wed Sep 7 21:53:47 2022 ] Time consumption: [Data]01%, [Network]99%
21
+ [ Wed Sep 7 21:53:47 2022 ] Training epoch: 5
22
+ [ Wed Sep 7 21:53:47 2022 ] Learning rate: 0.075
23
+ [ Wed Sep 7 21:58:16 2022 ] Mean training loss: 1.7516.
24
+ [ Wed Sep 7 21:58:16 2022 ] Time consumption: [Data]01%, [Network]99%
25
+ [ Wed Sep 7 21:58:16 2022 ] Training epoch: 6
26
+ [ Wed Sep 7 21:58:16 2022 ] Learning rate: 0.09
27
+ [ Wed Sep 7 22:02:44 2022 ] Mean training loss: 1.6499.
28
+ [ Wed Sep 7 22:02:44 2022 ] Time consumption: [Data]01%, [Network]99%
29
+ [ Wed Sep 7 22:02:44 2022 ] Training epoch: 7
30
+ [ Wed Sep 7 22:02:44 2022 ] Learning rate: 0.10500000000000001
31
+ [ Wed Sep 7 22:07:13 2022 ] Mean training loss: 1.5352.
32
+ [ Wed Sep 7 22:07:13 2022 ] Time consumption: [Data]01%, [Network]99%
33
+ [ Wed Sep 7 22:07:13 2022 ] Training epoch: 8
34
+ [ Wed Sep 7 22:07:13 2022 ] Learning rate: 0.12
35
+ [ Wed Sep 7 22:11:41 2022 ] Mean training loss: 1.4923.
36
+ [ Wed Sep 7 22:11:41 2022 ] Time consumption: [Data]01%, [Network]99%
37
+ [ Wed Sep 7 22:11:41 2022 ] Training epoch: 9
38
+ [ Wed Sep 7 22:11:41 2022 ] Learning rate: 0.13499999999999998
39
+ [ Wed Sep 7 22:16:10 2022 ] Mean training loss: 1.4588.
40
+ [ Wed Sep 7 22:16:10 2022 ] Time consumption: [Data]01%, [Network]99%
41
+ [ Wed Sep 7 22:16:10 2022 ] Training epoch: 10
42
+ [ Wed Sep 7 22:16:10 2022 ] Learning rate: 0.15
43
+ [ Wed Sep 7 22:20:37 2022 ] Mean training loss: 1.4317.
44
+ [ Wed Sep 7 22:20:37 2022 ] Time consumption: [Data]01%, [Network]99%
45
+ [ Wed Sep 7 22:20:37 2022 ] Training epoch: 11
46
+ [ Wed Sep 7 22:20:37 2022 ] Learning rate: 0.15
47
+ [ Wed Sep 7 22:25:06 2022 ] Mean training loss: 1.3728.
48
+ [ Wed Sep 7 22:25:06 2022 ] Time consumption: [Data]01%, [Network]99%
49
+ [ Wed Sep 7 22:25:06 2022 ] Training epoch: 12
50
+ [ Wed Sep 7 22:25:06 2022 ] Learning rate: 0.15
51
+ [ Wed Sep 7 22:29:34 2022 ] Mean training loss: 1.3204.
52
+ [ Wed Sep 7 22:29:34 2022 ] Time consumption: [Data]01%, [Network]99%
53
+ [ Wed Sep 7 22:29:34 2022 ] Training epoch: 13
54
+ [ Wed Sep 7 22:29:34 2022 ] Learning rate: 0.15
55
+ [ Wed Sep 7 22:34:01 2022 ] Mean training loss: 1.2679.
56
+ [ Wed Sep 7 22:34:01 2022 ] Time consumption: [Data]01%, [Network]99%
57
+ [ Wed Sep 7 22:34:01 2022 ] Training epoch: 14
58
+ [ Wed Sep 7 22:34:01 2022 ] Learning rate: 0.15
59
+ [ Wed Sep 7 22:38:29 2022 ] Mean training loss: 1.2497.
60
+ [ Wed Sep 7 22:38:29 2022 ] Time consumption: [Data]01%, [Network]99%
61
+ [ Wed Sep 7 22:38:29 2022 ] Training epoch: 15
62
+ [ Wed Sep 7 22:38:29 2022 ] Learning rate: 0.15
63
+ [ Wed Sep 7 22:42:56 2022 ] Mean training loss: 1.2155.
64
+ [ Wed Sep 7 22:42:56 2022 ] Time consumption: [Data]01%, [Network]99%
65
+ [ Wed Sep 7 22:42:56 2022 ] Training epoch: 16
66
+ [ Wed Sep 7 22:42:56 2022 ] Learning rate: 0.15
67
+ [ Wed Sep 7 22:47:23 2022 ] Mean training loss: 1.1775.
68
+ [ Wed Sep 7 22:47:23 2022 ] Time consumption: [Data]01%, [Network]99%
69
+ [ Wed Sep 7 22:47:23 2022 ] Training epoch: 17
70
+ [ Wed Sep 7 22:47:23 2022 ] Learning rate: 0.15
71
+ [ Wed Sep 7 22:51:52 2022 ] Mean training loss: 1.1661.
72
+ [ Wed Sep 7 22:51:52 2022 ] Time consumption: [Data]01%, [Network]99%
73
+ [ Wed Sep 7 22:51:52 2022 ] Training epoch: 18
74
+ [ Wed Sep 7 22:51:52 2022 ] Learning rate: 0.15
75
+ [ Wed Sep 7 22:56:19 2022 ] Mean training loss: 1.1315.
76
+ [ Wed Sep 7 22:56:19 2022 ] Time consumption: [Data]01%, [Network]99%
77
+ [ Wed Sep 7 22:56:19 2022 ] Training epoch: 19
78
+ [ Wed Sep 7 22:56:19 2022 ] Learning rate: 0.15
79
+ [ Wed Sep 7 23:00:47 2022 ] Mean training loss: 1.1277.
80
+ [ Wed Sep 7 23:00:47 2022 ] Time consumption: [Data]01%, [Network]99%
81
+ [ Wed Sep 7 23:00:47 2022 ] Training epoch: 20
82
+ [ Wed Sep 7 23:00:47 2022 ] Learning rate: 0.15
83
+ [ Wed Sep 7 23:05:14 2022 ] Mean training loss: 1.1047.
84
+ [ Wed Sep 7 23:05:14 2022 ] Time consumption: [Data]01%, [Network]99%
85
+ [ Wed Sep 7 23:05:14 2022 ] Training epoch: 21
86
+ [ Wed Sep 7 23:05:14 2022 ] Learning rate: 0.15
87
+ [ Wed Sep 7 23:09:42 2022 ] Mean training loss: 1.0754.
88
+ [ Wed Sep 7 23:09:42 2022 ] Time consumption: [Data]01%, [Network]99%
89
+ [ Wed Sep 7 23:09:42 2022 ] Training epoch: 22
90
+ [ Wed Sep 7 23:09:42 2022 ] Learning rate: 0.15
91
+ [ Wed Sep 7 23:14:10 2022 ] Mean training loss: 1.0693.
92
+ [ Wed Sep 7 23:14:10 2022 ] Time consumption: [Data]01%, [Network]99%
93
+ [ Wed Sep 7 23:14:10 2022 ] Training epoch: 23
94
+ [ Wed Sep 7 23:14:10 2022 ] Learning rate: 0.15
95
+ [ Wed Sep 7 23:18:37 2022 ] Mean training loss: 1.0565.
96
+ [ Wed Sep 7 23:18:37 2022 ] Time consumption: [Data]01%, [Network]99%
97
+ [ Wed Sep 7 23:18:37 2022 ] Training epoch: 24
98
+ [ Wed Sep 7 23:18:37 2022 ] Learning rate: 0.15
99
+ [ Wed Sep 7 23:23:05 2022 ] Mean training loss: 1.0643.
100
+ [ Wed Sep 7 23:23:05 2022 ] Time consumption: [Data]01%, [Network]99%
101
+ [ Wed Sep 7 23:23:05 2022 ] Training epoch: 25
102
+ [ Wed Sep 7 23:23:05 2022 ] Learning rate: 0.15
103
+ [ Wed Sep 7 23:27:33 2022 ] Mean training loss: 1.0342.
104
+ [ Wed Sep 7 23:27:33 2022 ] Time consumption: [Data]01%, [Network]99%
105
+ [ Wed Sep 7 23:27:33 2022 ] Training epoch: 26
106
+ [ Wed Sep 7 23:27:33 2022 ] Learning rate: 0.15
107
+ [ Wed Sep 7 23:32:01 2022 ] Mean training loss: 1.0300.
108
+ [ Wed Sep 7 23:32:01 2022 ] Time consumption: [Data]01%, [Network]99%
109
+ [ Wed Sep 7 23:32:01 2022 ] Training epoch: 27
110
+ [ Wed Sep 7 23:32:01 2022 ] Learning rate: 0.15
111
+ [ Wed Sep 7 23:36:30 2022 ] Mean training loss: 1.0195.
112
+ [ Wed Sep 7 23:36:30 2022 ] Time consumption: [Data]01%, [Network]99%
113
+ [ Wed Sep 7 23:36:30 2022 ] Training epoch: 28
114
+ [ Wed Sep 7 23:36:30 2022 ] Learning rate: 0.15
115
+ [ Wed Sep 7 23:40:58 2022 ] Mean training loss: 1.0123.
116
+ [ Wed Sep 7 23:40:58 2022 ] Time consumption: [Data]01%, [Network]99%
117
+ [ Wed Sep 7 23:40:58 2022 ] Training epoch: 29
118
+ [ Wed Sep 7 23:40:58 2022 ] Learning rate: 0.15
119
+ [ Wed Sep 7 23:45:26 2022 ] Mean training loss: 0.9886.
120
+ [ Wed Sep 7 23:45:26 2022 ] Time consumption: [Data]01%, [Network]99%
121
+ [ Wed Sep 7 23:45:26 2022 ] Training epoch: 30
122
+ [ Wed Sep 7 23:45:26 2022 ] Learning rate: 0.15
123
+ [ Wed Sep 7 23:49:54 2022 ] Mean training loss: 0.9750.
124
+ [ Wed Sep 7 23:49:54 2022 ] Time consumption: [Data]01%, [Network]99%
125
+ [ Wed Sep 7 23:49:54 2022 ] Training epoch: 31
126
+ [ Wed Sep 7 23:49:54 2022 ] Learning rate: 0.15
127
+ [ Wed Sep 7 23:54:22 2022 ] Mean training loss: 0.9614.
128
+ [ Wed Sep 7 23:54:22 2022 ] Time consumption: [Data]01%, [Network]99%
129
+ [ Wed Sep 7 23:54:22 2022 ] Training epoch: 32
130
+ [ Wed Sep 7 23:54:22 2022 ] Learning rate: 0.15
131
+ [ Wed Sep 7 23:58:50 2022 ] Mean training loss: 0.9800.
132
+ [ Wed Sep 7 23:58:50 2022 ] Time consumption: [Data]01%, [Network]99%
133
+ [ Wed Sep 7 23:58:50 2022 ] Training epoch: 33
134
+ [ Wed Sep 7 23:58:50 2022 ] Learning rate: 0.15
135
+ [ Thu Sep 8 00:03:18 2022 ] Mean training loss: 0.9875.
136
+ [ Thu Sep 8 00:03:18 2022 ] Time consumption: [Data]01%, [Network]99%
137
+ [ Thu Sep 8 00:03:18 2022 ] Training epoch: 34
138
+ [ Thu Sep 8 00:03:18 2022 ] Learning rate: 0.15
139
+ [ Thu Sep 8 00:07:45 2022 ] Mean training loss: 0.9604.
140
+ [ Thu Sep 8 00:07:45 2022 ] Time consumption: [Data]01%, [Network]99%
141
+ [ Thu Sep 8 00:07:45 2022 ] Training epoch: 35
142
+ [ Thu Sep 8 00:07:45 2022 ] Learning rate: 0.15
143
+ [ Thu Sep 8 00:12:12 2022 ] Mean training loss: 0.9519.
144
+ [ Thu Sep 8 00:12:12 2022 ] Time consumption: [Data]01%, [Network]99%
145
+ [ Thu Sep 8 00:12:12 2022 ] Training epoch: 36
146
+ [ Thu Sep 8 00:12:12 2022 ] Learning rate: 0.15
147
+ [ Thu Sep 8 00:16:39 2022 ] Mean training loss: 0.9461.
148
+ [ Thu Sep 8 00:16:39 2022 ] Time consumption: [Data]01%, [Network]99%
149
+ [ Thu Sep 8 00:16:39 2022 ] Training epoch: 37
150
+ [ Thu Sep 8 00:16:39 2022 ] Learning rate: 0.15
151
+ [ Thu Sep 8 00:21:05 2022 ] Mean training loss: 0.9580.
152
+ [ Thu Sep 8 00:21:05 2022 ] Time consumption: [Data]01%, [Network]99%
153
+ [ Thu Sep 8 00:21:05 2022 ] Training epoch: 38
154
+ [ Thu Sep 8 00:21:05 2022 ] Learning rate: 0.15
155
+ [ Thu Sep 8 00:25:32 2022 ] Mean training loss: 0.9211.
156
+ [ Thu Sep 8 00:25:32 2022 ] Time consumption: [Data]01%, [Network]99%
157
+ [ Thu Sep 8 00:25:32 2022 ] Training epoch: 39
158
+ [ Thu Sep 8 00:25:32 2022 ] Learning rate: 0.15
159
+ [ Thu Sep 8 00:29:59 2022 ] Mean training loss: 0.9380.
160
+ [ Thu Sep 8 00:29:59 2022 ] Time consumption: [Data]01%, [Network]99%
161
+ [ Thu Sep 8 00:29:59 2022 ] Training epoch: 40
162
+ [ Thu Sep 8 00:29:59 2022 ] Learning rate: 0.15
163
+ [ Thu Sep 8 00:34:26 2022 ] Mean training loss: 0.9347.
164
+ [ Thu Sep 8 00:34:26 2022 ] Time consumption: [Data]01%, [Network]99%
165
+ [ Thu Sep 8 00:34:26 2022 ] Training epoch: 41
166
+ [ Thu Sep 8 00:34:26 2022 ] Learning rate: 0.15
167
+ [ Thu Sep 8 00:38:53 2022 ] Mean training loss: 0.9027.
168
+ [ Thu Sep 8 00:38:53 2022 ] Time consumption: [Data]01%, [Network]99%
169
+ [ Thu Sep 8 00:38:53 2022 ] Training epoch: 42
170
+ [ Thu Sep 8 00:38:53 2022 ] Learning rate: 0.15
171
+ [ Thu Sep 8 00:43:20 2022 ] Mean training loss: 0.9365.
172
+ [ Thu Sep 8 00:43:20 2022 ] Time consumption: [Data]01%, [Network]99%
173
+ [ Thu Sep 8 00:43:20 2022 ] Training epoch: 43
174
+ [ Thu Sep 8 00:43:20 2022 ] Learning rate: 0.15
175
+ [ Thu Sep 8 00:47:48 2022 ] Mean training loss: 0.9236.
176
+ [ Thu Sep 8 00:47:48 2022 ] Time consumption: [Data]01%, [Network]99%
177
+ [ Thu Sep 8 00:47:48 2022 ] Training epoch: 44
178
+ [ Thu Sep 8 00:47:48 2022 ] Learning rate: 0.15
179
+ [ Thu Sep 8 00:52:16 2022 ] Mean training loss: 0.9109.
180
+ [ Thu Sep 8 00:52:16 2022 ] Time consumption: [Data]01%, [Network]99%
181
+ [ Thu Sep 8 00:52:16 2022 ] Training epoch: 45
182
+ [ Thu Sep 8 00:52:16 2022 ] Learning rate: 0.15
183
+ [ Thu Sep 8 00:56:44 2022 ] Mean training loss: 0.9087.
184
+ [ Thu Sep 8 00:56:44 2022 ] Time consumption: [Data]01%, [Network]99%
185
+ [ Thu Sep 8 00:56:44 2022 ] Training epoch: 46
186
+ [ Thu Sep 8 00:56:44 2022 ] Learning rate: 0.15
187
+ [ Thu Sep 8 01:01:11 2022 ] Mean training loss: 0.9218.
188
+ [ Thu Sep 8 01:01:11 2022 ] Time consumption: [Data]01%, [Network]99%
189
+ [ Thu Sep 8 01:01:11 2022 ] Training epoch: 47
190
+ [ Thu Sep 8 01:01:11 2022 ] Learning rate: 0.15
191
+ [ Thu Sep 8 01:05:39 2022 ] Mean training loss: 0.8873.
192
+ [ Thu Sep 8 01:05:39 2022 ] Time consumption: [Data]01%, [Network]99%
193
+ [ Thu Sep 8 01:05:39 2022 ] Training epoch: 48
194
+ [ Thu Sep 8 01:05:39 2022 ] Learning rate: 0.15
195
+ [ Thu Sep 8 01:10:07 2022 ] Mean training loss: 0.8890.
196
+ [ Thu Sep 8 01:10:07 2022 ] Time consumption: [Data]01%, [Network]99%
197
+ [ Thu Sep 8 01:10:07 2022 ] Training epoch: 49
198
+ [ Thu Sep 8 01:10:07 2022 ] Learning rate: 0.15
199
+ [ Thu Sep 8 01:14:35 2022 ] Mean training loss: 0.8973.
200
+ [ Thu Sep 8 01:14:35 2022 ] Time consumption: [Data]01%, [Network]99%
201
+ [ Thu Sep 8 01:14:35 2022 ] Training epoch: 50
202
+ [ Thu Sep 8 01:14:35 2022 ] Learning rate: 0.15
203
+ [ Thu Sep 8 01:19:02 2022 ] Mean training loss: 0.9033.
204
+ [ Thu Sep 8 01:19:02 2022 ] Time consumption: [Data]01%, [Network]99%
205
+ [ Thu Sep 8 01:19:02 2022 ] Training epoch: 51
206
+ [ Thu Sep 8 01:19:02 2022 ] Learning rate: 0.015
207
+ [ Thu Sep 8 01:23:31 2022 ] Mean training loss: 0.4222.
208
+ [ Thu Sep 8 01:23:31 2022 ] Time consumption: [Data]01%, [Network]99%
209
+ [ Thu Sep 8 01:23:31 2022 ] Eval epoch: 51
210
+ [ Thu Sep 8 01:31:34 2022 ] Epoch 51 Curr Acc: (32813/59477)55.17%
211
+ [ Thu Sep 8 01:31:34 2022 ] Epoch 51 Best Acc 55.17%
212
+ [ Thu Sep 8 01:31:34 2022 ] Training epoch: 52
213
+ [ Thu Sep 8 01:31:34 2022 ] Learning rate: 0.015
214
+ [ Thu Sep 8 01:36:01 2022 ] Mean training loss: 0.2876.
215
+ [ Thu Sep 8 01:36:01 2022 ] Time consumption: [Data]01%, [Network]99%
216
+ [ Thu Sep 8 01:36:01 2022 ] Eval epoch: 52
217
+ [ Thu Sep 8 01:43:52 2022 ] Epoch 52 Curr Acc: (33442/59477)56.23%
218
+ [ Thu Sep 8 01:43:52 2022 ] Epoch 52 Best Acc 56.23%
219
+ [ Thu Sep 8 01:43:52 2022 ] Training epoch: 53
220
+ [ Thu Sep 8 01:43:52 2022 ] Learning rate: 0.015
221
+ [ Thu Sep 8 01:48:19 2022 ] Mean training loss: 0.2285.
222
+ [ Thu Sep 8 01:48:19 2022 ] Time consumption: [Data]01%, [Network]99%
223
+ [ Thu Sep 8 01:48:19 2022 ] Eval epoch: 53
224
+ [ Thu Sep 8 01:56:10 2022 ] Epoch 53 Curr Acc: (34407/59477)57.85%
225
+ [ Thu Sep 8 01:56:10 2022 ] Epoch 53 Best Acc 57.85%
226
+ [ Thu Sep 8 01:56:10 2022 ] Training epoch: 54
227
+ [ Thu Sep 8 01:56:10 2022 ] Learning rate: 0.015
228
+ [ Thu Sep 8 02:00:37 2022 ] Mean training loss: 0.1980.
229
+ [ Thu Sep 8 02:00:37 2022 ] Time consumption: [Data]01%, [Network]99%
230
+ [ Thu Sep 8 02:00:37 2022 ] Eval epoch: 54
231
+ [ Thu Sep 8 02:08:28 2022 ] Epoch 54 Curr Acc: (33565/59477)56.43%
232
+ [ Thu Sep 8 02:08:28 2022 ] Epoch 53 Best Acc 57.85%
233
+ [ Thu Sep 8 02:08:28 2022 ] Training epoch: 55
234
+ [ Thu Sep 8 02:08:28 2022 ] Learning rate: 0.015
235
+ [ Thu Sep 8 02:12:55 2022 ] Mean training loss: 0.1544.
236
+ [ Thu Sep 8 02:12:55 2022 ] Time consumption: [Data]01%, [Network]99%
237
+ [ Thu Sep 8 02:12:55 2022 ] Eval epoch: 55
238
+ [ Thu Sep 8 02:20:46 2022 ] Epoch 55 Curr Acc: (33778/59477)56.79%
239
+ [ Thu Sep 8 02:20:46 2022 ] Epoch 53 Best Acc 57.85%
240
+ [ Thu Sep 8 02:20:46 2022 ] Training epoch: 56
241
+ [ Thu Sep 8 02:20:46 2022 ] Learning rate: 0.015
242
+ [ Thu Sep 8 02:25:13 2022 ] Mean training loss: 0.1342.
243
+ [ Thu Sep 8 02:25:13 2022 ] Time consumption: [Data]01%, [Network]99%
244
+ [ Thu Sep 8 02:25:13 2022 ] Eval epoch: 56
245
+ [ Thu Sep 8 02:33:04 2022 ] Epoch 56 Curr Acc: (32699/59477)54.98%
246
+ [ Thu Sep 8 02:33:04 2022 ] Epoch 53 Best Acc 57.85%
247
+ [ Thu Sep 8 02:33:04 2022 ] Training epoch: 57
248
+ [ Thu Sep 8 02:33:04 2022 ] Learning rate: 0.015
249
+ [ Thu Sep 8 02:37:30 2022 ] Mean training loss: 0.1154.
250
+ [ Thu Sep 8 02:37:30 2022 ] Time consumption: [Data]01%, [Network]99%
251
+ [ Thu Sep 8 02:37:30 2022 ] Eval epoch: 57
252
+ [ Thu Sep 8 02:45:21 2022 ] Epoch 57 Curr Acc: (34318/59477)57.70%
253
+ [ Thu Sep 8 02:45:21 2022 ] Epoch 53 Best Acc 57.85%
254
+ [ Thu Sep 8 02:45:21 2022 ] Training epoch: 58
255
+ [ Thu Sep 8 02:45:21 2022 ] Learning rate: 0.015
256
+ [ Thu Sep 8 02:49:49 2022 ] Mean training loss: 0.0991.
257
+ [ Thu Sep 8 02:49:49 2022 ] Time consumption: [Data]01%, [Network]99%
258
+ [ Thu Sep 8 02:49:49 2022 ] Eval epoch: 58
259
+ [ Thu Sep 8 02:57:40 2022 ] Epoch 58 Curr Acc: (33961/59477)57.10%
260
+ [ Thu Sep 8 02:57:40 2022 ] Epoch 53 Best Acc 57.85%
261
+ [ Thu Sep 8 02:57:40 2022 ] Training epoch: 59
262
+ [ Thu Sep 8 02:57:40 2022 ] Learning rate: 0.015
263
+ [ Thu Sep 8 03:02:07 2022 ] Mean training loss: 0.0811.
264
+ [ Thu Sep 8 03:02:07 2022 ] Time consumption: [Data]01%, [Network]99%
265
+ [ Thu Sep 8 03:02:07 2022 ] Eval epoch: 59
266
+ [ Thu Sep 8 03:09:59 2022 ] Epoch 59 Curr Acc: (34302/59477)57.67%
267
+ [ Thu Sep 8 03:09:59 2022 ] Epoch 53 Best Acc 57.85%
268
+ [ Thu Sep 8 03:09:59 2022 ] Training epoch: 60
269
+ [ Thu Sep 8 03:09:59 2022 ] Learning rate: 0.015
270
+ [ Thu Sep 8 03:14:26 2022 ] Mean training loss: 0.0705.
271
+ [ Thu Sep 8 03:14:26 2022 ] Time consumption: [Data]01%, [Network]99%
272
+ [ Thu Sep 8 03:14:26 2022 ] Eval epoch: 60
273
+ [ Thu Sep 8 03:22:17 2022 ] Epoch 60 Curr Acc: (33822/59477)56.87%
274
+ [ Thu Sep 8 03:22:17 2022 ] Epoch 53 Best Acc 57.85%
275
+ [ Thu Sep 8 03:22:17 2022 ] Training epoch: 61
276
+ [ Thu Sep 8 03:22:17 2022 ] Learning rate: 0.015
277
+ [ Thu Sep 8 03:26:43 2022 ] Mean training loss: 0.0648.
278
+ [ Thu Sep 8 03:26:43 2022 ] Time consumption: [Data]01%, [Network]99%
279
+ [ Thu Sep 8 03:26:43 2022 ] Eval epoch: 61
280
+ [ Thu Sep 8 03:34:34 2022 ] Epoch 61 Curr Acc: (31688/59477)53.28%
281
+ [ Thu Sep 8 03:34:34 2022 ] Epoch 53 Best Acc 57.85%
282
+ [ Thu Sep 8 03:34:34 2022 ] Training epoch: 62
283
+ [ Thu Sep 8 03:34:34 2022 ] Learning rate: 0.015
284
+ [ Thu Sep 8 03:39:00 2022 ] Mean training loss: 0.0592.
285
+ [ Thu Sep 8 03:39:00 2022 ] Time consumption: [Data]01%, [Network]99%
286
+ [ Thu Sep 8 03:39:00 2022 ] Eval epoch: 62
287
+ [ Thu Sep 8 03:46:51 2022 ] Epoch 62 Curr Acc: (33950/59477)57.08%
288
+ [ Thu Sep 8 03:46:51 2022 ] Epoch 53 Best Acc 57.85%
289
+ [ Thu Sep 8 03:46:51 2022 ] Training epoch: 63
290
+ [ Thu Sep 8 03:46:51 2022 ] Learning rate: 0.015
291
+ [ Thu Sep 8 03:51:18 2022 ] Mean training loss: 0.0518.
292
+ [ Thu Sep 8 03:51:18 2022 ] Time consumption: [Data]01%, [Network]99%
293
+ [ Thu Sep 8 03:51:18 2022 ] Eval epoch: 63
294
+ [ Thu Sep 8 03:59:09 2022 ] Epoch 63 Curr Acc: (34179/59477)57.47%
295
+ [ Thu Sep 8 03:59:09 2022 ] Epoch 53 Best Acc 57.85%
296
+ [ Thu Sep 8 03:59:09 2022 ] Training epoch: 64
297
+ [ Thu Sep 8 03:59:09 2022 ] Learning rate: 0.015
298
+ [ Thu Sep 8 04:03:36 2022 ] Mean training loss: 0.0489.
299
+ [ Thu Sep 8 04:03:36 2022 ] Time consumption: [Data]01%, [Network]99%
300
+ [ Thu Sep 8 04:03:36 2022 ] Eval epoch: 64
301
+ [ Thu Sep 8 04:11:26 2022 ] Epoch 64 Curr Acc: (34018/59477)57.20%
302
+ [ Thu Sep 8 04:11:26 2022 ] Epoch 53 Best Acc 57.85%
303
+ [ Thu Sep 8 04:11:26 2022 ] Training epoch: 65
304
+ [ Thu Sep 8 04:11:26 2022 ] Learning rate: 0.015
305
+ [ Thu Sep 8 04:15:53 2022 ] Mean training loss: 0.0428.
306
+ [ Thu Sep 8 04:15:53 2022 ] Time consumption: [Data]01%, [Network]99%
307
+ [ Thu Sep 8 04:15:53 2022 ] Eval epoch: 65
308
+ [ Thu Sep 8 04:23:44 2022 ] Epoch 65 Curr Acc: (32835/59477)55.21%
309
+ [ Thu Sep 8 04:23:44 2022 ] Epoch 53 Best Acc 57.85%
310
+ [ Thu Sep 8 04:23:44 2022 ] Training epoch: 66
311
+ [ Thu Sep 8 04:23:44 2022 ] Learning rate: 0.015
312
+ [ Thu Sep 8 04:28:10 2022 ] Mean training loss: 0.0445.
313
+ [ Thu Sep 8 04:28:10 2022 ] Time consumption: [Data]01%, [Network]99%
314
+ [ Thu Sep 8 04:28:11 2022 ] Eval epoch: 66
315
+ [ Thu Sep 8 04:36:01 2022 ] Epoch 66 Curr Acc: (33433/59477)56.21%
316
+ [ Thu Sep 8 04:36:01 2022 ] Epoch 53 Best Acc 57.85%
317
+ [ Thu Sep 8 04:36:01 2022 ] Training epoch: 67
318
+ [ Thu Sep 8 04:36:01 2022 ] Learning rate: 0.015
319
+ [ Thu Sep 8 04:40:27 2022 ] Mean training loss: 0.0374.
320
+ [ Thu Sep 8 04:40:27 2022 ] Time consumption: [Data]01%, [Network]99%
321
+ [ Thu Sep 8 04:40:27 2022 ] Eval epoch: 67
322
+ [ Thu Sep 8 04:48:18 2022 ] Epoch 67 Curr Acc: (33614/59477)56.52%
323
+ [ Thu Sep 8 04:48:18 2022 ] Epoch 53 Best Acc 57.85%
324
+ [ Thu Sep 8 04:48:18 2022 ] Training epoch: 68
325
+ [ Thu Sep 8 04:48:18 2022 ] Learning rate: 0.015
326
+ [ Thu Sep 8 04:52:44 2022 ] Mean training loss: 0.0423.
327
+ [ Thu Sep 8 04:52:44 2022 ] Time consumption: [Data]01%, [Network]99%
328
+ [ Thu Sep 8 04:52:44 2022 ] Eval epoch: 68
329
+ [ Thu Sep 8 05:00:34 2022 ] Epoch 68 Curr Acc: (33131/59477)55.70%
330
+ [ Thu Sep 8 05:00:34 2022 ] Epoch 53 Best Acc 57.85%
331
+ [ Thu Sep 8 05:00:35 2022 ] Training epoch: 69
332
+ [ Thu Sep 8 05:00:35 2022 ] Learning rate: 0.015
333
+ [ Thu Sep 8 05:05:00 2022 ] Mean training loss: 0.0395.
334
+ [ Thu Sep 8 05:05:00 2022 ] Time consumption: [Data]01%, [Network]99%
335
+ [ Thu Sep 8 05:05:00 2022 ] Eval epoch: 69
336
+ [ Thu Sep 8 05:12:51 2022 ] Epoch 69 Curr Acc: (33446/59477)56.23%
337
+ [ Thu Sep 8 05:12:51 2022 ] Epoch 53 Best Acc 57.85%
338
+ [ Thu Sep 8 05:12:51 2022 ] Training epoch: 70
339
+ [ Thu Sep 8 05:12:51 2022 ] Learning rate: 0.015
340
+ [ Thu Sep 8 05:17:17 2022 ] Mean training loss: 0.0333.
341
+ [ Thu Sep 8 05:17:17 2022 ] Time consumption: [Data]01%, [Network]99%
342
+ [ Thu Sep 8 05:17:17 2022 ] Eval epoch: 70
343
+ [ Thu Sep 8 05:25:07 2022 ] Epoch 70 Curr Acc: (33792/59477)56.82%
344
+ [ Thu Sep 8 05:25:07 2022 ] Epoch 53 Best Acc 57.85%
345
+ [ Thu Sep 8 05:25:08 2022 ] Training epoch: 71
346
+ [ Thu Sep 8 05:25:08 2022 ] Learning rate: 0.0015000000000000002
347
+ [ Thu Sep 8 05:29:33 2022 ] Mean training loss: 0.0233.
348
+ [ Thu Sep 8 05:29:33 2022 ] Time consumption: [Data]01%, [Network]99%
349
+ [ Thu Sep 8 05:29:33 2022 ] Eval epoch: 71
350
+ [ Thu Sep 8 05:37:24 2022 ] Epoch 71 Curr Acc: (33925/59477)57.04%
351
+ [ Thu Sep 8 05:37:24 2022 ] Epoch 53 Best Acc 57.85%
352
+ [ Thu Sep 8 05:37:24 2022 ] Training epoch: 72
353
+ [ Thu Sep 8 05:37:24 2022 ] Learning rate: 0.0015000000000000002
354
+ [ Thu Sep 8 05:41:49 2022 ] Mean training loss: 0.0224.
355
+ [ Thu Sep 8 05:41:49 2022 ] Time consumption: [Data]01%, [Network]99%
356
+ [ Thu Sep 8 05:41:49 2022 ] Eval epoch: 72
357
+ [ Thu Sep 8 05:49:40 2022 ] Epoch 72 Curr Acc: (34114/59477)57.36%
358
+ [ Thu Sep 8 05:49:40 2022 ] Epoch 53 Best Acc 57.85%
359
+ [ Thu Sep 8 05:49:40 2022 ] Training epoch: 73
360
+ [ Thu Sep 8 05:49:40 2022 ] Learning rate: 0.0015000000000000002
361
+ [ Thu Sep 8 05:54:06 2022 ] Mean training loss: 0.0193.
362
+ [ Thu Sep 8 05:54:06 2022 ] Time consumption: [Data]01%, [Network]99%
363
+ [ Thu Sep 8 05:54:06 2022 ] Eval epoch: 73
364
+ [ Thu Sep 8 06:01:57 2022 ] Epoch 73 Curr Acc: (33739/59477)56.73%
365
+ [ Thu Sep 8 06:01:57 2022 ] Epoch 53 Best Acc 57.85%
366
+ [ Thu Sep 8 06:01:57 2022 ] Training epoch: 74
367
+ [ Thu Sep 8 06:01:57 2022 ] Learning rate: 0.0015000000000000002
368
+ [ Thu Sep 8 06:06:23 2022 ] Mean training loss: 0.0176.
369
+ [ Thu Sep 8 06:06:23 2022 ] Time consumption: [Data]01%, [Network]99%
370
+ [ Thu Sep 8 06:06:23 2022 ] Eval epoch: 74
371
+ [ Thu Sep 8 06:14:13 2022 ] Epoch 74 Curr Acc: (34195/59477)57.49%
372
+ [ Thu Sep 8 06:14:13 2022 ] Epoch 53 Best Acc 57.85%
373
+ [ Thu Sep 8 06:14:13 2022 ] Training epoch: 75
374
+ [ Thu Sep 8 06:14:13 2022 ] Learning rate: 0.0015000000000000002
375
+ [ Thu Sep 8 06:18:39 2022 ] Mean training loss: 0.0177.
376
+ [ Thu Sep 8 06:18:39 2022 ] Time consumption: [Data]01%, [Network]99%
377
+ [ Thu Sep 8 06:18:39 2022 ] Eval epoch: 75
378
+ [ Thu Sep 8 06:26:30 2022 ] Epoch 75 Curr Acc: (34027/59477)57.21%
379
+ [ Thu Sep 8 06:26:30 2022 ] Epoch 53 Best Acc 57.85%
380
+ [ Thu Sep 8 06:26:30 2022 ] Training epoch: 76
381
+ [ Thu Sep 8 06:26:30 2022 ] Learning rate: 0.0015000000000000002
382
+ [ Thu Sep 8 06:30:56 2022 ] Mean training loss: 0.0164.
383
+ [ Thu Sep 8 06:30:56 2022 ] Time consumption: [Data]01%, [Network]99%
384
+ [ Thu Sep 8 06:30:56 2022 ] Eval epoch: 76
385
+ [ Thu Sep 8 06:38:47 2022 ] Epoch 76 Curr Acc: (34208/59477)57.51%
386
+ [ Thu Sep 8 06:38:47 2022 ] Epoch 53 Best Acc 57.85%
387
+ [ Thu Sep 8 06:38:47 2022 ] Training epoch: 77
388
+ [ Thu Sep 8 06:38:47 2022 ] Learning rate: 0.0015000000000000002
389
+ [ Thu Sep 8 06:43:13 2022 ] Mean training loss: 0.0160.
390
+ [ Thu Sep 8 06:43:13 2022 ] Time consumption: [Data]01%, [Network]99%
391
+ [ Thu Sep 8 06:43:13 2022 ] Eval epoch: 77
392
+ [ Thu Sep 8 06:51:03 2022 ] Epoch 77 Curr Acc: (34368/59477)57.78%
393
+ [ Thu Sep 8 06:51:03 2022 ] Epoch 53 Best Acc 57.85%
394
+ [ Thu Sep 8 06:51:03 2022 ] Training epoch: 78
395
+ [ Thu Sep 8 06:51:03 2022 ] Learning rate: 0.0015000000000000002
396
+ [ Thu Sep 8 06:55:29 2022 ] Mean training loss: 0.0170.
397
+ [ Thu Sep 8 06:55:29 2022 ] Time consumption: [Data]01%, [Network]99%
398
+ [ Thu Sep 8 06:55:29 2022 ] Eval epoch: 78
399
+ [ Thu Sep 8 07:03:20 2022 ] Epoch 78 Curr Acc: (34327/59477)57.71%
400
+ [ Thu Sep 8 07:03:20 2022 ] Epoch 53 Best Acc 57.85%
401
+ [ Thu Sep 8 07:03:20 2022 ] Training epoch: 79
402
+ [ Thu Sep 8 07:03:20 2022 ] Learning rate: 0.0015000000000000002
403
+ [ Thu Sep 8 07:07:45 2022 ] Mean training loss: 0.0164.
404
+ [ Thu Sep 8 07:07:45 2022 ] Time consumption: [Data]01%, [Network]99%
405
+ [ Thu Sep 8 07:07:45 2022 ] Eval epoch: 79
406
+ [ Thu Sep 8 07:15:36 2022 ] Epoch 79 Curr Acc: (33012/59477)55.50%
407
+ [ Thu Sep 8 07:15:36 2022 ] Epoch 53 Best Acc 57.85%
408
+ [ Thu Sep 8 07:15:36 2022 ] Training epoch: 80
409
+ [ Thu Sep 8 07:15:36 2022 ] Learning rate: 0.0015000000000000002
410
+ [ Thu Sep 8 07:20:01 2022 ] Mean training loss: 0.0164.
411
+ [ Thu Sep 8 07:20:01 2022 ] Time consumption: [Data]01%, [Network]99%
412
+ [ Thu Sep 8 07:20:01 2022 ] Eval epoch: 80
413
+ [ Thu Sep 8 07:27:52 2022 ] Epoch 80 Curr Acc: (34292/59477)57.66%
414
+ [ Thu Sep 8 07:27:52 2022 ] Epoch 53 Best Acc 57.85%
415
+ [ Thu Sep 8 07:27:52 2022 ] Training epoch: 81
416
+ [ Thu Sep 8 07:27:52 2022 ] Learning rate: 0.0015000000000000002
417
+ [ Thu Sep 8 07:32:18 2022 ] Mean training loss: 0.0143.
418
+ [ Thu Sep 8 07:32:18 2022 ] Time consumption: [Data]01%, [Network]99%
419
+ [ Thu Sep 8 07:32:18 2022 ] Eval epoch: 81
420
+ [ Thu Sep 8 07:40:09 2022 ] Epoch 81 Curr Acc: (34228/59477)57.55%
421
+ [ Thu Sep 8 07:40:09 2022 ] Epoch 53 Best Acc 57.85%
422
+ [ Thu Sep 8 07:40:09 2022 ] Training epoch: 82
423
+ [ Thu Sep 8 07:40:09 2022 ] Learning rate: 0.0015000000000000002
424
+ [ Thu Sep 8 07:44:35 2022 ] Mean training loss: 0.0158.
425
+ [ Thu Sep 8 07:44:35 2022 ] Time consumption: [Data]01%, [Network]99%
426
+ [ Thu Sep 8 07:44:35 2022 ] Eval epoch: 82
427
+ [ Thu Sep 8 07:52:25 2022 ] Epoch 82 Curr Acc: (34370/59477)57.79%
428
+ [ Thu Sep 8 07:52:25 2022 ] Epoch 53 Best Acc 57.85%
429
+ [ Thu Sep 8 07:52:25 2022 ] Training epoch: 83
430
+ [ Thu Sep 8 07:52:25 2022 ] Learning rate: 0.0015000000000000002
431
+ [ Thu Sep 8 07:56:50 2022 ] Mean training loss: 0.0141.
432
+ [ Thu Sep 8 07:56:50 2022 ] Time consumption: [Data]01%, [Network]99%
433
+ [ Thu Sep 8 07:56:50 2022 ] Eval epoch: 83
434
+ [ Thu Sep 8 08:04:41 2022 ] Epoch 83 Curr Acc: (34255/59477)57.59%
435
+ [ Thu Sep 8 08:04:41 2022 ] Epoch 53 Best Acc 57.85%
436
+ [ Thu Sep 8 08:04:41 2022 ] Training epoch: 84
437
+ [ Thu Sep 8 08:04:41 2022 ] Learning rate: 0.0015000000000000002
438
+ [ Thu Sep 8 08:09:06 2022 ] Mean training loss: 0.0151.
439
+ [ Thu Sep 8 08:09:06 2022 ] Time consumption: [Data]01%, [Network]99%
440
+ [ Thu Sep 8 08:09:06 2022 ] Eval epoch: 84
441
+ [ Thu Sep 8 08:16:57 2022 ] Epoch 84 Curr Acc: (34008/59477)57.18%
442
+ [ Thu Sep 8 08:16:57 2022 ] Epoch 53 Best Acc 57.85%
443
+ [ Thu Sep 8 08:16:57 2022 ] Training epoch: 85
444
+ [ Thu Sep 8 08:16:57 2022 ] Learning rate: 0.0015000000000000002
445
+ [ Thu Sep 8 08:21:22 2022 ] Mean training loss: 0.0156.
446
+ [ Thu Sep 8 08:21:22 2022 ] Time consumption: [Data]01%, [Network]99%
447
+ [ Thu Sep 8 08:21:22 2022 ] Eval epoch: 85
448
+ [ Thu Sep 8 08:29:13 2022 ] Epoch 85 Curr Acc: (33965/59477)57.11%
449
+ [ Thu Sep 8 08:29:13 2022 ] Epoch 53 Best Acc 57.85%
450
+ [ Thu Sep 8 08:29:13 2022 ] Training epoch: 86
451
+ [ Thu Sep 8 08:29:13 2022 ] Learning rate: 0.0015000000000000002
452
+ [ Thu Sep 8 08:33:37 2022 ] Mean training loss: 0.0137.
453
+ [ Thu Sep 8 08:33:37 2022 ] Time consumption: [Data]01%, [Network]99%
454
+ [ Thu Sep 8 08:33:37 2022 ] Eval epoch: 86
455
+ [ Thu Sep 8 08:41:28 2022 ] Epoch 86 Curr Acc: (34226/59477)57.54%
456
+ [ Thu Sep 8 08:41:28 2022 ] Epoch 53 Best Acc 57.85%
457
+ [ Thu Sep 8 08:41:28 2022 ] Training epoch: 87
458
+ [ Thu Sep 8 08:41:28 2022 ] Learning rate: 0.0015000000000000002
459
+ [ Thu Sep 8 08:45:54 2022 ] Mean training loss: 0.0140.
460
+ [ Thu Sep 8 08:45:54 2022 ] Time consumption: [Data]01%, [Network]99%
461
+ [ Thu Sep 8 08:45:54 2022 ] Eval epoch: 87
462
+ [ Thu Sep 8 08:53:44 2022 ] Epoch 87 Curr Acc: (34184/59477)57.47%
463
+ [ Thu Sep 8 08:53:44 2022 ] Epoch 53 Best Acc 57.85%
464
+ [ Thu Sep 8 08:53:44 2022 ] Training epoch: 88
465
+ [ Thu Sep 8 08:53:44 2022 ] Learning rate: 0.0015000000000000002
466
+ [ Thu Sep 8 08:58:10 2022 ] Mean training loss: 0.0153.
467
+ [ Thu Sep 8 08:58:10 2022 ] Time consumption: [Data]01%, [Network]99%
468
+ [ Thu Sep 8 08:58:10 2022 ] Eval epoch: 88
469
+ [ Thu Sep 8 09:06:01 2022 ] Epoch 88 Curr Acc: (32881/59477)55.28%
470
+ [ Thu Sep 8 09:06:01 2022 ] Epoch 53 Best Acc 57.85%
471
+ [ Thu Sep 8 09:06:01 2022 ] Training epoch: 89
472
+ [ Thu Sep 8 09:06:01 2022 ] Learning rate: 0.0015000000000000002
473
+ [ Thu Sep 8 09:10:27 2022 ] Mean training loss: 0.0135.
474
+ [ Thu Sep 8 09:10:27 2022 ] Time consumption: [Data]01%, [Network]99%
475
+ [ Thu Sep 8 09:10:27 2022 ] Eval epoch: 89
476
+ [ Thu Sep 8 09:18:18 2022 ] Epoch 89 Curr Acc: (33262/59477)55.92%
477
+ [ Thu Sep 8 09:18:18 2022 ] Epoch 53 Best Acc 57.85%
478
+ [ Thu Sep 8 09:18:18 2022 ] Training epoch: 90
479
+ [ Thu Sep 8 09:18:18 2022 ] Learning rate: 0.0015000000000000002
480
+ [ Thu Sep 8 09:22:44 2022 ] Mean training loss: 0.0137.
481
+ [ Thu Sep 8 09:22:44 2022 ] Time consumption: [Data]01%, [Network]99%
482
+ [ Thu Sep 8 09:22:44 2022 ] Eval epoch: 90
483
+ [ Thu Sep 8 09:30:35 2022 ] Epoch 90 Curr Acc: (33971/59477)57.12%
484
+ [ Thu Sep 8 09:30:35 2022 ] Epoch 53 Best Acc 57.85%
485
+ [ Thu Sep 8 09:30:35 2022 ] Training epoch: 91
486
+ [ Thu Sep 8 09:30:35 2022 ] Learning rate: 0.00015000000000000004
487
+ [ Thu Sep 8 09:35:01 2022 ] Mean training loss: 0.0136.
488
+ [ Thu Sep 8 09:35:01 2022 ] Time consumption: [Data]01%, [Network]99%
489
+ [ Thu Sep 8 09:35:01 2022 ] Eval epoch: 91
490
+ [ Thu Sep 8 09:42:51 2022 ] Epoch 91 Curr Acc: (34256/59477)57.60%
491
+ [ Thu Sep 8 09:42:51 2022 ] Epoch 53 Best Acc 57.85%
492
+ [ Thu Sep 8 09:42:51 2022 ] Training epoch: 92
493
+ [ Thu Sep 8 09:42:51 2022 ] Learning rate: 0.00015000000000000004
494
+ [ Thu Sep 8 09:47:17 2022 ] Mean training loss: 0.0141.
495
+ [ Thu Sep 8 09:47:17 2022 ] Time consumption: [Data]01%, [Network]99%
496
+ [ Thu Sep 8 09:47:17 2022 ] Eval epoch: 92
497
+ [ Thu Sep 8 09:55:08 2022 ] Epoch 92 Curr Acc: (34344/59477)57.74%
498
+ [ Thu Sep 8 09:55:08 2022 ] Epoch 53 Best Acc 57.85%
499
+ [ Thu Sep 8 09:55:08 2022 ] Training epoch: 93
500
+ [ Thu Sep 8 09:55:08 2022 ] Learning rate: 0.00015000000000000004
501
+ [ Thu Sep 8 09:59:34 2022 ] Mean training loss: 0.0136.
502
+ [ Thu Sep 8 09:59:34 2022 ] Time consumption: [Data]01%, [Network]99%
503
+ [ Thu Sep 8 09:59:34 2022 ] Eval epoch: 93
504
+ [ Thu Sep 8 10:07:25 2022 ] Epoch 93 Curr Acc: (34425/59477)57.88%
505
+ [ Thu Sep 8 10:07:25 2022 ] Epoch 93 Best Acc 57.88%
506
+ [ Thu Sep 8 10:07:25 2022 ] Training epoch: 94
507
+ [ Thu Sep 8 10:07:25 2022 ] Learning rate: 0.00015000000000000004
508
+ [ Thu Sep 8 10:11:52 2022 ] Mean training loss: 0.0139.
509
+ [ Thu Sep 8 10:11:52 2022 ] Time consumption: [Data]01%, [Network]99%
510
+ [ Thu Sep 8 10:11:52 2022 ] Eval epoch: 94
511
+ [ Thu Sep 8 10:19:43 2022 ] Epoch 94 Curr Acc: (34310/59477)57.69%
512
+ [ Thu Sep 8 10:19:43 2022 ] Epoch 93 Best Acc 57.88%
513
+ [ Thu Sep 8 10:19:43 2022 ] Training epoch: 95
514
+ [ Thu Sep 8 10:19:43 2022 ] Learning rate: 0.00015000000000000004
515
+ [ Thu Sep 8 10:24:09 2022 ] Mean training loss: 0.0134.
516
+ [ Thu Sep 8 10:24:09 2022 ] Time consumption: [Data]01%, [Network]99%
517
+ [ Thu Sep 8 10:24:09 2022 ] Eval epoch: 95
518
+ [ Thu Sep 8 10:32:00 2022 ] Epoch 95 Curr Acc: (32562/59477)54.75%
519
+ [ Thu Sep 8 10:32:00 2022 ] Epoch 93 Best Acc 57.88%
520
+ [ Thu Sep 8 10:32:00 2022 ] Training epoch: 96
521
+ [ Thu Sep 8 10:32:00 2022 ] Learning rate: 0.00015000000000000004
522
+ [ Thu Sep 8 10:36:26 2022 ] Mean training loss: 0.0124.
523
+ [ Thu Sep 8 10:36:26 2022 ] Time consumption: [Data]01%, [Network]99%
524
+ [ Thu Sep 8 10:36:26 2022 ] Eval epoch: 96
525
+ [ Thu Sep 8 10:44:17 2022 ] Epoch 96 Curr Acc: (34062/59477)57.27%
526
+ [ Thu Sep 8 10:44:17 2022 ] Epoch 93 Best Acc 57.88%
527
+ [ Thu Sep 8 10:44:17 2022 ] Training epoch: 97
528
+ [ Thu Sep 8 10:44:17 2022 ] Learning rate: 0.00015000000000000004
529
+ [ Thu Sep 8 10:48:43 2022 ] Mean training loss: 0.0130.
530
+ [ Thu Sep 8 10:48:43 2022 ] Time consumption: [Data]01%, [Network]99%
531
+ [ Thu Sep 8 10:48:43 2022 ] Eval epoch: 97
532
+ [ Thu Sep 8 10:56:34 2022 ] Epoch 97 Curr Acc: (34155/59477)57.43%
533
+ [ Thu Sep 8 10:56:34 2022 ] Epoch 93 Best Acc 57.88%
534
+ [ Thu Sep 8 10:56:34 2022 ] Training epoch: 98
535
+ [ Thu Sep 8 10:56:34 2022 ] Learning rate: 0.00015000000000000004
536
+ [ Thu Sep 8 11:01:01 2022 ] Mean training loss: 0.0125.
537
+ [ Thu Sep 8 11:01:01 2022 ] Time consumption: [Data]01%, [Network]99%
538
+ [ Thu Sep 8 11:01:01 2022 ] Eval epoch: 98
539
+ [ Thu Sep 8 11:08:52 2022 ] Epoch 98 Curr Acc: (34319/59477)57.70%
540
+ [ Thu Sep 8 11:08:52 2022 ] Epoch 93 Best Acc 57.88%
541
+ [ Thu Sep 8 11:08:52 2022 ] Training epoch: 99
542
+ [ Thu Sep 8 11:08:52 2022 ] Learning rate: 0.00015000000000000004
543
+ [ Thu Sep 8 11:13:18 2022 ] Mean training loss: 0.0125.
544
+ [ Thu Sep 8 11:13:18 2022 ] Time consumption: [Data]01%, [Network]99%
545
+ [ Thu Sep 8 11:13:18 2022 ] Eval epoch: 99
546
+ [ Thu Sep 8 11:21:10 2022 ] Epoch 99 Curr Acc: (34330/59477)57.72%
547
+ [ Thu Sep 8 11:21:10 2022 ] Epoch 93 Best Acc 57.88%
548
+ [ Thu Sep 8 11:21:10 2022 ] Training epoch: 100
549
+ [ Thu Sep 8 11:21:10 2022 ] Learning rate: 0.00015000000000000004
550
+ [ Thu Sep 8 11:25:36 2022 ] Mean training loss: 0.0134.
551
+ [ Thu Sep 8 11:25:36 2022 ] Time consumption: [Data]01%, [Network]99%
552
+ [ Thu Sep 8 11:25:36 2022 ] Eval epoch: 100
553
+ [ Thu Sep 8 11:33:27 2022 ] Epoch 100 Curr Acc: (33592/59477)56.48%
554
+ [ Thu Sep 8 11:33:27 2022 ] Epoch 93 Best Acc 57.88%
555
+ [ Thu Sep 8 11:33:28 2022 ] Training epoch: 101
556
+ [ Thu Sep 8 11:33:28 2022 ] Learning rate: 0.00015000000000000004
557
+ [ Thu Sep 8 11:37:54 2022 ] Mean training loss: 0.0127.
558
+ [ Thu Sep 8 11:37:54 2022 ] Time consumption: [Data]01%, [Network]99%
559
+ [ Thu Sep 8 11:37:54 2022 ] Eval epoch: 101
560
+ [ Thu Sep 8 11:45:45 2022 ] Epoch 101 Curr Acc: (34314/59477)57.69%
561
+ [ Thu Sep 8 11:45:45 2022 ] Epoch 93 Best Acc 57.88%
562
+ [ Thu Sep 8 11:45:45 2022 ] Training epoch: 102
563
+ [ Thu Sep 8 11:45:45 2022 ] Learning rate: 0.00015000000000000004
564
+ [ Thu Sep 8 11:50:12 2022 ] Mean training loss: 0.0129.
565
+ [ Thu Sep 8 11:50:12 2022 ] Time consumption: [Data]01%, [Network]99%
566
+ [ Thu Sep 8 11:50:12 2022 ] Eval epoch: 102
567
+ [ Thu Sep 8 11:58:03 2022 ] Epoch 102 Curr Acc: (34594/59477)58.16%
568
+ [ Thu Sep 8 11:58:03 2022 ] Epoch 102 Best Acc 58.16%
569
+ [ Thu Sep 8 11:58:03 2022 ] Training epoch: 103
570
+ [ Thu Sep 8 11:58:03 2022 ] Learning rate: 0.00015000000000000004
571
+ [ Thu Sep 8 12:02:30 2022 ] Mean training loss: 0.0132.
572
+ [ Thu Sep 8 12:02:30 2022 ] Time consumption: [Data]01%, [Network]99%
573
+ [ Thu Sep 8 12:02:30 2022 ] Eval epoch: 103
574
+ [ Thu Sep 8 12:10:21 2022 ] Epoch 103 Curr Acc: (33746/59477)56.74%
575
+ [ Thu Sep 8 12:10:21 2022 ] Epoch 102 Best Acc 58.16%
576
+ [ Thu Sep 8 12:10:21 2022 ] Training epoch: 104
577
+ [ Thu Sep 8 12:10:21 2022 ] Learning rate: 0.00015000000000000004
578
+ [ Thu Sep 8 12:14:48 2022 ] Mean training loss: 0.0133.
579
+ [ Thu Sep 8 12:14:48 2022 ] Time consumption: [Data]01%, [Network]99%
580
+ [ Thu Sep 8 12:14:48 2022 ] Eval epoch: 104
581
+ [ Thu Sep 8 12:22:39 2022 ] Epoch 104 Curr Acc: (34022/59477)57.20%
582
+ [ Thu Sep 8 12:22:39 2022 ] Epoch 102 Best Acc 58.16%
583
+ [ Thu Sep 8 12:22:39 2022 ] Training epoch: 105
584
+ [ Thu Sep 8 12:22:39 2022 ] Learning rate: 0.00015000000000000004
585
+ [ Thu Sep 8 12:27:06 2022 ] Mean training loss: 0.0124.
586
+ [ Thu Sep 8 12:27:06 2022 ] Time consumption: [Data]01%, [Network]99%
587
+ [ Thu Sep 8 12:27:06 2022 ] Eval epoch: 105
588
+ [ Thu Sep 8 12:34:57 2022 ] Epoch 105 Curr Acc: (34214/59477)57.52%
589
+ [ Thu Sep 8 12:34:57 2022 ] Epoch 102 Best Acc 58.16%
590
+ [ Thu Sep 8 12:34:57 2022 ] Training epoch: 106
591
+ [ Thu Sep 8 12:34:57 2022 ] Learning rate: 0.00015000000000000004
592
+ [ Thu Sep 8 12:39:24 2022 ] Mean training loss: 0.0126.
593
+ [ Thu Sep 8 12:39:24 2022 ] Time consumption: [Data]01%, [Network]99%
594
+ [ Thu Sep 8 12:39:24 2022 ] Eval epoch: 106
595
+ [ Thu Sep 8 12:47:15 2022 ] Epoch 106 Curr Acc: (34257/59477)57.60%
596
+ [ Thu Sep 8 12:47:15 2022 ] Epoch 102 Best Acc 58.16%
597
+ [ Thu Sep 8 12:47:15 2022 ] Training epoch: 107
598
+ [ Thu Sep 8 12:47:15 2022 ] Learning rate: 0.00015000000000000004
599
+ [ Thu Sep 8 12:51:42 2022 ] Mean training loss: 0.0123.
600
+ [ Thu Sep 8 12:51:42 2022 ] Time consumption: [Data]01%, [Network]99%
601
+ [ Thu Sep 8 12:51:42 2022 ] Eval epoch: 107
602
+ [ Thu Sep 8 12:59:33 2022 ] Epoch 107 Curr Acc: (33921/59477)57.03%
603
+ [ Thu Sep 8 12:59:33 2022 ] Epoch 102 Best Acc 58.16%
604
+ [ Thu Sep 8 12:59:33 2022 ] Training epoch: 108
605
+ [ Thu Sep 8 12:59:33 2022 ] Learning rate: 0.00015000000000000004
606
+ [ Thu Sep 8 13:04:00 2022 ] Mean training loss: 0.0124.
607
+ [ Thu Sep 8 13:04:00 2022 ] Time consumption: [Data]01%, [Network]99%
608
+ [ Thu Sep 8 13:04:00 2022 ] Eval epoch: 108
609
+ [ Thu Sep 8 13:11:52 2022 ] Epoch 108 Curr Acc: (34280/59477)57.64%
610
+ [ Thu Sep 8 13:11:52 2022 ] Epoch 102 Best Acc 58.16%
611
+ [ Thu Sep 8 13:11:52 2022 ] Training epoch: 109
612
+ [ Thu Sep 8 13:11:52 2022 ] Learning rate: 0.00015000000000000004
613
+ [ Thu Sep 8 13:16:18 2022 ] Mean training loss: 0.0128.
614
+ [ Thu Sep 8 13:16:18 2022 ] Time consumption: [Data]01%, [Network]99%
615
+ [ Thu Sep 8 13:16:18 2022 ] Eval epoch: 109
616
+ [ Thu Sep 8 13:24:10 2022 ] Epoch 109 Curr Acc: (34290/59477)57.65%
617
+ [ Thu Sep 8 13:24:10 2022 ] Epoch 102 Best Acc 58.16%
618
+ [ Thu Sep 8 13:24:10 2022 ] Training epoch: 110
619
+ [ Thu Sep 8 13:24:10 2022 ] Learning rate: 0.00015000000000000004
620
+ [ Thu Sep 8 13:28:36 2022 ] Mean training loss: 0.0126.
621
+ [ Thu Sep 8 13:28:36 2022 ] Time consumption: [Data]01%, [Network]99%
622
+ [ Thu Sep 8 13:28:36 2022 ] Eval epoch: 110
623
+ [ Thu Sep 8 13:36:27 2022 ] Epoch 110 Curr Acc: (34164/59477)57.44%
624
+ [ Thu Sep 8 13:36:27 2022 ] Epoch 102 Best Acc 58.16%
625
+ [ Thu Sep 8 13:36:27 2022 ] epoch: 102, best accuracy: 0.5816365990214705
626
+ [ Thu Sep 8 13:36:27 2022 ] Experiment: ./work_dir/ntu120/xset_jm
627
+ [ Thu Sep 8 13:36:27 2022 ] # generator parameters: 2.922995 M.
628
+ [ Thu Sep 8 13:36:27 2022 ] Load weights from ./runs/ntu120/xset_jm/runs-101-132294.pt.
629
+ [ Thu Sep 8 13:36:27 2022 ] Eval epoch: 1
630
+ [ Thu Sep 8 13:44:18 2022 ] Epoch 1 Curr Acc: (34594/59477)58.16%
631
+ [ Thu Sep 8 13:44:18 2022 ] Epoch 102 Best Acc 58.16%
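The learning rates recorded in this log follow directly from the run's parameters (base_lr 0.15, warm_up_epoch 10, step [50, 70, 90] in the Parameters line above): a linear warm-up over the first ten epochs, then a tenfold decay once each milestone has been passed. The training script that applies this rule is not part of this upload, so the following is only a reconstruction sketched from the logged values:

def adjust_learning_rate(epoch, base_lr=0.15, warm_up_epoch=10, step=(50, 70, 90)):
    # Linear warm-up: 0.015, 0.030, ..., 0.15 over epochs 1-10 (1-based), matching
    # the "Learning rate:" lines logged for the first ten epochs.
    if epoch <= warm_up_epoch:
        return base_lr * epoch / warm_up_epoch
    # Afterwards multiply by 0.1 for every milestone already passed:
    # 0.15 (epochs 11-50) -> 0.015 (51-70) -> 0.0015 (71-90) -> 0.00015 (91+).
    return base_lr * (0.1 ** sum(epoch > s for s in step))

# adjust_learning_rate(7)  -> 0.105   (logged above as 0.10500000000000001)
# adjust_learning_rate(51) -> 0.015
# adjust_learning_rate(91) -> 0.00015 (up to floating-point formatting)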
ckpt/Others/MST-GCN/ntu120_xsub/xsub_b/AEMST_GCN.py ADDED
@@ -0,0 +1,168 @@
1
+ import torch
2
+ import torch.nn as nn
3
+ import torch.nn.functional as F
4
+
5
+ import numpy as np
6
+ import math
7
+
8
+ import sys
9
+ sys.path.append('../')
10
+ from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \
11
+ MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \
12
+ MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer
13
+ from model.activations import Activations
14
+ from model.utils import import_class, conv_branch_init, conv_init, bn_init
15
+ from model.attentions import Attention_Layer
16
+
17
+ # import model.attentions
18
+
19
+ __block_type__ = {
20
+ 'basic': (Basic_GCN_layer, Basic_TCN_layer),
21
+ 'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer),
22
+ 'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer),
23
+ 'ms': (MS_GCN_layer, MS_TCN_layer),
24
+ 'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer),
25
+ }
26
+
27
+
28
+ class Model(nn.Module):
29
+ def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten,
30
+ **kwargs):
31
+ super(Model, self).__init__()
32
+ kwargs['act'] = Activations(kwargs['act'])
33
+ atten = None if atten == 'None' else atten
34
+ if graph is None:
35
+ raise ValueError()
36
+ else:
37
+ Graph = import_class(graph)
38
+ self.graph = Graph(**graph_args)
39
+ A = self.graph.A
40
+
41
+ self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point)
42
+
43
+ self.layers = nn.ModuleList()
44
+
45
+ for i, block in enumerate(block_args):
46
+ if i == 0:
47
+ self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2],
48
+ kernel_size=kernel_size, stride=block[3], A=A, block_type='basic',
49
+ atten=None, **kwargs))
50
+ else:
51
+ self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2],
52
+ kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type,
53
+ atten=atten, **kwargs))
54
+
55
+ self.gap = nn.AdaptiveAvgPool2d(1)
56
+ self.fc = nn.Linear(block_args[-1][1], num_class)
57
+
58
+ for m in self.modules():
59
+ if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer):
60
+ for mm in m.modules():
61
+ if isinstance(mm, nn.Conv2d):
62
+ conv_branch_init(mm, self.graph.A.shape[0])
63
+ if isinstance(mm, nn.BatchNorm2d):
64
+ bn_init(mm, 1)
65
+ elif isinstance(m, nn.Conv2d):
66
+ conv_init(m)
67
+ elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d):
68
+ bn_init(m, 1)
69
+ elif isinstance(m, nn.Linear):
70
+ nn.init.normal_(m.weight, 0, math.sqrt(2. / num_class))
71
+
72
+ def forward(self, x):
73
+ N, C, T, V, M = x.size()
74
+
75
+ x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T) # N C T V M --> N M V C T
76
+ x = self.data_bn(x)
77
+ x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
78
+
79
+ for i, layer in enumerate(self.layers):
80
+ x = layer(x)
81
+
82
+ features = x
83
+
84
+ x = self.gap(x).view(N, M, -1).mean(dim=1)
85
+ x = self.fc(x)
86
+
87
+ return features, x
88
+
89
+
90
+ class MST_GCN_block(nn.Module):
91
+ def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs):
92
+ super(MST_GCN_block, self).__init__()
93
+ self.atten = atten
94
+ self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A,
95
+ residual=residual, **kwargs)
96
+ self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride,
97
+ residual=residual, **kwargs)
98
+ if atten is not None:
99
+ self.att = Attention_Layer(out_channels, atten, **kwargs)
100
+
101
+ def forward(self, x):
102
+ return self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x))
103
+
104
+
105
+ if __name__ == '__main__':
106
+ import sys
107
+ import time
108
+
109
+ parts = [
110
+ np.array([5, 6, 7, 8, 22, 23]) - 1, # left_arm
111
+ np.array([9, 10, 11, 12, 24, 25]) - 1, # right_arm
112
+ np.array([13, 14, 15, 16]) - 1, # left_leg
113
+ np.array([17, 18, 19, 20]) - 1, # right_leg
114
+ np.array([1, 2, 3, 4, 21]) - 1 # torso
115
+ ]
116
+
117
+ warmup_iter = 3
118
+ test_iter = 10
119
+ sys.path.append('/home/chenzhan/mywork/MST-GCN/')
120
+ from thop import profile
121
+ basic_channels = 112
122
+ cfgs = {
123
+ 'num_class': 2,
124
+ 'num_point': 25,
125
+ 'num_person': 1,
126
+ 'block_args': [[2, basic_channels, False, 1],
127
+ [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1],
128
+ [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1],
129
+ [basic_channels*2, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]],
130
+ 'graph': 'graph.ntu_rgb_d.Graph',
131
+ 'graph_args': {'labeling_mode': 'spatial'},
132
+ 'kernel_size': 9,
133
+ 'block_type': 'ms',
134
+ 'reduct_ratio': 2,
135
+ 'expand_ratio': 0,
136
+ 't_scale': 4,
137
+ 'layer_type': 'sep',
138
+ 'act': 'relu',
139
+ 's_scale': 4,
140
+ 'atten': 'stcja',
141
+ 'bias': True,
142
+ 'parts': parts
143
+ }
144
+
145
+ model = Model(**cfgs)
146
+
147
+ N, C, T, V, M = 4, 2, 16, 25, 1
148
+ inputs = torch.rand(N, C, T, V, M)
149
+
150
+ for i in range(warmup_iter + test_iter):
151
+ if i == warmup_iter:
152
+ start_time = time.time()
153
+ outputs = model(inputs)
154
+ end_time = time.time()
155
+
156
+ total_time = end_time - start_time
157
+ print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format(
158
+ total_time, test_iter, total_time/test_iter/N))
159
+
160
+ print(outputs[1].size())  # forward() returns a (features, logits) tuple, so index the logits before calling .size()
161
+
162
+ hereflops, params = profile(model, inputs=(inputs,), verbose=False)
163
+ print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N))
164
+ print('# Params is {} M'.format(sum(param.numel() for param in model.parameters()) / 10 ** 6))
165
+
166
+
167
+
168
+
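In Model.__init__ above, each row of block_args is consumed as (in_channels, out_channels, residual, stride) when an MST_GCN_block is built. For reference, the list below simply restates the block_args used by every NTU-120 config in this upload; the variable name and comments are illustrative, not from the repository:

# (in_channels, out_channels, residual, stride) per MST_GCN_block
ntu120_block_args = [
    [3,   112, False, 1],                                           # input block, no residual
    [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1],
    [112, 224, True, 2],                                            # stride 2 halves the temporal length
    [224, 224, True, 1], [224, 224, True, 1],
    [224, 448, True, 2],                                            # stride 2 again
    [448, 448, True, 1], [448, 448, True, 1],
]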
ckpt/Others/MST-GCN/ntu120_xsub/xsub_b/config.yaml ADDED
@@ -0,0 +1,107 @@
1
+ base_lr: 0.15
2
+ batch_size: 8
3
+ config: config/ntu120/xsub_b.yaml
4
+ device:
5
+ - 0
6
+ eval_interval: 5
7
+ feeder: feeders.feeder.Feeder
8
+ ignore_weights: []
9
+ local_rank: 0
10
+ log_interval: 100
11
+ model: model.AEMST_GCN.Model
12
+ model_args:
13
+ act: relu
14
+ atten: None
15
+ bias: true
16
+ block_args:
17
+ - - 3
18
+ - 112
19
+ - false
20
+ - 1
21
+ - - 112
22
+ - 112
23
+ - true
24
+ - 1
25
+ - - 112
26
+ - 112
27
+ - true
28
+ - 1
29
+ - - 112
30
+ - 112
31
+ - true
32
+ - 1
33
+ - - 112
34
+ - 224
35
+ - true
36
+ - 2
37
+ - - 224
38
+ - 224
39
+ - true
40
+ - 1
41
+ - - 224
42
+ - 224
43
+ - true
44
+ - 1
45
+ - - 224
46
+ - 448
47
+ - true
48
+ - 2
49
+ - - 448
50
+ - 448
51
+ - true
52
+ - 1
53
+ - - 448
54
+ - 448
55
+ - true
56
+ - 1
57
+ block_type: ms
58
+ expand_ratio: 0
59
+ graph: graph.ntu_rgb_d.Graph
60
+ graph_args:
61
+ labeling_mode: spatial
62
+ kernel_size: 9
63
+ layer_type: basic
64
+ num_class: 120
65
+ num_person: 2
66
+ num_point: 25
67
+ reduct_ratio: 2
68
+ s_scale: 4
69
+ t_scale: 4
70
+ model_path: ''
71
+ model_saved_name: ./runs/ntu120/xsub_b/runs
72
+ nesterov: true
73
+ num_epoch: 110
74
+ num_worker: 32
75
+ only_train_epoch: 0
76
+ only_train_part: false
77
+ optimizer: SGD
78
+ phase: train
79
+ print_log: true
80
+ save_interval: 1
81
+ save_score: true
82
+ seed: 1
83
+ show_topk:
84
+ - 1
85
+ - 5
86
+ start_epoch: 0
87
+ step:
88
+ - 50
89
+ - 70
90
+ - 90
91
+ test_batch_size: 64
92
+ test_feeder_args:
93
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_bone.npy
94
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl
95
+ train_feeder_args:
96
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_bone.npy
97
+ debug: false
98
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl
99
+ normalization: false
100
+ random_choose: false
101
+ random_move: false
102
+ random_shift: false
103
+ window_size: -1
104
+ warm_up_epoch: 10
105
+ weight_decay: 0.0001
106
+ weights: null
107
+ work_dir: ./work_dir/ntu120/xsub_b
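The model_args block in this config maps one-to-one onto the keyword arguments of model.AEMST_GCN.Model. Below is a minimal sketch of rebuilding the network from this file; it assumes the MST-GCN code base that provides model/ and graph/ is importable, and the dummy tensor shape and checkpoint path are only illustrative:

import yaml
import torch
from model.AEMST_GCN import Model  # requires the MST-GCN repository on sys.path

with open('ckpt/Others/MST-GCN/ntu120_xsub/xsub_b/config.yaml') as f:
    cfg = yaml.safe_load(f)

model = Model(**cfg['model_args'])
print('# generator parameters: %f M.'
      % (sum(p.numel() for p in model.parameters()) / 1e6))  # ~2.922995 M, as in log.txt

# The bone stream is trained on N x C=3 x T x V=25 x M=2 skeleton tensors.
x = torch.randn(2, 3, 64, 25, 2)
features, logits = model(x)   # forward() returns (features, class scores)
print(logits.shape)           # torch.Size([2, 120])

# Restoring trained weights would look like the line below (path illustrative; the
# checkpoint .pt files are not part of this commit):
# model.load_state_dict(torch.load('runs-xx-xxxxx.pt', map_location='cpu'))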
ckpt/Others/MST-GCN/ntu120_xsub/xsub_b/epoch1_test_score.pkl ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:21e6be77baaebfe7bd8e924bcec2a71be07bea20e769b2e3999c7d38e5d33951
3
+ size 29946137
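epoch1_test_score.pkl stores the per-sample class scores written out by the final evaluation pass (save_score: true in the config; the "Eval epoch: 1" block at the end of each log.txt). A minimal sketch of recomputing top-1 accuracy from it, assuming the file (fetched through git-lfs) is a pickled dict mapping sample name to a 120-way score vector and that val_label.pkl holds a (sample_names, labels) pair, as is usual for this family of skeleton-recognition code bases:

import pickle
import numpy as np

with open('ckpt/Others/MST-GCN/ntu120_xsub/xsub_b/epoch1_test_score.pkl', 'rb') as f:
    scores = pickle.load(f)          # assumed: {sample_name: np.ndarray of 120 scores}

# Label file named by test_feeder_args in the config above.
with open('/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl', 'rb') as f:
    names, labels = pickle.load(f)   # assumed: (sample_names, labels)

preds = np.array([int(np.argmax(scores[n])) for n in names])
acc = (preds == np.array(labels, dtype=int)).mean()
print(f'top-1 accuracy: {acc:.2%}')  # should reproduce the final "Curr Acc" line in log.txt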
ckpt/Others/MST-GCN/ntu120_xsub/xsub_b/log.txt ADDED
@@ -0,0 +1,631 @@
1
+ [ Wed Sep 7 21:34:12 2022 ] # generator parameters: 2.922995 M.
2
+ [ Wed Sep 7 21:34:19 2022 ] Parameters:
3
+ {'work_dir': './work_dir/ntu120/xsub_b', 'model_saved_name': './runs/ntu120/xsub_b/runs', 'config': 'config/ntu120/xsub_b.yaml', 'phase': 'train', 'save_score': True, 'seed': 1, 'log_interval': 100, 'save_interval': 1, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_bone.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_bone.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl'}, 'model': 'model.AEMST_GCN.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'block_args': [[3, 112, False, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 224, True, 2], [224, 224, True, 1], [224, 224, True, 1], [224, 448, True, 2], [448, 448, True, 1], [448, 448, True, 1]], 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}, 'kernel_size': 9, 'block_type': 'ms', 'reduct_ratio': 2, 'expand_ratio': 0, 's_scale': 4, 't_scale': 4, 'layer_type': 'basic', 'act': 'relu', 'atten': 'None', 'bias': True}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.15, 'step': [50, 70, 90], 'device': [0], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 8, 'test_batch_size': 64, 'start_epoch': 0, 'model_path': '', 'num_epoch': 110, 'weight_decay': 0.0001, 'only_train_part': False, 'only_train_epoch': 0, 'warm_up_epoch': 10, 'local_rank': 0}
4
+
5
+ [ Wed Sep 7 21:34:19 2022 ] Training epoch: 1
6
+ [ Wed Sep 7 21:34:19 2022 ] Learning rate: 0.015
7
+ [ Wed Sep 7 21:40:53 2022 ] Mean training loss: 3.3579.
8
+ [ Wed Sep 7 21:40:53 2022 ] Time consumption: [Data]01%, [Network]99%
9
+ [ Wed Sep 7 21:40:53 2022 ] Training epoch: 2
10
+ [ Wed Sep 7 21:40:53 2022 ] Learning rate: 0.03
11
+ [ Wed Sep 7 21:47:29 2022 ] Mean training loss: 2.4453.
12
+ [ Wed Sep 7 21:47:29 2022 ] Time consumption: [Data]01%, [Network]99%
13
+ [ Wed Sep 7 21:47:29 2022 ] Training epoch: 3
14
+ [ Wed Sep 7 21:47:29 2022 ] Learning rate: 0.045
15
+ [ Wed Sep 7 21:54:04 2022 ] Mean training loss: 1.9786.
16
+ [ Wed Sep 7 21:54:04 2022 ] Time consumption: [Data]01%, [Network]99%
17
+ [ Wed Sep 7 21:54:04 2022 ] Training epoch: 4
18
+ [ Wed Sep 7 21:54:04 2022 ] Learning rate: 0.06
19
+ [ Wed Sep 7 22:00:40 2022 ] Mean training loss: 1.7189.
20
+ [ Wed Sep 7 22:00:40 2022 ] Time consumption: [Data]01%, [Network]99%
21
+ [ Wed Sep 7 22:00:41 2022 ] Training epoch: 5
22
+ [ Wed Sep 7 22:00:41 2022 ] Learning rate: 0.075
23
+ [ Wed Sep 7 22:07:16 2022 ] Mean training loss: 1.5571.
24
+ [ Wed Sep 7 22:07:16 2022 ] Time consumption: [Data]01%, [Network]99%
25
+ [ Wed Sep 7 22:07:16 2022 ] Training epoch: 6
26
+ [ Wed Sep 7 22:07:16 2022 ] Learning rate: 0.09
27
+ [ Wed Sep 7 22:13:52 2022 ] Mean training loss: 1.4579.
28
+ [ Wed Sep 7 22:13:52 2022 ] Time consumption: [Data]01%, [Network]99%
29
+ [ Wed Sep 7 22:13:52 2022 ] Training epoch: 7
30
+ [ Wed Sep 7 22:13:52 2022 ] Learning rate: 0.10500000000000001
31
+ [ Wed Sep 7 22:20:27 2022 ] Mean training loss: 1.3949.
32
+ [ Wed Sep 7 22:20:27 2022 ] Time consumption: [Data]01%, [Network]99%
33
+ [ Wed Sep 7 22:20:27 2022 ] Training epoch: 8
34
+ [ Wed Sep 7 22:20:27 2022 ] Learning rate: 0.12
35
+ [ Wed Sep 7 22:27:02 2022 ] Mean training loss: 1.3628.
36
+ [ Wed Sep 7 22:27:02 2022 ] Time consumption: [Data]01%, [Network]99%
37
+ [ Wed Sep 7 22:27:02 2022 ] Training epoch: 9
38
+ [ Wed Sep 7 22:27:02 2022 ] Learning rate: 0.13499999999999998
39
+ [ Wed Sep 7 22:33:37 2022 ] Mean training loss: 1.3236.
40
+ [ Wed Sep 7 22:33:37 2022 ] Time consumption: [Data]01%, [Network]99%
41
+ [ Wed Sep 7 22:33:37 2022 ] Training epoch: 10
42
+ [ Wed Sep 7 22:33:37 2022 ] Learning rate: 0.15
43
+ [ Wed Sep 7 22:40:12 2022 ] Mean training loss: 1.3199.
44
+ [ Wed Sep 7 22:40:12 2022 ] Time consumption: [Data]01%, [Network]99%
45
+ [ Wed Sep 7 22:40:12 2022 ] Training epoch: 11
46
+ [ Wed Sep 7 22:40:12 2022 ] Learning rate: 0.15
47
+ [ Wed Sep 7 22:46:48 2022 ] Mean training loss: 1.2531.
48
+ [ Wed Sep 7 22:46:48 2022 ] Time consumption: [Data]01%, [Network]99%
49
+ [ Wed Sep 7 22:46:48 2022 ] Training epoch: 12
50
+ [ Wed Sep 7 22:46:48 2022 ] Learning rate: 0.15
51
+ [ Wed Sep 7 22:53:23 2022 ] Mean training loss: 1.2180.
52
+ [ Wed Sep 7 22:53:23 2022 ] Time consumption: [Data]01%, [Network]99%
53
+ [ Wed Sep 7 22:53:23 2022 ] Training epoch: 13
54
+ [ Wed Sep 7 22:53:23 2022 ] Learning rate: 0.15
55
+ [ Wed Sep 7 22:59:59 2022 ] Mean training loss: 1.1717.
56
+ [ Wed Sep 7 22:59:59 2022 ] Time consumption: [Data]01%, [Network]99%
57
+ [ Wed Sep 7 22:59:59 2022 ] Training epoch: 14
58
+ [ Wed Sep 7 22:59:59 2022 ] Learning rate: 0.15
59
+ [ Wed Sep 7 23:06:34 2022 ] Mean training loss: 1.1578.
60
+ [ Wed Sep 7 23:06:34 2022 ] Time consumption: [Data]01%, [Network]99%
61
+ [ Wed Sep 7 23:06:34 2022 ] Training epoch: 15
62
+ [ Wed Sep 7 23:06:34 2022 ] Learning rate: 0.15
63
+ [ Wed Sep 7 23:13:10 2022 ] Mean training loss: 1.1288.
64
+ [ Wed Sep 7 23:13:10 2022 ] Time consumption: [Data]01%, [Network]99%
65
+ [ Wed Sep 7 23:13:10 2022 ] Training epoch: 16
66
+ [ Wed Sep 7 23:13:10 2022 ] Learning rate: 0.15
67
+ [ Wed Sep 7 23:19:46 2022 ] Mean training loss: 1.1052.
68
+ [ Wed Sep 7 23:19:46 2022 ] Time consumption: [Data]01%, [Network]99%
69
+ [ Wed Sep 7 23:19:46 2022 ] Training epoch: 17
70
+ [ Wed Sep 7 23:19:46 2022 ] Learning rate: 0.15
71
+ [ Wed Sep 7 23:26:21 2022 ] Mean training loss: 1.0852.
72
+ [ Wed Sep 7 23:26:21 2022 ] Time consumption: [Data]01%, [Network]99%
73
+ [ Wed Sep 7 23:26:21 2022 ] Training epoch: 18
74
+ [ Wed Sep 7 23:26:21 2022 ] Learning rate: 0.15
75
+ [ Wed Sep 7 23:32:57 2022 ] Mean training loss: 1.0627.
76
+ [ Wed Sep 7 23:32:57 2022 ] Time consumption: [Data]01%, [Network]99%
77
+ [ Wed Sep 7 23:32:57 2022 ] Training epoch: 19
78
+ [ Wed Sep 7 23:32:57 2022 ] Learning rate: 0.15
79
+ [ Wed Sep 7 23:39:33 2022 ] Mean training loss: 1.0540.
80
+ [ Wed Sep 7 23:39:33 2022 ] Time consumption: [Data]01%, [Network]99%
81
+ [ Wed Sep 7 23:39:33 2022 ] Training epoch: 20
82
+ [ Wed Sep 7 23:39:33 2022 ] Learning rate: 0.15
83
+ [ Wed Sep 7 23:46:08 2022 ] Mean training loss: 1.0431.
84
+ [ Wed Sep 7 23:46:08 2022 ] Time consumption: [Data]01%, [Network]99%
85
+ [ Wed Sep 7 23:46:08 2022 ] Training epoch: 21
86
+ [ Wed Sep 7 23:46:08 2022 ] Learning rate: 0.15
87
+ [ Wed Sep 7 23:52:43 2022 ] Mean training loss: 1.0358.
88
+ [ Wed Sep 7 23:52:43 2022 ] Time consumption: [Data]01%, [Network]99%
89
+ [ Wed Sep 7 23:52:43 2022 ] Training epoch: 22
90
+ [ Wed Sep 7 23:52:43 2022 ] Learning rate: 0.15
91
+ [ Wed Sep 7 23:59:19 2022 ] Mean training loss: 1.0118.
92
+ [ Wed Sep 7 23:59:19 2022 ] Time consumption: [Data]01%, [Network]99%
93
+ [ Wed Sep 7 23:59:19 2022 ] Training epoch: 23
94
+ [ Wed Sep 7 23:59:19 2022 ] Learning rate: 0.15
95
+ [ Thu Sep 8 00:05:54 2022 ] Mean training loss: 1.0057.
96
+ [ Thu Sep 8 00:05:54 2022 ] Time consumption: [Data]01%, [Network]99%
97
+ [ Thu Sep 8 00:05:54 2022 ] Training epoch: 24
98
+ [ Thu Sep 8 00:05:54 2022 ] Learning rate: 0.15
99
+ [ Thu Sep 8 00:12:31 2022 ] Mean training loss: 1.0026.
100
+ [ Thu Sep 8 00:12:31 2022 ] Time consumption: [Data]01%, [Network]99%
101
+ [ Thu Sep 8 00:12:31 2022 ] Training epoch: 25
102
+ [ Thu Sep 8 00:12:31 2022 ] Learning rate: 0.15
103
+ [ Thu Sep 8 00:19:08 2022 ] Mean training loss: 0.9769.
104
+ [ Thu Sep 8 00:19:08 2022 ] Time consumption: [Data]01%, [Network]99%
105
+ [ Thu Sep 8 00:19:08 2022 ] Training epoch: 26
106
+ [ Thu Sep 8 00:19:08 2022 ] Learning rate: 0.15
107
+ [ Thu Sep 8 00:25:43 2022 ] Mean training loss: 0.9739.
108
+ [ Thu Sep 8 00:25:43 2022 ] Time consumption: [Data]01%, [Network]99%
109
+ [ Thu Sep 8 00:25:43 2022 ] Training epoch: 27
110
+ [ Thu Sep 8 00:25:43 2022 ] Learning rate: 0.15
111
+ [ Thu Sep 8 00:32:20 2022 ] Mean training loss: 0.9803.
112
+ [ Thu Sep 8 00:32:20 2022 ] Time consumption: [Data]01%, [Network]99%
113
+ [ Thu Sep 8 00:32:20 2022 ] Training epoch: 28
114
+ [ Thu Sep 8 00:32:20 2022 ] Learning rate: 0.15
115
+ [ Thu Sep 8 00:38:55 2022 ] Mean training loss: 0.9674.
116
+ [ Thu Sep 8 00:38:55 2022 ] Time consumption: [Data]01%, [Network]99%
117
+ [ Thu Sep 8 00:38:55 2022 ] Training epoch: 29
118
+ [ Thu Sep 8 00:38:55 2022 ] Learning rate: 0.15
119
+ [ Thu Sep 8 00:45:31 2022 ] Mean training loss: 0.9484.
120
+ [ Thu Sep 8 00:45:31 2022 ] Time consumption: [Data]01%, [Network]99%
121
+ [ Thu Sep 8 00:45:31 2022 ] Training epoch: 30
122
+ [ Thu Sep 8 00:45:31 2022 ] Learning rate: 0.15
123
+ [ Thu Sep 8 00:52:07 2022 ] Mean training loss: 0.9565.
124
+ [ Thu Sep 8 00:52:07 2022 ] Time consumption: [Data]01%, [Network]99%
125
+ [ Thu Sep 8 00:52:07 2022 ] Training epoch: 31
126
+ [ Thu Sep 8 00:52:07 2022 ] Learning rate: 0.15
127
+ [ Thu Sep 8 00:58:42 2022 ] Mean training loss: 0.9433.
128
+ [ Thu Sep 8 00:58:42 2022 ] Time consumption: [Data]01%, [Network]99%
129
+ [ Thu Sep 8 00:58:42 2022 ] Training epoch: 32
130
+ [ Thu Sep 8 00:58:42 2022 ] Learning rate: 0.15
131
+ [ Thu Sep 8 01:05:17 2022 ] Mean training loss: 0.9634.
132
+ [ Thu Sep 8 01:05:17 2022 ] Time consumption: [Data]01%, [Network]99%
133
+ [ Thu Sep 8 01:05:17 2022 ] Training epoch: 33
134
+ [ Thu Sep 8 01:05:17 2022 ] Learning rate: 0.15
135
+ [ Thu Sep 8 01:11:52 2022 ] Mean training loss: 0.9298.
136
+ [ Thu Sep 8 01:11:52 2022 ] Time consumption: [Data]01%, [Network]99%
137
+ [ Thu Sep 8 01:11:52 2022 ] Training epoch: 34
138
+ [ Thu Sep 8 01:11:52 2022 ] Learning rate: 0.15
139
+ [ Thu Sep 8 01:18:27 2022 ] Mean training loss: 0.9412.
140
+ [ Thu Sep 8 01:18:27 2022 ] Time consumption: [Data]01%, [Network]99%
141
+ [ Thu Sep 8 01:18:27 2022 ] Training epoch: 35
142
+ [ Thu Sep 8 01:18:27 2022 ] Learning rate: 0.15
143
+ [ Thu Sep 8 01:25:03 2022 ] Mean training loss: 0.9290.
144
+ [ Thu Sep 8 01:25:03 2022 ] Time consumption: [Data]01%, [Network]99%
145
+ [ Thu Sep 8 01:25:03 2022 ] Training epoch: 36
146
+ [ Thu Sep 8 01:25:03 2022 ] Learning rate: 0.15
147
+ [ Thu Sep 8 01:31:39 2022 ] Mean training loss: 0.9283.
148
+ [ Thu Sep 8 01:31:39 2022 ] Time consumption: [Data]01%, [Network]99%
149
+ [ Thu Sep 8 01:31:39 2022 ] Training epoch: 37
150
+ [ Thu Sep 8 01:31:39 2022 ] Learning rate: 0.15
151
+ [ Thu Sep 8 01:38:14 2022 ] Mean training loss: 0.9286.
152
+ [ Thu Sep 8 01:38:14 2022 ] Time consumption: [Data]01%, [Network]99%
153
+ [ Thu Sep 8 01:38:14 2022 ] Training epoch: 38
154
+ [ Thu Sep 8 01:38:14 2022 ] Learning rate: 0.15
155
+ [ Thu Sep 8 01:44:49 2022 ] Mean training loss: 0.9283.
156
+ [ Thu Sep 8 01:44:49 2022 ] Time consumption: [Data]01%, [Network]99%
157
+ [ Thu Sep 8 01:44:49 2022 ] Training epoch: 39
158
+ [ Thu Sep 8 01:44:49 2022 ] Learning rate: 0.15
159
+ [ Thu Sep 8 01:51:25 2022 ] Mean training loss: 0.9218.
160
+ [ Thu Sep 8 01:51:25 2022 ] Time consumption: [Data]01%, [Network]99%
161
+ [ Thu Sep 8 01:51:25 2022 ] Training epoch: 40
162
+ [ Thu Sep 8 01:51:25 2022 ] Learning rate: 0.15
163
+ [ Thu Sep 8 01:58:00 2022 ] Mean training loss: 0.9245.
164
+ [ Thu Sep 8 01:58:00 2022 ] Time consumption: [Data]01%, [Network]99%
165
+ [ Thu Sep 8 01:58:00 2022 ] Training epoch: 41
166
+ [ Thu Sep 8 01:58:00 2022 ] Learning rate: 0.15
167
+ [ Thu Sep 8 02:04:35 2022 ] Mean training loss: 0.9271.
168
+ [ Thu Sep 8 02:04:35 2022 ] Time consumption: [Data]01%, [Network]99%
169
+ [ Thu Sep 8 02:04:35 2022 ] Training epoch: 42
170
+ [ Thu Sep 8 02:04:35 2022 ] Learning rate: 0.15
171
+ [ Thu Sep 8 02:11:10 2022 ] Mean training loss: 0.9300.
172
+ [ Thu Sep 8 02:11:10 2022 ] Time consumption: [Data]01%, [Network]99%
173
+ [ Thu Sep 8 02:11:10 2022 ] Training epoch: 43
174
+ [ Thu Sep 8 02:11:10 2022 ] Learning rate: 0.15
175
+ [ Thu Sep 8 02:17:46 2022 ] Mean training loss: 0.9108.
176
+ [ Thu Sep 8 02:17:46 2022 ] Time consumption: [Data]01%, [Network]99%
177
+ [ Thu Sep 8 02:17:46 2022 ] Training epoch: 44
178
+ [ Thu Sep 8 02:17:46 2022 ] Learning rate: 0.15
179
+ [ Thu Sep 8 02:24:21 2022 ] Mean training loss: 0.9156.
180
+ [ Thu Sep 8 02:24:21 2022 ] Time consumption: [Data]01%, [Network]99%
181
+ [ Thu Sep 8 02:24:21 2022 ] Training epoch: 45
182
+ [ Thu Sep 8 02:24:21 2022 ] Learning rate: 0.15
183
+ [ Thu Sep 8 02:30:56 2022 ] Mean training loss: 0.9297.
184
+ [ Thu Sep 8 02:30:56 2022 ] Time consumption: [Data]01%, [Network]99%
185
+ [ Thu Sep 8 02:30:56 2022 ] Training epoch: 46
186
+ [ Thu Sep 8 02:30:56 2022 ] Learning rate: 0.15
187
+ [ Thu Sep 8 02:37:30 2022 ] Mean training loss: 0.9178.
188
+ [ Thu Sep 8 02:37:30 2022 ] Time consumption: [Data]01%, [Network]99%
189
+ [ Thu Sep 8 02:37:30 2022 ] Training epoch: 47
190
+ [ Thu Sep 8 02:37:30 2022 ] Learning rate: 0.15
191
+ [ Thu Sep 8 02:44:05 2022 ] Mean training loss: 0.9152.
192
+ [ Thu Sep 8 02:44:05 2022 ] Time consumption: [Data]01%, [Network]99%
193
+ [ Thu Sep 8 02:44:05 2022 ] Training epoch: 48
194
+ [ Thu Sep 8 02:44:05 2022 ] Learning rate: 0.15
195
+ [ Thu Sep 8 02:50:42 2022 ] Mean training loss: 0.9138.
196
+ [ Thu Sep 8 02:50:42 2022 ] Time consumption: [Data]01%, [Network]99%
197
+ [ Thu Sep 8 02:50:42 2022 ] Training epoch: 49
198
+ [ Thu Sep 8 02:50:42 2022 ] Learning rate: 0.15
199
+ [ Thu Sep 8 02:57:20 2022 ] Mean training loss: 0.9059.
200
+ [ Thu Sep 8 02:57:20 2022 ] Time consumption: [Data]01%, [Network]99%
201
+ [ Thu Sep 8 02:57:20 2022 ] Training epoch: 50
202
+ [ Thu Sep 8 02:57:20 2022 ] Learning rate: 0.15
203
+ [ Thu Sep 8 03:03:57 2022 ] Mean training loss: 0.9049.
204
+ [ Thu Sep 8 03:03:57 2022 ] Time consumption: [Data]01%, [Network]99%
205
+ [ Thu Sep 8 03:03:57 2022 ] Training epoch: 51
206
+ [ Thu Sep 8 03:03:57 2022 ] Learning rate: 0.015
207
+ [ Thu Sep 8 03:10:35 2022 ] Mean training loss: 0.4705.
208
+ [ Thu Sep 8 03:10:35 2022 ] Time consumption: [Data]01%, [Network]99%
209
+ [ Thu Sep 8 03:10:35 2022 ] Eval epoch: 51
210
+ [ Thu Sep 8 03:17:25 2022 ] Epoch 51 Curr Acc: (28300/50919)55.58%
211
+ [ Thu Sep 8 03:17:25 2022 ] Epoch 51 Best Acc 55.58%
212
+ [ Thu Sep 8 03:17:25 2022 ] Training epoch: 52
213
+ [ Thu Sep 8 03:17:25 2022 ] Learning rate: 0.015
214
+ [ Thu Sep 8 03:24:00 2022 ] Mean training loss: 0.3493.
215
+ [ Thu Sep 8 03:24:00 2022 ] Time consumption: [Data]01%, [Network]99%
216
+ [ Thu Sep 8 03:24:00 2022 ] Eval epoch: 52
217
+ [ Thu Sep 8 03:30:44 2022 ] Epoch 52 Curr Acc: (29876/50919)58.67%
218
+ [ Thu Sep 8 03:30:44 2022 ] Epoch 52 Best Acc 58.67%
219
+ [ Thu Sep 8 03:30:44 2022 ] Training epoch: 53
220
+ [ Thu Sep 8 03:30:44 2022 ] Learning rate: 0.015
221
+ [ Thu Sep 8 03:37:19 2022 ] Mean training loss: 0.2904.
222
+ [ Thu Sep 8 03:37:19 2022 ] Time consumption: [Data]01%, [Network]99%
223
+ [ Thu Sep 8 03:37:19 2022 ] Eval epoch: 53
224
+ [ Thu Sep 8 03:44:02 2022 ] Epoch 53 Curr Acc: (29712/50919)58.35%
225
+ [ Thu Sep 8 03:44:02 2022 ] Epoch 52 Best Acc 58.67%
226
+ [ Thu Sep 8 03:44:02 2022 ] Training epoch: 54
227
+ [ Thu Sep 8 03:44:02 2022 ] Learning rate: 0.015
228
+ [ Thu Sep 8 03:50:37 2022 ] Mean training loss: 0.2570.
229
+ [ Thu Sep 8 03:50:37 2022 ] Time consumption: [Data]01%, [Network]99%
230
+ [ Thu Sep 8 03:50:37 2022 ] Eval epoch: 54
231
+ [ Thu Sep 8 03:57:20 2022 ] Epoch 54 Curr Acc: (29786/50919)58.50%
232
+ [ Thu Sep 8 03:57:20 2022 ] Epoch 52 Best Acc 58.67%
233
+ [ Thu Sep 8 03:57:20 2022 ] Training epoch: 55
234
+ [ Thu Sep 8 03:57:20 2022 ] Learning rate: 0.015
235
+ [ Thu Sep 8 04:03:56 2022 ] Mean training loss: 0.2286.
236
+ [ Thu Sep 8 04:03:56 2022 ] Time consumption: [Data]01%, [Network]99%
237
+ [ Thu Sep 8 04:03:56 2022 ] Eval epoch: 55
238
+ [ Thu Sep 8 04:10:39 2022 ] Epoch 55 Curr Acc: (29113/50919)57.18%
239
+ [ Thu Sep 8 04:10:39 2022 ] Epoch 52 Best Acc 58.67%
240
+ [ Thu Sep 8 04:10:39 2022 ] Training epoch: 56
241
+ [ Thu Sep 8 04:10:39 2022 ] Learning rate: 0.015
242
+ [ Thu Sep 8 04:17:14 2022 ] Mean training loss: 0.2013.
243
+ [ Thu Sep 8 04:17:14 2022 ] Time consumption: [Data]01%, [Network]99%
244
+ [ Thu Sep 8 04:17:14 2022 ] Eval epoch: 56
245
+ [ Thu Sep 8 04:23:57 2022 ] Epoch 56 Curr Acc: (30500/50919)59.90%
246
+ [ Thu Sep 8 04:23:57 2022 ] Epoch 56 Best Acc 59.90%
247
+ [ Thu Sep 8 04:23:57 2022 ] Training epoch: 57
248
+ [ Thu Sep 8 04:23:57 2022 ] Learning rate: 0.015
249
+ [ Thu Sep 8 04:30:32 2022 ] Mean training loss: 0.1756.
250
+ [ Thu Sep 8 04:30:32 2022 ] Time consumption: [Data]01%, [Network]99%
251
+ [ Thu Sep 8 04:30:32 2022 ] Eval epoch: 57
252
+ [ Thu Sep 8 04:37:16 2022 ] Epoch 57 Curr Acc: (29922/50919)58.76%
253
+ [ Thu Sep 8 04:37:16 2022 ] Epoch 56 Best Acc 59.90%
254
+ [ Thu Sep 8 04:37:16 2022 ] Training epoch: 58
255
+ [ Thu Sep 8 04:37:16 2022 ] Learning rate: 0.015
256
+ [ Thu Sep 8 04:43:52 2022 ] Mean training loss: 0.1621.
257
+ [ Thu Sep 8 04:43:52 2022 ] Time consumption: [Data]01%, [Network]99%
258
+ [ Thu Sep 8 04:43:52 2022 ] Eval epoch: 58
259
+ [ Thu Sep 8 04:50:35 2022 ] Epoch 58 Curr Acc: (29356/50919)57.65%
260
+ [ Thu Sep 8 04:50:35 2022 ] Epoch 56 Best Acc 59.90%
261
+ [ Thu Sep 8 04:50:35 2022 ] Training epoch: 59
262
+ [ Thu Sep 8 04:50:35 2022 ] Learning rate: 0.015
263
+ [ Thu Sep 8 04:57:10 2022 ] Mean training loss: 0.1457.
264
+ [ Thu Sep 8 04:57:10 2022 ] Time consumption: [Data]01%, [Network]99%
265
+ [ Thu Sep 8 04:57:10 2022 ] Eval epoch: 59
266
+ [ Thu Sep 8 05:03:53 2022 ] Epoch 59 Curr Acc: (29717/50919)58.36%
267
+ [ Thu Sep 8 05:03:53 2022 ] Epoch 56 Best Acc 59.90%
268
+ [ Thu Sep 8 05:03:53 2022 ] Training epoch: 60
269
+ [ Thu Sep 8 05:03:53 2022 ] Learning rate: 0.015
270
+ [ Thu Sep 8 05:10:28 2022 ] Mean training loss: 0.1316.
271
+ [ Thu Sep 8 05:10:28 2022 ] Time consumption: [Data]01%, [Network]99%
272
+ [ Thu Sep 8 05:10:29 2022 ] Eval epoch: 60
273
+ [ Thu Sep 8 05:17:11 2022 ] Epoch 60 Curr Acc: (29844/50919)58.61%
274
+ [ Thu Sep 8 05:17:11 2022 ] Epoch 56 Best Acc 59.90%
275
+ [ Thu Sep 8 05:17:11 2022 ] Training epoch: 61
276
+ [ Thu Sep 8 05:17:11 2022 ] Learning rate: 0.015
277
+ [ Thu Sep 8 05:23:47 2022 ] Mean training loss: 0.1264.
278
+ [ Thu Sep 8 05:23:47 2022 ] Time consumption: [Data]01%, [Network]99%
279
+ [ Thu Sep 8 05:23:47 2022 ] Eval epoch: 61
280
+ [ Thu Sep 8 05:30:30 2022 ] Epoch 61 Curr Acc: (29379/50919)57.70%
281
+ [ Thu Sep 8 05:30:30 2022 ] Epoch 56 Best Acc 59.90%
282
+ [ Thu Sep 8 05:30:30 2022 ] Training epoch: 62
283
+ [ Thu Sep 8 05:30:30 2022 ] Learning rate: 0.015
284
+ [ Thu Sep 8 05:37:06 2022 ] Mean training loss: 0.1152.
285
+ [ Thu Sep 8 05:37:06 2022 ] Time consumption: [Data]01%, [Network]99%
286
+ [ Thu Sep 8 05:37:06 2022 ] Eval epoch: 62
287
+ [ Thu Sep 8 05:43:49 2022 ] Epoch 62 Curr Acc: (29449/50919)57.83%
288
+ [ Thu Sep 8 05:43:49 2022 ] Epoch 56 Best Acc 59.90%
289
+ [ Thu Sep 8 05:43:49 2022 ] Training epoch: 63
290
+ [ Thu Sep 8 05:43:49 2022 ] Learning rate: 0.015
291
+ [ Thu Sep 8 05:50:24 2022 ] Mean training loss: 0.1107.
292
+ [ Thu Sep 8 05:50:24 2022 ] Time consumption: [Data]01%, [Network]99%
293
+ [ Thu Sep 8 05:50:24 2022 ] Eval epoch: 63
294
+ [ Thu Sep 8 05:57:07 2022 ] Epoch 63 Curr Acc: (29466/50919)57.87%
295
+ [ Thu Sep 8 05:57:07 2022 ] Epoch 56 Best Acc 59.90%
296
+ [ Thu Sep 8 05:57:07 2022 ] Training epoch: 64
297
+ [ Thu Sep 8 05:57:07 2022 ] Learning rate: 0.015
298
+ [ Thu Sep 8 06:03:43 2022 ] Mean training loss: 0.1082.
299
+ [ Thu Sep 8 06:03:43 2022 ] Time consumption: [Data]01%, [Network]99%
300
+ [ Thu Sep 8 06:03:43 2022 ] Eval epoch: 64
301
+ [ Thu Sep 8 06:10:26 2022 ] Epoch 64 Curr Acc: (28773/50919)56.51%
302
+ [ Thu Sep 8 06:10:26 2022 ] Epoch 56 Best Acc 59.90%
303
+ [ Thu Sep 8 06:10:26 2022 ] Training epoch: 65
304
+ [ Thu Sep 8 06:10:26 2022 ] Learning rate: 0.015
305
+ [ Thu Sep 8 06:17:01 2022 ] Mean training loss: 0.1056.
306
+ [ Thu Sep 8 06:17:01 2022 ] Time consumption: [Data]01%, [Network]99%
307
+ [ Thu Sep 8 06:17:02 2022 ] Eval epoch: 65
308
+ [ Thu Sep 8 06:23:44 2022 ] Epoch 65 Curr Acc: (28107/50919)55.20%
309
+ [ Thu Sep 8 06:23:44 2022 ] Epoch 56 Best Acc 59.90%
310
+ [ Thu Sep 8 06:23:44 2022 ] Training epoch: 66
311
+ [ Thu Sep 8 06:23:44 2022 ] Learning rate: 0.015
312
+ [ Thu Sep 8 06:30:19 2022 ] Mean training loss: 0.1126.
313
+ [ Thu Sep 8 06:30:19 2022 ] Time consumption: [Data]01%, [Network]99%
314
+ [ Thu Sep 8 06:30:19 2022 ] Eval epoch: 66
315
+ [ Thu Sep 8 06:37:02 2022 ] Epoch 66 Curr Acc: (28915/50919)56.79%
316
+ [ Thu Sep 8 06:37:02 2022 ] Epoch 56 Best Acc 59.90%
317
+ [ Thu Sep 8 06:37:02 2022 ] Training epoch: 67
318
+ [ Thu Sep 8 06:37:02 2022 ] Learning rate: 0.015
319
+ [ Thu Sep 8 06:43:37 2022 ] Mean training loss: 0.1096.
320
+ [ Thu Sep 8 06:43:37 2022 ] Time consumption: [Data]01%, [Network]99%
321
+ [ Thu Sep 8 06:43:37 2022 ] Eval epoch: 67
322
+ [ Thu Sep 8 06:50:20 2022 ] Epoch 67 Curr Acc: (27970/50919)54.93%
323
+ [ Thu Sep 8 06:50:20 2022 ] Epoch 56 Best Acc 59.90%
324
+ [ Thu Sep 8 06:50:20 2022 ] Training epoch: 68
325
+ [ Thu Sep 8 06:50:20 2022 ] Learning rate: 0.015
326
+ [ Thu Sep 8 06:56:56 2022 ] Mean training loss: 0.1034.
327
+ [ Thu Sep 8 06:56:56 2022 ] Time consumption: [Data]01%, [Network]99%
328
+ [ Thu Sep 8 06:56:56 2022 ] Eval epoch: 68
329
+ [ Thu Sep 8 07:03:39 2022 ] Epoch 68 Curr Acc: (28812/50919)56.58%
330
+ [ Thu Sep 8 07:03:39 2022 ] Epoch 56 Best Acc 59.90%
331
+ [ Thu Sep 8 07:03:39 2022 ] Training epoch: 69
332
+ [ Thu Sep 8 07:03:39 2022 ] Learning rate: 0.015
333
+ [ Thu Sep 8 07:10:14 2022 ] Mean training loss: 0.1027.
334
+ [ Thu Sep 8 07:10:14 2022 ] Time consumption: [Data]01%, [Network]99%
335
+ [ Thu Sep 8 07:10:14 2022 ] Eval epoch: 69
336
+ [ Thu Sep 8 07:16:56 2022 ] Epoch 69 Curr Acc: (28696/50919)56.36%
337
+ [ Thu Sep 8 07:16:56 2022 ] Epoch 56 Best Acc 59.90%
338
+ [ Thu Sep 8 07:16:56 2022 ] Training epoch: 70
339
+ [ Thu Sep 8 07:16:56 2022 ] Learning rate: 0.015
340
+ [ Thu Sep 8 07:23:31 2022 ] Mean training loss: 0.1057.
341
+ [ Thu Sep 8 07:23:31 2022 ] Time consumption: [Data]01%, [Network]99%
342
+ [ Thu Sep 8 07:23:31 2022 ] Eval epoch: 70
343
+ [ Thu Sep 8 07:30:14 2022 ] Epoch 70 Curr Acc: (27995/50919)54.98%
344
+ [ Thu Sep 8 07:30:14 2022 ] Epoch 56 Best Acc 59.90%
345
+ [ Thu Sep 8 07:30:14 2022 ] Training epoch: 71
346
+ [ Thu Sep 8 07:30:14 2022 ] Learning rate: 0.0015000000000000002
347
+ [ Thu Sep 8 07:36:49 2022 ] Mean training loss: 0.0602.
348
+ [ Thu Sep 8 07:36:49 2022 ] Time consumption: [Data]01%, [Network]99%
349
+ [ Thu Sep 8 07:36:49 2022 ] Eval epoch: 71
350
+ [ Thu Sep 8 07:43:32 2022 ] Epoch 71 Curr Acc: (29615/50919)58.16%
351
+ [ Thu Sep 8 07:43:32 2022 ] Epoch 56 Best Acc 59.90%
352
+ [ Thu Sep 8 07:43:32 2022 ] Training epoch: 72
353
+ [ Thu Sep 8 07:43:32 2022 ] Learning rate: 0.0015000000000000002
354
+ [ Thu Sep 8 07:50:08 2022 ] Mean training loss: 0.0393.
355
+ [ Thu Sep 8 07:50:08 2022 ] Time consumption: [Data]01%, [Network]99%
356
+ [ Thu Sep 8 07:50:08 2022 ] Eval epoch: 72
357
+ [ Thu Sep 8 07:56:50 2022 ] Epoch 72 Curr Acc: (29373/50919)57.69%
358
+ [ Thu Sep 8 07:56:50 2022 ] Epoch 56 Best Acc 59.90%
359
+ [ Thu Sep 8 07:56:50 2022 ] Training epoch: 73
360
+ [ Thu Sep 8 07:56:50 2022 ] Learning rate: 0.0015000000000000002
361
+ [ Thu Sep 8 08:03:26 2022 ] Mean training loss: 0.0340.
362
+ [ Thu Sep 8 08:03:26 2022 ] Time consumption: [Data]01%, [Network]99%
363
+ [ Thu Sep 8 08:03:26 2022 ] Eval epoch: 73
364
+ [ Thu Sep 8 08:10:08 2022 ] Epoch 73 Curr Acc: (29514/50919)57.96%
365
+ [ Thu Sep 8 08:10:08 2022 ] Epoch 56 Best Acc 59.90%
366
+ [ Thu Sep 8 08:10:08 2022 ] Training epoch: 74
367
+ [ Thu Sep 8 08:10:08 2022 ] Learning rate: 0.0015000000000000002
368
+ [ Thu Sep 8 08:16:44 2022 ] Mean training loss: 0.0307.
369
+ [ Thu Sep 8 08:16:44 2022 ] Time consumption: [Data]01%, [Network]99%
370
+ [ Thu Sep 8 08:16:44 2022 ] Eval epoch: 74
371
+ [ Thu Sep 8 08:23:26 2022 ] Epoch 74 Curr Acc: (30247/50919)59.40%
372
+ [ Thu Sep 8 08:23:26 2022 ] Epoch 56 Best Acc 59.90%
373
+ [ Thu Sep 8 08:23:26 2022 ] Training epoch: 75
374
+ [ Thu Sep 8 08:23:26 2022 ] Learning rate: 0.0015000000000000002
375
+ [ Thu Sep 8 08:30:01 2022 ] Mean training loss: 0.0268.
376
+ [ Thu Sep 8 08:30:01 2022 ] Time consumption: [Data]01%, [Network]99%
377
+ [ Thu Sep 8 08:30:01 2022 ] Eval epoch: 75
378
+ [ Thu Sep 8 08:36:44 2022 ] Epoch 75 Curr Acc: (29983/50919)58.88%
379
+ [ Thu Sep 8 08:36:44 2022 ] Epoch 56 Best Acc 59.90%
380
+ [ Thu Sep 8 08:36:44 2022 ] Training epoch: 76
381
+ [ Thu Sep 8 08:36:44 2022 ] Learning rate: 0.0015000000000000002
382
+ [ Thu Sep 8 08:43:19 2022 ] Mean training loss: 0.0262.
383
+ [ Thu Sep 8 08:43:19 2022 ] Time consumption: [Data]01%, [Network]99%
384
+ [ Thu Sep 8 08:43:19 2022 ] Eval epoch: 76
385
+ [ Thu Sep 8 08:50:02 2022 ] Epoch 76 Curr Acc: (29562/50919)58.06%
386
+ [ Thu Sep 8 08:50:02 2022 ] Epoch 56 Best Acc 59.90%
387
+ [ Thu Sep 8 08:50:02 2022 ] Training epoch: 77
388
+ [ Thu Sep 8 08:50:02 2022 ] Learning rate: 0.0015000000000000002
389
+ [ Thu Sep 8 08:56:37 2022 ] Mean training loss: 0.0256.
390
+ [ Thu Sep 8 08:56:37 2022 ] Time consumption: [Data]01%, [Network]99%
391
+ [ Thu Sep 8 08:56:37 2022 ] Eval epoch: 77
392
+ [ Thu Sep 8 09:03:20 2022 ] Epoch 77 Curr Acc: (28726/50919)56.42%
393
+ [ Thu Sep 8 09:03:20 2022 ] Epoch 56 Best Acc 59.90%
394
+ [ Thu Sep 8 09:03:20 2022 ] Training epoch: 78
395
+ [ Thu Sep 8 09:03:20 2022 ] Learning rate: 0.0015000000000000002
396
+ [ Thu Sep 8 09:09:55 2022 ] Mean training loss: 0.0236.
397
+ [ Thu Sep 8 09:09:55 2022 ] Time consumption: [Data]01%, [Network]99%
398
+ [ Thu Sep 8 09:09:55 2022 ] Eval epoch: 78
399
+ [ Thu Sep 8 09:16:37 2022 ] Epoch 78 Curr Acc: (29965/50919)58.85%
400
+ [ Thu Sep 8 09:16:37 2022 ] Epoch 56 Best Acc 59.90%
401
+ [ Thu Sep 8 09:16:37 2022 ] Training epoch: 79
402
+ [ Thu Sep 8 09:16:37 2022 ] Learning rate: 0.0015000000000000002
403
+ [ Thu Sep 8 09:23:12 2022 ] Mean training loss: 0.0202.
404
+ [ Thu Sep 8 09:23:12 2022 ] Time consumption: [Data]01%, [Network]99%
405
+ [ Thu Sep 8 09:23:12 2022 ] Eval epoch: 79
406
+ [ Thu Sep 8 09:29:55 2022 ] Epoch 79 Curr Acc: (29629/50919)58.19%
407
+ [ Thu Sep 8 09:29:55 2022 ] Epoch 56 Best Acc 59.90%
408
+ [ Thu Sep 8 09:29:55 2022 ] Training epoch: 80
409
+ [ Thu Sep 8 09:29:55 2022 ] Learning rate: 0.0015000000000000002
410
+ [ Thu Sep 8 09:36:30 2022 ] Mean training loss: 0.0205.
411
+ [ Thu Sep 8 09:36:30 2022 ] Time consumption: [Data]01%, [Network]99%
412
+ [ Thu Sep 8 09:36:30 2022 ] Eval epoch: 80
413
+ [ Thu Sep 8 09:43:12 2022 ] Epoch 80 Curr Acc: (30249/50919)59.41%
414
+ [ Thu Sep 8 09:43:12 2022 ] Epoch 56 Best Acc 59.90%
415
+ [ Thu Sep 8 09:43:12 2022 ] Training epoch: 81
416
+ [ Thu Sep 8 09:43:12 2022 ] Learning rate: 0.0015000000000000002
417
+ [ Thu Sep 8 09:49:47 2022 ] Mean training loss: 0.0199.
418
+ [ Thu Sep 8 09:49:47 2022 ] Time consumption: [Data]01%, [Network]99%
419
+ [ Thu Sep 8 09:49:47 2022 ] Eval epoch: 81
420
+ [ Thu Sep 8 09:56:30 2022 ] Epoch 81 Curr Acc: (30134/50919)59.18%
421
+ [ Thu Sep 8 09:56:30 2022 ] Epoch 56 Best Acc 59.90%
422
+ [ Thu Sep 8 09:56:30 2022 ] Training epoch: 82
423
+ [ Thu Sep 8 09:56:30 2022 ] Learning rate: 0.0015000000000000002
424
+ [ Thu Sep 8 10:03:05 2022 ] Mean training loss: 0.0194.
425
+ [ Thu Sep 8 10:03:05 2022 ] Time consumption: [Data]01%, [Network]99%
426
+ [ Thu Sep 8 10:03:05 2022 ] Eval epoch: 82
427
+ [ Thu Sep 8 10:09:48 2022 ] Epoch 82 Curr Acc: (30251/50919)59.41%
428
+ [ Thu Sep 8 10:09:48 2022 ] Epoch 56 Best Acc 59.90%
429
+ [ Thu Sep 8 10:09:48 2022 ] Training epoch: 83
430
+ [ Thu Sep 8 10:09:48 2022 ] Learning rate: 0.0015000000000000002
431
+ [ Thu Sep 8 10:16:24 2022 ] Mean training loss: 0.0175.
432
+ [ Thu Sep 8 10:16:24 2022 ] Time consumption: [Data]01%, [Network]99%
433
+ [ Thu Sep 8 10:16:24 2022 ] Eval epoch: 83
434
+ [ Thu Sep 8 10:23:07 2022 ] Epoch 83 Curr Acc: (29964/50919)58.85%
435
+ [ Thu Sep 8 10:23:07 2022 ] Epoch 56 Best Acc 59.90%
436
+ [ Thu Sep 8 10:23:07 2022 ] Training epoch: 84
437
+ [ Thu Sep 8 10:23:07 2022 ] Learning rate: 0.0015000000000000002
438
+ [ Thu Sep 8 10:29:43 2022 ] Mean training loss: 0.0183.
439
+ [ Thu Sep 8 10:29:43 2022 ] Time consumption: [Data]01%, [Network]99%
440
+ [ Thu Sep 8 10:29:43 2022 ] Eval epoch: 84
441
+ [ Thu Sep 8 10:36:26 2022 ] Epoch 84 Curr Acc: (29913/50919)58.75%
442
+ [ Thu Sep 8 10:36:26 2022 ] Epoch 56 Best Acc 59.90%
443
+ [ Thu Sep 8 10:36:26 2022 ] Training epoch: 85
444
+ [ Thu Sep 8 10:36:26 2022 ] Learning rate: 0.0015000000000000002
445
+ [ Thu Sep 8 10:43:01 2022 ] Mean training loss: 0.0177.
446
+ [ Thu Sep 8 10:43:01 2022 ] Time consumption: [Data]01%, [Network]99%
447
+ [ Thu Sep 8 10:43:02 2022 ] Eval epoch: 85
448
+ [ Thu Sep 8 10:49:45 2022 ] Epoch 85 Curr Acc: (29971/50919)58.86%
449
+ [ Thu Sep 8 10:49:45 2022 ] Epoch 56 Best Acc 59.90%
450
+ [ Thu Sep 8 10:49:45 2022 ] Training epoch: 86
451
+ [ Thu Sep 8 10:49:45 2022 ] Learning rate: 0.0015000000000000002
452
+ [ Thu Sep 8 10:56:20 2022 ] Mean training loss: 0.0173.
453
+ [ Thu Sep 8 10:56:20 2022 ] Time consumption: [Data]01%, [Network]99%
454
+ [ Thu Sep 8 10:56:20 2022 ] Eval epoch: 86
455
+ [ Thu Sep 8 11:03:03 2022 ] Epoch 86 Curr Acc: (29880/50919)58.68%
456
+ [ Thu Sep 8 11:03:03 2022 ] Epoch 56 Best Acc 59.90%
457
+ [ Thu Sep 8 11:03:03 2022 ] Training epoch: 87
458
+ [ Thu Sep 8 11:03:03 2022 ] Learning rate: 0.0015000000000000002
459
+ [ Thu Sep 8 11:09:39 2022 ] Mean training loss: 0.0179.
460
+ [ Thu Sep 8 11:09:39 2022 ] Time consumption: [Data]01%, [Network]99%
461
+ [ Thu Sep 8 11:09:39 2022 ] Eval epoch: 87
462
+ [ Thu Sep 8 11:16:23 2022 ] Epoch 87 Curr Acc: (30201/50919)59.31%
463
+ [ Thu Sep 8 11:16:23 2022 ] Epoch 56 Best Acc 59.90%
464
+ [ Thu Sep 8 11:16:23 2022 ] Training epoch: 88
465
+ [ Thu Sep 8 11:16:23 2022 ] Learning rate: 0.0015000000000000002
466
+ [ Thu Sep 8 11:22:59 2022 ] Mean training loss: 0.0164.
467
+ [ Thu Sep 8 11:22:59 2022 ] Time consumption: [Data]01%, [Network]99%
468
+ [ Thu Sep 8 11:22:59 2022 ] Eval epoch: 88
469
+ [ Thu Sep 8 11:29:42 2022 ] Epoch 88 Curr Acc: (30104/50919)59.12%
470
+ [ Thu Sep 8 11:29:42 2022 ] Epoch 56 Best Acc 59.90%
471
+ [ Thu Sep 8 11:29:42 2022 ] Training epoch: 89
472
+ [ Thu Sep 8 11:29:42 2022 ] Learning rate: 0.0015000000000000002
473
+ [ Thu Sep 8 11:36:17 2022 ] Mean training loss: 0.0170.
474
+ [ Thu Sep 8 11:36:17 2022 ] Time consumption: [Data]01%, [Network]99%
475
+ [ Thu Sep 8 11:36:17 2022 ] Eval epoch: 89
476
+ [ Thu Sep 8 11:43:01 2022 ] Epoch 89 Curr Acc: (30149/50919)59.21%
477
+ [ Thu Sep 8 11:43:01 2022 ] Epoch 56 Best Acc 59.90%
478
+ [ Thu Sep 8 11:43:01 2022 ] Training epoch: 90
479
+ [ Thu Sep 8 11:43:01 2022 ] Learning rate: 0.0015000000000000002
480
+ [ Thu Sep 8 11:49:36 2022 ] Mean training loss: 0.0174.
481
+ [ Thu Sep 8 11:49:36 2022 ] Time consumption: [Data]01%, [Network]99%
482
+ [ Thu Sep 8 11:49:36 2022 ] Eval epoch: 90
483
+ [ Thu Sep 8 11:56:19 2022 ] Epoch 90 Curr Acc: (29425/50919)57.79%
484
+ [ Thu Sep 8 11:56:19 2022 ] Epoch 56 Best Acc 59.90%
485
+ [ Thu Sep 8 11:56:20 2022 ] Training epoch: 91
486
+ [ Thu Sep 8 11:56:20 2022 ] Learning rate: 0.00015000000000000004
487
+ [ Thu Sep 8 12:02:55 2022 ] Mean training loss: 0.0153.
488
+ [ Thu Sep 8 12:02:55 2022 ] Time consumption: [Data]01%, [Network]99%
489
+ [ Thu Sep 8 12:02:55 2022 ] Eval epoch: 91
490
+ [ Thu Sep 8 12:09:38 2022 ] Epoch 91 Curr Acc: (30085/50919)59.08%
491
+ [ Thu Sep 8 12:09:38 2022 ] Epoch 56 Best Acc 59.90%
492
+ [ Thu Sep 8 12:09:38 2022 ] Training epoch: 92
493
+ [ Thu Sep 8 12:09:38 2022 ] Learning rate: 0.00015000000000000004
494
+ [ Thu Sep 8 12:16:14 2022 ] Mean training loss: 0.0160.
495
+ [ Thu Sep 8 12:16:14 2022 ] Time consumption: [Data]01%, [Network]99%
496
+ [ Thu Sep 8 12:16:14 2022 ] Eval epoch: 92
497
+ [ Thu Sep 8 12:22:57 2022 ] Epoch 92 Curr Acc: (29728/50919)58.38%
498
+ [ Thu Sep 8 12:22:57 2022 ] Epoch 56 Best Acc 59.90%
499
+ [ Thu Sep 8 12:22:57 2022 ] Training epoch: 93
500
+ [ Thu Sep 8 12:22:57 2022 ] Learning rate: 0.00015000000000000004
501
+ [ Thu Sep 8 12:29:32 2022 ] Mean training loss: 0.0155.
502
+ [ Thu Sep 8 12:29:32 2022 ] Time consumption: [Data]01%, [Network]99%
503
+ [ Thu Sep 8 12:29:32 2022 ] Eval epoch: 93
504
+ [ Thu Sep 8 12:36:15 2022 ] Epoch 93 Curr Acc: (29908/50919)58.74%
505
+ [ Thu Sep 8 12:36:15 2022 ] Epoch 56 Best Acc 59.90%
506
+ [ Thu Sep 8 12:36:15 2022 ] Training epoch: 94
507
+ [ Thu Sep 8 12:36:15 2022 ] Learning rate: 0.00015000000000000004
508
+ [ Thu Sep 8 12:42:50 2022 ] Mean training loss: 0.0155.
509
+ [ Thu Sep 8 12:42:50 2022 ] Time consumption: [Data]01%, [Network]99%
510
+ [ Thu Sep 8 12:42:50 2022 ] Eval epoch: 94
511
+ [ Thu Sep 8 12:49:33 2022 ] Epoch 94 Curr Acc: (30040/50919)59.00%
512
+ [ Thu Sep 8 12:49:33 2022 ] Epoch 56 Best Acc 59.90%
513
+ [ Thu Sep 8 12:49:33 2022 ] Training epoch: 95
514
+ [ Thu Sep 8 12:49:33 2022 ] Learning rate: 0.00015000000000000004
515
+ [ Thu Sep 8 12:56:08 2022 ] Mean training loss: 0.0153.
516
+ [ Thu Sep 8 12:56:08 2022 ] Time consumption: [Data]01%, [Network]99%
517
+ [ Thu Sep 8 12:56:08 2022 ] Eval epoch: 95
518
+ [ Thu Sep 8 13:02:51 2022 ] Epoch 95 Curr Acc: (29757/50919)58.44%
519
+ [ Thu Sep 8 13:02:51 2022 ] Epoch 56 Best Acc 59.90%
520
+ [ Thu Sep 8 13:02:51 2022 ] Training epoch: 96
521
+ [ Thu Sep 8 13:02:51 2022 ] Learning rate: 0.00015000000000000004
522
+ [ Thu Sep 8 13:09:26 2022 ] Mean training loss: 0.0157.
523
+ [ Thu Sep 8 13:09:26 2022 ] Time consumption: [Data]01%, [Network]99%
524
+ [ Thu Sep 8 13:09:26 2022 ] Eval epoch: 96
525
+ [ Thu Sep 8 13:16:09 2022 ] Epoch 96 Curr Acc: (29844/50919)58.61%
526
+ [ Thu Sep 8 13:16:09 2022 ] Epoch 56 Best Acc 59.90%
527
+ [ Thu Sep 8 13:16:09 2022 ] Training epoch: 97
528
+ [ Thu Sep 8 13:16:09 2022 ] Learning rate: 0.00015000000000000004
529
+ [ Thu Sep 8 13:22:44 2022 ] Mean training loss: 0.0158.
530
+ [ Thu Sep 8 13:22:44 2022 ] Time consumption: [Data]01%, [Network]99%
531
+ [ Thu Sep 8 13:22:44 2022 ] Eval epoch: 97
532
+ [ Thu Sep 8 13:29:27 2022 ] Epoch 97 Curr Acc: (30111/50919)59.14%
533
+ [ Thu Sep 8 13:29:27 2022 ] Epoch 56 Best Acc 59.90%
534
+ [ Thu Sep 8 13:29:27 2022 ] Training epoch: 98
535
+ [ Thu Sep 8 13:29:27 2022 ] Learning rate: 0.00015000000000000004
536
+ [ Thu Sep 8 13:36:03 2022 ] Mean training loss: 0.0149.
537
+ [ Thu Sep 8 13:36:03 2022 ] Time consumption: [Data]01%, [Network]99%
538
+ [ Thu Sep 8 13:36:03 2022 ] Eval epoch: 98
539
+ [ Thu Sep 8 13:42:46 2022 ] Epoch 98 Curr Acc: (30025/50919)58.97%
540
+ [ Thu Sep 8 13:42:46 2022 ] Epoch 56 Best Acc 59.90%
541
+ [ Thu Sep 8 13:42:46 2022 ] Training epoch: 99
542
+ [ Thu Sep 8 13:42:46 2022 ] Learning rate: 0.00015000000000000004
543
+ [ Thu Sep 8 13:49:22 2022 ] Mean training loss: 0.0149.
544
+ [ Thu Sep 8 13:49:22 2022 ] Time consumption: [Data]01%, [Network]99%
545
+ [ Thu Sep 8 13:49:22 2022 ] Eval epoch: 99
546
+ [ Thu Sep 8 13:56:04 2022 ] Epoch 99 Curr Acc: (29153/50919)57.25%
547
+ [ Thu Sep 8 13:56:04 2022 ] Epoch 56 Best Acc 59.90%
548
+ [ Thu Sep 8 13:56:05 2022 ] Training epoch: 100
549
+ [ Thu Sep 8 13:56:05 2022 ] Learning rate: 0.00015000000000000004
550
+ [ Thu Sep 8 14:02:39 2022 ] Mean training loss: 0.0151.
551
+ [ Thu Sep 8 14:02:39 2022 ] Time consumption: [Data]01%, [Network]99%
552
+ [ Thu Sep 8 14:02:39 2022 ] Eval epoch: 100
553
+ [ Thu Sep 8 14:09:21 2022 ] Epoch 100 Curr Acc: (30215/50919)59.34%
554
+ [ Thu Sep 8 14:09:21 2022 ] Epoch 56 Best Acc 59.90%
555
+ [ Thu Sep 8 14:09:21 2022 ] Training epoch: 101
556
+ [ Thu Sep 8 14:09:21 2022 ] Learning rate: 0.00015000000000000004
557
+ [ Thu Sep 8 14:15:56 2022 ] Mean training loss: 0.0152.
558
+ [ Thu Sep 8 14:15:56 2022 ] Time consumption: [Data]01%, [Network]99%
559
+ [ Thu Sep 8 14:15:56 2022 ] Eval epoch: 101
560
+ [ Thu Sep 8 14:22:39 2022 ] Epoch 101 Curr Acc: (29818/50919)58.56%
561
+ [ Thu Sep 8 14:22:39 2022 ] Epoch 56 Best Acc 59.90%
562
+ [ Thu Sep 8 14:22:39 2022 ] Training epoch: 102
563
+ [ Thu Sep 8 14:22:39 2022 ] Learning rate: 0.00015000000000000004
564
+ [ Thu Sep 8 14:29:14 2022 ] Mean training loss: 0.0155.
565
+ [ Thu Sep 8 14:29:14 2022 ] Time consumption: [Data]01%, [Network]99%
566
+ [ Thu Sep 8 14:29:14 2022 ] Eval epoch: 102
567
+ [ Thu Sep 8 14:35:56 2022 ] Epoch 102 Curr Acc: (29704/50919)58.34%
568
+ [ Thu Sep 8 14:35:56 2022 ] Epoch 56 Best Acc 59.90%
569
+ [ Thu Sep 8 14:35:56 2022 ] Training epoch: 103
570
+ [ Thu Sep 8 14:35:56 2022 ] Learning rate: 0.00015000000000000004
571
+ [ Thu Sep 8 14:42:31 2022 ] Mean training loss: 0.0153.
572
+ [ Thu Sep 8 14:42:31 2022 ] Time consumption: [Data]01%, [Network]99%
573
+ [ Thu Sep 8 14:42:31 2022 ] Eval epoch: 103
574
+ [ Thu Sep 8 14:49:14 2022 ] Epoch 103 Curr Acc: (30147/50919)59.21%
575
+ [ Thu Sep 8 14:49:14 2022 ] Epoch 56 Best Acc 59.90%
576
+ [ Thu Sep 8 14:49:14 2022 ] Training epoch: 104
577
+ [ Thu Sep 8 14:49:14 2022 ] Learning rate: 0.00015000000000000004
578
+ [ Thu Sep 8 14:55:48 2022 ] Mean training loss: 0.0139.
579
+ [ Thu Sep 8 14:55:48 2022 ] Time consumption: [Data]01%, [Network]99%
580
+ [ Thu Sep 8 14:55:48 2022 ] Eval epoch: 104
581
+ [ Thu Sep 8 15:02:31 2022 ] Epoch 104 Curr Acc: (30201/50919)59.31%
582
+ [ Thu Sep 8 15:02:31 2022 ] Epoch 56 Best Acc 59.90%
583
+ [ Thu Sep 8 15:02:31 2022 ] Training epoch: 105
584
+ [ Thu Sep 8 15:02:31 2022 ] Learning rate: 0.00015000000000000004
585
+ [ Thu Sep 8 15:09:05 2022 ] Mean training loss: 0.0146.
586
+ [ Thu Sep 8 15:09:05 2022 ] Time consumption: [Data]01%, [Network]99%
587
+ [ Thu Sep 8 15:09:05 2022 ] Eval epoch: 105
588
+ [ Thu Sep 8 15:15:48 2022 ] Epoch 105 Curr Acc: (29992/50919)58.90%
589
+ [ Thu Sep 8 15:15:48 2022 ] Epoch 56 Best Acc 59.90%
590
+ [ Thu Sep 8 15:15:48 2022 ] Training epoch: 106
591
+ [ Thu Sep 8 15:15:48 2022 ] Learning rate: 0.00015000000000000004
592
+ [ Thu Sep 8 15:22:22 2022 ] Mean training loss: 0.0144.
593
+ [ Thu Sep 8 15:22:22 2022 ] Time consumption: [Data]01%, [Network]99%
594
+ [ Thu Sep 8 15:22:22 2022 ] Eval epoch: 106
595
+ [ Thu Sep 8 15:29:04 2022 ] Epoch 106 Curr Acc: (30148/50919)59.21%
596
+ [ Thu Sep 8 15:29:04 2022 ] Epoch 56 Best Acc 59.90%
597
+ [ Thu Sep 8 15:29:04 2022 ] Training epoch: 107
598
+ [ Thu Sep 8 15:29:04 2022 ] Learning rate: 0.00015000000000000004
599
+ [ Thu Sep 8 15:35:39 2022 ] Mean training loss: 0.0143.
600
+ [ Thu Sep 8 15:35:39 2022 ] Time consumption: [Data]01%, [Network]99%
601
+ [ Thu Sep 8 15:35:39 2022 ] Eval epoch: 107
602
+ [ Thu Sep 8 15:42:22 2022 ] Epoch 107 Curr Acc: (29915/50919)58.75%
603
+ [ Thu Sep 8 15:42:22 2022 ] Epoch 56 Best Acc 59.90%
604
+ [ Thu Sep 8 15:42:22 2022 ] Training epoch: 108
605
+ [ Thu Sep 8 15:42:22 2022 ] Learning rate: 0.00015000000000000004
606
+ [ Thu Sep 8 15:48:55 2022 ] Mean training loss: 0.0157.
607
+ [ Thu Sep 8 15:48:55 2022 ] Time consumption: [Data]01%, [Network]99%
608
+ [ Thu Sep 8 15:48:55 2022 ] Eval epoch: 108
609
+ [ Thu Sep 8 15:55:37 2022 ] Epoch 108 Curr Acc: (30345/50919)59.59%
610
+ [ Thu Sep 8 15:55:37 2022 ] Epoch 56 Best Acc 59.90%
611
+ [ Thu Sep 8 15:55:37 2022 ] Training epoch: 109
612
+ [ Thu Sep 8 15:55:37 2022 ] Learning rate: 0.00015000000000000004
613
+ [ Thu Sep 8 16:02:12 2022 ] Mean training loss: 0.0140.
614
+ [ Thu Sep 8 16:02:12 2022 ] Time consumption: [Data]01%, [Network]99%
615
+ [ Thu Sep 8 16:02:12 2022 ] Eval epoch: 109
616
+ [ Thu Sep 8 16:08:54 2022 ] Epoch 109 Curr Acc: (29940/50919)58.80%
617
+ [ Thu Sep 8 16:08:54 2022 ] Epoch 56 Best Acc 59.90%
618
+ [ Thu Sep 8 16:08:54 2022 ] Training epoch: 110
619
+ [ Thu Sep 8 16:08:54 2022 ] Learning rate: 0.00015000000000000004
620
+ [ Thu Sep 8 16:15:29 2022 ] Mean training loss: 0.0155.
621
+ [ Thu Sep 8 16:15:29 2022 ] Time consumption: [Data]01%, [Network]99%
622
+ [ Thu Sep 8 16:15:29 2022 ] Eval epoch: 110
623
+ [ Thu Sep 8 16:22:12 2022 ] Epoch 110 Curr Acc: (29913/50919)58.75%
624
+ [ Thu Sep 8 16:22:12 2022 ] Epoch 56 Best Acc 59.90%
625
+ [ Thu Sep 8 16:22:12 2022 ] epoch: 56, best accuracy: 0.5989905536243838
626
+ [ Thu Sep 8 16:22:12 2022 ] Experiment: ./work_dir/ntu120/xsub_b
627
+ [ Thu Sep 8 16:22:12 2022 ] # generator parameters: 2.922995 M.
628
+ [ Thu Sep 8 16:22:12 2022 ] Load weights from ./runs/ntu120/xsub_b/runs-55-109200.pt.
629
+ [ Thu Sep 8 16:22:12 2022 ] Eval epoch: 1
630
+ [ Thu Sep 8 16:28:54 2022 ] Epoch 1 Curr Acc: (30500/50919)59.90%
631
+ [ Thu Sep 8 16:28:54 2022 ] Epoch 56 Best Acc 59.90%
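
For reference, a minimal sketch (not part of the uploaded files) of how the eval lines in a log.txt like the one above can be collected into an accuracy-per-epoch table; the regex assumes the exact "Epoch N Curr Acc: (correct/total)pct%" format these logs use.

```python
# Hypothetical helper, not part of the repo: collect eval accuracy per epoch
# from an MST-GCN log.txt in the format shown above.
import re

ACC_RE = re.compile(r'Epoch (\d+) Curr Acc: \((\d+)/(\d+)\)([\d.]+)%')

def eval_accuracy(log_path):
    accs = {}
    with open(log_path) as f:
        for line in f:
            m = ACC_RE.search(line)
            if m:
                epoch = int(m.group(1))
                accs[epoch] = float(m.group(4))  # accuracy in percent
    return accs

# Example path taken from this commit; per the log above, epoch 56 maps to 59.90:
# print(eval_accuracy('ckpt/Others/MST-GCN/ntu120_xsub/xsub_b/log.txt')[56])
```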
ckpt/Others/MST-GCN/ntu120_xsub/xsub_bm/AEMST_GCN.py ADDED
@@ -0,0 +1,168 @@
1
+ import torch
2
+ import torch.nn as nn
3
+ import torch.nn.functional as F
4
+
5
+ import numpy as np
6
+ import math
7
+
8
+ import sys
9
+ sys.path.append('../')
10
+ from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \
11
+ MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \
12
+ MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer
13
+ from model.activations import Activations
14
+ from model.utils import import_class, conv_branch_init, conv_init, bn_init
15
+ from model.attentions import Attention_Layer
16
+
17
+ # import model.attentions
18
+
19
+ __block_type__ = {
20
+ 'basic': (Basic_GCN_layer, Basic_TCN_layer),
21
+ 'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer),
22
+ 'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer),
23
+ 'ms': (MS_GCN_layer, MS_TCN_layer),
24
+ 'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer),
25
+ }
26
+
27
+
28
+ class Model(nn.Module):
29
+ def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten,
30
+ **kwargs):
31
+ super(Model, self).__init__()
32
+ kwargs['act'] = Activations(kwargs['act'])
33
+ atten = None if atten == 'None' else atten
34
+ if graph is None:
35
+ raise ValueError()
36
+ else:
37
+ Graph = import_class(graph)
38
+ self.graph = Graph(**graph_args)
39
+ A = self.graph.A
40
+
41
+ self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point)
42
+
43
+ self.layers = nn.ModuleList()
44
+
45
+ for i, block in enumerate(block_args):
46
+ if i == 0:
47
+ self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2],
48
+ kernel_size=kernel_size, stride=block[3], A=A, block_type='basic',
49
+ atten=None, **kwargs))
50
+ else:
51
+ self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2],
52
+ kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type,
53
+ atten=atten, **kwargs))
54
+
55
+ self.gap = nn.AdaptiveAvgPool2d(1)
56
+ self.fc = nn.Linear(block_args[-1][1], num_class)
57
+
58
+ for m in self.modules():
59
+ if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer):
60
+ for mm in m.modules():
61
+ if isinstance(mm, nn.Conv2d):
62
+ conv_branch_init(mm, self.graph.A.shape[0])
63
+ if isinstance(mm, nn.BatchNorm2d):
64
+ bn_init(mm, 1)
65
+ elif isinstance(m, nn.Conv2d):
66
+ conv_init(m)
67
+ elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d):
68
+ bn_init(m, 1)
69
+ elif isinstance(m, nn.Linear):
70
+ nn.init.normal_(m.weight, 0, math.sqrt(2. / num_class))
71
+
72
+ def forward(self, x):
73
+ N, C, T, V, M = x.size()
74
+
75
+ x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T) # N C T V M --> N M V C T
76
+ x = self.data_bn(x)
77
+ x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
78
+
79
+ for i, layer in enumerate(self.layers):
80
+ x = layer(x)
81
+
82
+ features = x
83
+
84
+ x = self.gap(x).view(N, M, -1).mean(dim=1)
85
+ x = self.fc(x)
86
+
87
+ return features, x
88
+
89
+
90
+ class MST_GCN_block(nn.Module):
91
+ def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs):
92
+ super(MST_GCN_block, self).__init__()
93
+ self.atten = atten
94
+ self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A,
95
+ residual=residual, **kwargs)
96
+ self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride,
97
+ residual=residual, **kwargs)
98
+ if atten is not None:
99
+ self.att = Attention_Layer(out_channels, atten, **kwargs)
100
+
101
+ def forward(self, x):
102
+ return self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x))
103
+
104
+
105
+ if __name__ == '__main__':
106
+ import sys
107
+ import time
108
+
109
+ parts = [
110
+ np.array([5, 6, 7, 8, 22, 23]) - 1, # left_arm
111
+ np.array([9, 10, 11, 12, 24, 25]) - 1, # right_arm
112
+ np.array([13, 14, 15, 16]) - 1, # left_leg
113
+ np.array([17, 18, 19, 20]) - 1, # right_leg
114
+ np.array([1, 2, 3, 4, 21]) - 1 # torso
115
+ ]
116
+
117
+ warmup_iter = 3
118
+ test_iter = 10
119
+ sys.path.append('/home/chenzhan/mywork/MST-GCN/')
120
+ from thop import profile
121
+ basic_channels = 112
122
+ cfgs = {
123
+ 'num_class': 2,
124
+ 'num_point': 25,
125
+ 'num_person': 1,
126
+ 'block_args': [[2, basic_channels, False, 1],
127
+ [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1],
128
+ [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1],
129
+ [basic_channels*2, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]],
130
+ 'graph': 'graph.ntu_rgb_d.Graph',
131
+ 'graph_args': {'labeling_mode': 'spatial'},
132
+ 'kernel_size': 9,
133
+ 'block_type': 'ms',
134
+ 'reduct_ratio': 2,
135
+ 'expand_ratio': 0,
136
+ 't_scale': 4,
137
+ 'layer_type': 'sep',
138
+ 'act': 'relu',
139
+ 's_scale': 4,
140
+ 'atten': 'stcja',
141
+ 'bias': True,
142
+ 'parts': parts
143
+ }
144
+
145
+ model = Model(**cfgs)
146
+
147
+ N, C, T, V, M = 4, 2, 16, 25, 1
148
+ inputs = torch.rand(N, C, T, V, M)
149
+
150
+ for i in range(warmup_iter + test_iter):
151
+ if i == warmup_iter:
152
+ start_time = time.time()
153
+ outputs = model(inputs)
154
+ end_time = time.time()
155
+
156
+ total_time = end_time - start_time
157
+ print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format(
158
+ total_time, test_iter, total_time/test_iter/N))
159
+
160
+ print(outputs[1].size())  # forward() returns (features, logits); report the logits shape
161
+
162
+ hereflops, params = profile(model, inputs=(inputs,), verbose=False)
163
+ print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N))
164
+ print('# Params is {} M'.format(sum(param.numel() for param in model.parameters()) / 10 ** 6))
165
+
166
+
167
+
168
+
ckpt/Others/MST-GCN/ntu120_xsub/xsub_bm/config.yaml ADDED
@@ -0,0 +1,107 @@
1
+ base_lr: 0.15
2
+ batch_size: 8
3
+ config: config/ntu120/xsub_bm.yaml
4
+ device:
5
+ - 0
6
+ eval_interval: 5
7
+ feeder: feeders.feeder.Feeder
8
+ ignore_weights: []
9
+ local_rank: 0
10
+ log_interval: 100
11
+ model: model.AEMST_GCN.Model
12
+ model_args:
13
+ act: relu
14
+ atten: None
15
+ bias: true
16
+ block_args:
17
+ - - 3
18
+ - 112
19
+ - false
20
+ - 1
21
+ - - 112
22
+ - 112
23
+ - true
24
+ - 1
25
+ - - 112
26
+ - 112
27
+ - true
28
+ - 1
29
+ - - 112
30
+ - 112
31
+ - true
32
+ - 1
33
+ - - 112
34
+ - 224
35
+ - true
36
+ - 2
37
+ - - 224
38
+ - 224
39
+ - true
40
+ - 1
41
+ - - 224
42
+ - 224
43
+ - true
44
+ - 1
45
+ - - 224
46
+ - 448
47
+ - true
48
+ - 2
49
+ - - 448
50
+ - 448
51
+ - true
52
+ - 1
53
+ - - 448
54
+ - 448
55
+ - true
56
+ - 1
57
+ block_type: ms
58
+ expand_ratio: 0
59
+ graph: graph.ntu_rgb_d.Graph
60
+ graph_args:
61
+ labeling_mode: spatial
62
+ kernel_size: 9
63
+ layer_type: basic
64
+ num_class: 120
65
+ num_person: 2
66
+ num_point: 25
67
+ reduct_ratio: 2
68
+ s_scale: 4
69
+ t_scale: 4
70
+ model_path: ''
71
+ model_saved_name: ./runs/ntu120/xsub_bm/runs
72
+ nesterov: true
73
+ num_epoch: 110
74
+ num_worker: 32
75
+ only_train_epoch: 0
76
+ only_train_part: false
77
+ optimizer: SGD
78
+ phase: train
79
+ print_log: true
80
+ save_interval: 1
81
+ save_score: true
82
+ seed: 1
83
+ show_topk:
84
+ - 1
85
+ - 5
86
+ start_epoch: 0
87
+ step:
88
+ - 50
89
+ - 70
90
+ - 90
91
+ test_batch_size: 64
92
+ test_feeder_args:
93
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_bone_motion.npy
94
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl
95
+ train_feeder_args:
96
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_bone_motion.npy
97
+ debug: false
98
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl
99
+ normalization: false
100
+ random_choose: false
101
+ random_move: false
102
+ random_shift: false
103
+ window_size: -1
104
+ warm_up_epoch: 10
105
+ weight_decay: 0.0001
106
+ weights: null
107
+ work_dir: ./work_dir/ntu120/xsub_bm
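
The config above fully specifies the network. A minimal sketch (not part of the uploaded files, assuming PyYAML and the repo's model.utils.import_class helper, which AEMST_GCN.py above also imports) of rebuilding the model from it:

```python
# Hypothetical usage: instantiate the model described by a config.yaml like the one above.
import yaml
from model.utils import import_class  # same helper AEMST_GCN.py imports

with open('ckpt/Others/MST-GCN/ntu120_xsub/xsub_bm/config.yaml') as f:
    cfg = yaml.safe_load(f)

Model = import_class(cfg['model'])    # 'model.AEMST_GCN.Model'
net = Model(**cfg['model_args'])      # block_args, graph, kernel_size, atten, ...
print(sum(p.numel() for p in net.parameters()) / 1e6, 'M parameters')
```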
ckpt/Others/MST-GCN/ntu120_xsub/xsub_bm/epoch1_test_score.pkl ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:5ed3be9094c4a76dd87e77bea5421bd2466a9547d072311f8ef389544269d6c1
3
+ size 29946137
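
epoch1_test_score.pkl is stored as a git-lfs pointer. A minimal, hedged sketch for inspecting it, assuming it pickles the per-sample test scores saved at eval time (the exact structure is not shown in this commit):

```python
# Hypothetical inspection snippet, not part of the uploaded files.
# Assumes the pickle holds per-sample classification scores saved during evaluation.
import pickle

with open('ckpt/Others/MST-GCN/ntu120_xsub/xsub_bm/epoch1_test_score.pkl', 'rb') as f:
    scores = pickle.load(f)

print(type(scores))      # inspect the container first
try:
    print(len(scores))   # number of test samples if it is a dict/list
except TypeError:
    pass
```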
ckpt/Others/MST-GCN/ntu120_xsub/xsub_bm/log.txt ADDED
@@ -0,0 +1,631 @@
1
+ [ Wed Sep 7 21:34:40 2022 ] # generator parameters: 2.922995 M.
2
+ [ Wed Sep 7 21:34:40 2022 ] Parameters:
3
+ {'work_dir': './work_dir/ntu120/xsub_bm', 'model_saved_name': './runs/ntu120/xsub_bm/runs', 'config': 'config/ntu120/xsub_bm.yaml', 'phase': 'train', 'save_score': True, 'seed': 1, 'log_interval': 100, 'save_interval': 1, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_bone_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_bone_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl'}, 'model': 'model.AEMST_GCN.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'block_args': [[3, 112, False, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 224, True, 2], [224, 224, True, 1], [224, 224, True, 1], [224, 448, True, 2], [448, 448, True, 1], [448, 448, True, 1]], 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}, 'kernel_size': 9, 'block_type': 'ms', 'reduct_ratio': 2, 'expand_ratio': 0, 's_scale': 4, 't_scale': 4, 'layer_type': 'basic', 'act': 'relu', 'atten': 'None', 'bias': True}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.15, 'step': [50, 70, 90], 'device': [0], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 8, 'test_batch_size': 64, 'start_epoch': 0, 'model_path': '', 'num_epoch': 110, 'weight_decay': 0.0001, 'only_train_part': False, 'only_train_epoch': 0, 'warm_up_epoch': 10, 'local_rank': 0}
4
+
5
+ [ Wed Sep 7 21:34:40 2022 ] Training epoch: 1
6
+ [ Wed Sep 7 21:34:40 2022 ] Learning rate: 0.015
7
+ [ Wed Sep 7 21:41:19 2022 ] Mean training loss: 3.5177.
8
+ [ Wed Sep 7 21:41:19 2022 ] Time consumption: [Data]01%, [Network]99%
9
+ [ Wed Sep 7 21:41:19 2022 ] Training epoch: 2
10
+ [ Wed Sep 7 21:41:19 2022 ] Learning rate: 0.03
11
+ [ Wed Sep 7 21:47:58 2022 ] Mean training loss: 2.4868.
12
+ [ Wed Sep 7 21:47:58 2022 ] Time consumption: [Data]01%, [Network]99%
13
+ [ Wed Sep 7 21:47:58 2022 ] Training epoch: 3
14
+ [ Wed Sep 7 21:47:58 2022 ] Learning rate: 0.045
15
+ [ Wed Sep 7 21:54:36 2022 ] Mean training loss: 1.9641.
16
+ [ Wed Sep 7 21:54:36 2022 ] Time consumption: [Data]01%, [Network]99%
17
+ [ Wed Sep 7 21:54:36 2022 ] Training epoch: 4
18
+ [ Wed Sep 7 21:54:36 2022 ] Learning rate: 0.06
19
+ [ Wed Sep 7 22:01:14 2022 ] Mean training loss: 1.6675.
20
+ [ Wed Sep 7 22:01:14 2022 ] Time consumption: [Data]01%, [Network]99%
21
+ [ Wed Sep 7 22:01:14 2022 ] Training epoch: 5
22
+ [ Wed Sep 7 22:01:14 2022 ] Learning rate: 0.075
23
+ [ Wed Sep 7 22:07:53 2022 ] Mean training loss: 1.5121.
24
+ [ Wed Sep 7 22:07:53 2022 ] Time consumption: [Data]01%, [Network]99%
25
+ [ Wed Sep 7 22:07:53 2022 ] Training epoch: 6
26
+ [ Wed Sep 7 22:07:53 2022 ] Learning rate: 0.09
27
+ [ Wed Sep 7 22:14:31 2022 ] Mean training loss: 1.4097.
28
+ [ Wed Sep 7 22:14:31 2022 ] Time consumption: [Data]01%, [Network]99%
29
+ [ Wed Sep 7 22:14:31 2022 ] Training epoch: 7
30
+ [ Wed Sep 7 22:14:31 2022 ] Learning rate: 0.10500000000000001
31
+ [ Wed Sep 7 22:21:09 2022 ] Mean training loss: 1.3619.
32
+ [ Wed Sep 7 22:21:09 2022 ] Time consumption: [Data]01%, [Network]99%
33
+ [ Wed Sep 7 22:21:09 2022 ] Training epoch: 8
34
+ [ Wed Sep 7 22:21:09 2022 ] Learning rate: 0.12
35
+ [ Wed Sep 7 22:27:46 2022 ] Mean training loss: 1.3424.
36
+ [ Wed Sep 7 22:27:46 2022 ] Time consumption: [Data]01%, [Network]99%
37
+ [ Wed Sep 7 22:27:46 2022 ] Training epoch: 9
38
+ [ Wed Sep 7 22:27:46 2022 ] Learning rate: 0.13499999999999998
39
+ [ Wed Sep 7 22:34:25 2022 ] Mean training loss: 1.3057.
40
+ [ Wed Sep 7 22:34:25 2022 ] Time consumption: [Data]01%, [Network]99%
41
+ [ Wed Sep 7 22:34:25 2022 ] Training epoch: 10
42
+ [ Wed Sep 7 22:34:25 2022 ] Learning rate: 0.15
43
+ [ Wed Sep 7 22:41:02 2022 ] Mean training loss: 1.3086.
44
+ [ Wed Sep 7 22:41:02 2022 ] Time consumption: [Data]01%, [Network]99%
45
+ [ Wed Sep 7 22:41:02 2022 ] Training epoch: 11
46
+ [ Wed Sep 7 22:41:02 2022 ] Learning rate: 0.15
47
+ [ Wed Sep 7 22:47:42 2022 ] Mean training loss: 1.2515.
48
+ [ Wed Sep 7 22:47:42 2022 ] Time consumption: [Data]01%, [Network]99%
49
+ [ Wed Sep 7 22:47:42 2022 ] Training epoch: 12
50
+ [ Wed Sep 7 22:47:42 2022 ] Learning rate: 0.15
51
+ [ Wed Sep 7 22:54:20 2022 ] Mean training loss: 1.2029.
52
+ [ Wed Sep 7 22:54:20 2022 ] Time consumption: [Data]01%, [Network]99%
53
+ [ Wed Sep 7 22:54:20 2022 ] Training epoch: 13
54
+ [ Wed Sep 7 22:54:20 2022 ] Learning rate: 0.15
55
+ [ Wed Sep 7 23:01:00 2022 ] Mean training loss: 1.1772.
56
+ [ Wed Sep 7 23:01:00 2022 ] Time consumption: [Data]01%, [Network]99%
57
+ [ Wed Sep 7 23:01:00 2022 ] Training epoch: 14
58
+ [ Wed Sep 7 23:01:00 2022 ] Learning rate: 0.15
59
+ [ Wed Sep 7 23:07:38 2022 ] Mean training loss: 1.1704.
60
+ [ Wed Sep 7 23:07:38 2022 ] Time consumption: [Data]01%, [Network]99%
61
+ [ Wed Sep 7 23:07:38 2022 ] Training epoch: 15
62
+ [ Wed Sep 7 23:07:38 2022 ] Learning rate: 0.15
63
+ [ Wed Sep 7 23:14:16 2022 ] Mean training loss: 1.1350.
64
+ [ Wed Sep 7 23:14:16 2022 ] Time consumption: [Data]01%, [Network]99%
65
+ [ Wed Sep 7 23:14:16 2022 ] Training epoch: 16
66
+ [ Wed Sep 7 23:14:16 2022 ] Learning rate: 0.15
67
+ [ Wed Sep 7 23:20:54 2022 ] Mean training loss: 1.1048.
68
+ [ Wed Sep 7 23:20:54 2022 ] Time consumption: [Data]01%, [Network]99%
69
+ [ Wed Sep 7 23:20:54 2022 ] Training epoch: 17
70
+ [ Wed Sep 7 23:20:54 2022 ] Learning rate: 0.15
71
+ [ Wed Sep 7 23:27:33 2022 ] Mean training loss: 1.0995.
72
+ [ Wed Sep 7 23:27:33 2022 ] Time consumption: [Data]01%, [Network]99%
73
+ [ Wed Sep 7 23:27:33 2022 ] Training epoch: 18
74
+ [ Wed Sep 7 23:27:33 2022 ] Learning rate: 0.15
75
+ [ Wed Sep 7 23:34:11 2022 ] Mean training loss: 1.0836.
76
+ [ Wed Sep 7 23:34:11 2022 ] Time consumption: [Data]01%, [Network]99%
77
+ [ Wed Sep 7 23:34:11 2022 ] Training epoch: 19
78
+ [ Wed Sep 7 23:34:11 2022 ] Learning rate: 0.15
79
+ [ Wed Sep 7 23:40:50 2022 ] Mean training loss: 1.0612.
80
+ [ Wed Sep 7 23:40:50 2022 ] Time consumption: [Data]01%, [Network]99%
81
+ [ Wed Sep 7 23:40:50 2022 ] Training epoch: 20
82
+ [ Wed Sep 7 23:40:50 2022 ] Learning rate: 0.15
83
+ [ Wed Sep 7 23:47:30 2022 ] Mean training loss: 1.0556.
84
+ [ Wed Sep 7 23:47:30 2022 ] Time consumption: [Data]01%, [Network]99%
85
+ [ Wed Sep 7 23:47:30 2022 ] Training epoch: 21
86
+ [ Wed Sep 7 23:47:30 2022 ] Learning rate: 0.15
87
+ [ Wed Sep 7 23:54:08 2022 ] Mean training loss: 1.0510.
88
+ [ Wed Sep 7 23:54:08 2022 ] Time consumption: [Data]01%, [Network]99%
89
+ [ Wed Sep 7 23:54:08 2022 ] Training epoch: 22
90
+ [ Wed Sep 7 23:54:08 2022 ] Learning rate: 0.15
91
+ [ Thu Sep 8 00:00:47 2022 ] Mean training loss: 1.0254.
92
+ [ Thu Sep 8 00:00:47 2022 ] Time consumption: [Data]01%, [Network]99%
93
+ [ Thu Sep 8 00:00:47 2022 ] Training epoch: 23
94
+ [ Thu Sep 8 00:00:47 2022 ] Learning rate: 0.15
95
+ [ Thu Sep 8 00:07:26 2022 ] Mean training loss: 1.0141.
96
+ [ Thu Sep 8 00:07:26 2022 ] Time consumption: [Data]01%, [Network]99%
97
+ [ Thu Sep 8 00:07:26 2022 ] Training epoch: 24
98
+ [ Thu Sep 8 00:07:26 2022 ] Learning rate: 0.15
99
+ [ Thu Sep 8 00:14:05 2022 ] Mean training loss: 1.0105.
100
+ [ Thu Sep 8 00:14:05 2022 ] Time consumption: [Data]01%, [Network]99%
101
+ [ Thu Sep 8 00:14:05 2022 ] Training epoch: 25
102
+ [ Thu Sep 8 00:14:05 2022 ] Learning rate: 0.15
103
+ [ Thu Sep 8 00:20:45 2022 ] Mean training loss: 0.9963.
104
+ [ Thu Sep 8 00:20:45 2022 ] Time consumption: [Data]01%, [Network]99%
105
+ [ Thu Sep 8 00:20:45 2022 ] Training epoch: 26
106
+ [ Thu Sep 8 00:20:45 2022 ] Learning rate: 0.15
107
+ [ Thu Sep 8 00:27:23 2022 ] Mean training loss: 1.0034.
108
+ [ Thu Sep 8 00:27:23 2022 ] Time consumption: [Data]01%, [Network]99%
109
+ [ Thu Sep 8 00:27:23 2022 ] Training epoch: 27
110
+ [ Thu Sep 8 00:27:23 2022 ] Learning rate: 0.15
111
+ [ Thu Sep 8 00:34:03 2022 ] Mean training loss: 0.9867.
112
+ [ Thu Sep 8 00:34:03 2022 ] Time consumption: [Data]01%, [Network]99%
113
+ [ Thu Sep 8 00:34:03 2022 ] Training epoch: 28
114
+ [ Thu Sep 8 00:34:03 2022 ] Learning rate: 0.15
115
+ [ Thu Sep 8 00:40:42 2022 ] Mean training loss: 1.0034.
116
+ [ Thu Sep 8 00:40:42 2022 ] Time consumption: [Data]01%, [Network]99%
117
+ [ Thu Sep 8 00:40:42 2022 ] Training epoch: 29
118
+ [ Thu Sep 8 00:40:42 2022 ] Learning rate: 0.15
119
+ [ Thu Sep 8 00:47:21 2022 ] Mean training loss: 0.9684.
120
+ [ Thu Sep 8 00:47:21 2022 ] Time consumption: [Data]01%, [Network]99%
121
+ [ Thu Sep 8 00:47:21 2022 ] Training epoch: 30
122
+ [ Thu Sep 8 00:47:21 2022 ] Learning rate: 0.15
123
+ [ Thu Sep 8 00:53:59 2022 ] Mean training loss: 0.9827.
124
+ [ Thu Sep 8 00:53:59 2022 ] Time consumption: [Data]01%, [Network]99%
125
+ [ Thu Sep 8 00:53:59 2022 ] Training epoch: 31
126
+ [ Thu Sep 8 00:53:59 2022 ] Learning rate: 0.15
127
+ [ Thu Sep 8 01:00:38 2022 ] Mean training loss: 0.9715.
128
+ [ Thu Sep 8 01:00:38 2022 ] Time consumption: [Data]01%, [Network]99%
129
+ [ Thu Sep 8 01:00:38 2022 ] Training epoch: 32
130
+ [ Thu Sep 8 01:00:38 2022 ] Learning rate: 0.15
131
+ [ Thu Sep 8 01:07:17 2022 ] Mean training loss: 0.9609.
132
+ [ Thu Sep 8 01:07:17 2022 ] Time consumption: [Data]01%, [Network]99%
133
+ [ Thu Sep 8 01:07:17 2022 ] Training epoch: 33
134
+ [ Thu Sep 8 01:07:17 2022 ] Learning rate: 0.15
135
+ [ Thu Sep 8 01:13:56 2022 ] Mean training loss: 0.9581.
136
+ [ Thu Sep 8 01:13:56 2022 ] Time consumption: [Data]01%, [Network]99%
137
+ [ Thu Sep 8 01:13:56 2022 ] Training epoch: 34
138
+ [ Thu Sep 8 01:13:56 2022 ] Learning rate: 0.15
139
+ [ Thu Sep 8 01:20:35 2022 ] Mean training loss: 0.9673.
140
+ [ Thu Sep 8 01:20:35 2022 ] Time consumption: [Data]01%, [Network]99%
141
+ [ Thu Sep 8 01:20:35 2022 ] Training epoch: 35
142
+ [ Thu Sep 8 01:20:35 2022 ] Learning rate: 0.15
143
+ [ Thu Sep 8 01:27:17 2022 ] Mean training loss: 0.9638.
144
+ [ Thu Sep 8 01:27:17 2022 ] Time consumption: [Data]01%, [Network]99%
145
+ [ Thu Sep 8 01:27:17 2022 ] Training epoch: 36
146
+ [ Thu Sep 8 01:27:17 2022 ] Learning rate: 0.15
147
+ [ Thu Sep 8 01:33:58 2022 ] Mean training loss: 0.9563.
148
+ [ Thu Sep 8 01:33:58 2022 ] Time consumption: [Data]01%, [Network]99%
149
+ [ Thu Sep 8 01:33:58 2022 ] Training epoch: 37
150
+ [ Thu Sep 8 01:33:58 2022 ] Learning rate: 0.15
151
+ [ Thu Sep 8 01:40:39 2022 ] Mean training loss: 0.9399.
152
+ [ Thu Sep 8 01:40:39 2022 ] Time consumption: [Data]01%, [Network]99%
153
+ [ Thu Sep 8 01:40:39 2022 ] Training epoch: 38
154
+ [ Thu Sep 8 01:40:39 2022 ] Learning rate: 0.15
155
+ [ Thu Sep 8 01:47:18 2022 ] Mean training loss: 0.9570.
156
+ [ Thu Sep 8 01:47:18 2022 ] Time consumption: [Data]01%, [Network]99%
157
+ [ Thu Sep 8 01:47:18 2022 ] Training epoch: 39
158
+ [ Thu Sep 8 01:47:18 2022 ] Learning rate: 0.15
159
+ [ Thu Sep 8 01:53:59 2022 ] Mean training loss: 0.9351.
160
+ [ Thu Sep 8 01:53:59 2022 ] Time consumption: [Data]01%, [Network]99%
161
+ [ Thu Sep 8 01:53:59 2022 ] Training epoch: 40
162
+ [ Thu Sep 8 01:53:59 2022 ] Learning rate: 0.15
163
+ [ Thu Sep 8 02:00:40 2022 ] Mean training loss: 0.9485.
164
+ [ Thu Sep 8 02:00:40 2022 ] Time consumption: [Data]01%, [Network]99%
165
+ [ Thu Sep 8 02:00:40 2022 ] Training epoch: 41
166
+ [ Thu Sep 8 02:00:40 2022 ] Learning rate: 0.15
167
+ [ Thu Sep 8 02:07:22 2022 ] Mean training loss: 0.9391.
168
+ [ Thu Sep 8 02:07:22 2022 ] Time consumption: [Data]01%, [Network]99%
169
+ [ Thu Sep 8 02:07:22 2022 ] Training epoch: 42
170
+ [ Thu Sep 8 02:07:22 2022 ] Learning rate: 0.15
171
+ [ Thu Sep 8 02:14:02 2022 ] Mean training loss: 0.9480.
172
+ [ Thu Sep 8 02:14:02 2022 ] Time consumption: [Data]01%, [Network]99%
173
+ [ Thu Sep 8 02:14:02 2022 ] Training epoch: 43
174
+ [ Thu Sep 8 02:14:02 2022 ] Learning rate: 0.15
175
+ [ Thu Sep 8 02:20:42 2022 ] Mean training loss: 0.9306.
176
+ [ Thu Sep 8 02:20:42 2022 ] Time consumption: [Data]01%, [Network]99%
177
+ [ Thu Sep 8 02:20:42 2022 ] Training epoch: 44
178
+ [ Thu Sep 8 02:20:42 2022 ] Learning rate: 0.15
179
+ [ Thu Sep 8 02:27:22 2022 ] Mean training loss: 0.9404.
180
+ [ Thu Sep 8 02:27:22 2022 ] Time consumption: [Data]01%, [Network]99%
181
+ [ Thu Sep 8 02:27:22 2022 ] Training epoch: 45
182
+ [ Thu Sep 8 02:27:22 2022 ] Learning rate: 0.15
183
+ [ Thu Sep 8 02:34:02 2022 ] Mean training loss: 0.9390.
184
+ [ Thu Sep 8 02:34:02 2022 ] Time consumption: [Data]01%, [Network]99%
185
+ [ Thu Sep 8 02:34:02 2022 ] Training epoch: 46
186
+ [ Thu Sep 8 02:34:02 2022 ] Learning rate: 0.15
187
+ [ Thu Sep 8 02:40:42 2022 ] Mean training loss: 0.9434.
188
+ [ Thu Sep 8 02:40:42 2022 ] Time consumption: [Data]01%, [Network]99%
189
+ [ Thu Sep 8 02:40:42 2022 ] Training epoch: 47
190
+ [ Thu Sep 8 02:40:42 2022 ] Learning rate: 0.15
191
+ [ Thu Sep 8 02:47:24 2022 ] Mean training loss: 0.9347.
192
+ [ Thu Sep 8 02:47:24 2022 ] Time consumption: [Data]01%, [Network]99%
193
+ [ Thu Sep 8 02:47:24 2022 ] Training epoch: 48
194
+ [ Thu Sep 8 02:47:24 2022 ] Learning rate: 0.15
195
+ [ Thu Sep 8 02:54:05 2022 ] Mean training loss: 0.9452.
196
+ [ Thu Sep 8 02:54:05 2022 ] Time consumption: [Data]01%, [Network]99%
197
+ [ Thu Sep 8 02:54:05 2022 ] Training epoch: 49
198
+ [ Thu Sep 8 02:54:05 2022 ] Learning rate: 0.15
199
+ [ Thu Sep 8 03:00:46 2022 ] Mean training loss: 0.9305.
200
+ [ Thu Sep 8 03:00:46 2022 ] Time consumption: [Data]01%, [Network]99%
201
+ [ Thu Sep 8 03:00:46 2022 ] Training epoch: 50
202
+ [ Thu Sep 8 03:00:46 2022 ] Learning rate: 0.15
203
+ [ Thu Sep 8 03:07:27 2022 ] Mean training loss: 0.9310.
204
+ [ Thu Sep 8 03:07:27 2022 ] Time consumption: [Data]01%, [Network]99%
205
+ [ Thu Sep 8 03:07:27 2022 ] Training epoch: 51
206
+ [ Thu Sep 8 03:07:27 2022 ] Learning rate: 0.015
207
+ [ Thu Sep 8 03:14:07 2022 ] Mean training loss: 0.4632.
208
+ [ Thu Sep 8 03:14:07 2022 ] Time consumption: [Data]01%, [Network]99%
209
+ [ Thu Sep 8 03:14:07 2022 ] Eval epoch: 51
210
+ [ Thu Sep 8 03:21:03 2022 ] Epoch 51 Curr Acc: (26802/50919)52.64%
211
+ [ Thu Sep 8 03:21:03 2022 ] Epoch 51 Best Acc 52.64%
212
+ [ Thu Sep 8 03:21:03 2022 ] Training epoch: 52
213
+ [ Thu Sep 8 03:21:03 2022 ] Learning rate: 0.015
214
+ [ Thu Sep 8 03:27:41 2022 ] Mean training loss: 0.3347.
215
+ [ Thu Sep 8 03:27:41 2022 ] Time consumption: [Data]01%, [Network]99%
216
+ [ Thu Sep 8 03:27:41 2022 ] Eval epoch: 52
217
+ [ Thu Sep 8 03:34:29 2022 ] Epoch 52 Curr Acc: (28067/50919)55.12%
218
+ [ Thu Sep 8 03:34:29 2022 ] Epoch 52 Best Acc 55.12%
219
+ [ Thu Sep 8 03:34:29 2022 ] Training epoch: 53
220
+ [ Thu Sep 8 03:34:29 2022 ] Learning rate: 0.015
221
+ [ Thu Sep 8 03:41:07 2022 ] Mean training loss: 0.2764.
222
+ [ Thu Sep 8 03:41:07 2022 ] Time consumption: [Data]01%, [Network]99%
223
+ [ Thu Sep 8 03:41:07 2022 ] Eval epoch: 53
224
+ [ Thu Sep 8 03:47:55 2022 ] Epoch 53 Curr Acc: (28580/50919)56.13%
225
+ [ Thu Sep 8 03:47:55 2022 ] Epoch 53 Best Acc 56.13%
226
+ [ Thu Sep 8 03:47:55 2022 ] Training epoch: 54
227
+ [ Thu Sep 8 03:47:55 2022 ] Learning rate: 0.015
228
+ [ Thu Sep 8 03:54:33 2022 ] Mean training loss: 0.2362.
229
+ [ Thu Sep 8 03:54:33 2022 ] Time consumption: [Data]01%, [Network]99%
230
+ [ Thu Sep 8 03:54:33 2022 ] Eval epoch: 54
231
+ [ Thu Sep 8 04:01:20 2022 ] Epoch 54 Curr Acc: (28779/50919)56.52%
232
+ [ Thu Sep 8 04:01:20 2022 ] Epoch 54 Best Acc 56.52%
233
+ [ Thu Sep 8 04:01:20 2022 ] Training epoch: 55
234
+ [ Thu Sep 8 04:01:20 2022 ] Learning rate: 0.015
235
+ [ Thu Sep 8 04:07:58 2022 ] Mean training loss: 0.1985.
236
+ [ Thu Sep 8 04:07:58 2022 ] Time consumption: [Data]01%, [Network]99%
237
+ [ Thu Sep 8 04:07:58 2022 ] Eval epoch: 55
238
+ [ Thu Sep 8 04:14:46 2022 ] Epoch 55 Curr Acc: (28006/50919)55.00%
239
+ [ Thu Sep 8 04:14:46 2022 ] Epoch 54 Best Acc 56.52%
240
+ [ Thu Sep 8 04:14:46 2022 ] Training epoch: 56
241
+ [ Thu Sep 8 04:14:46 2022 ] Learning rate: 0.015
242
+ [ Thu Sep 8 04:21:23 2022 ] Mean training loss: 0.1702.
243
+ [ Thu Sep 8 04:21:23 2022 ] Time consumption: [Data]01%, [Network]99%
244
+ [ Thu Sep 8 04:21:23 2022 ] Eval epoch: 56
245
+ [ Thu Sep 8 04:28:11 2022 ] Epoch 56 Curr Acc: (28411/50919)55.80%
246
+ [ Thu Sep 8 04:28:11 2022 ] Epoch 54 Best Acc 56.52%
247
+ [ Thu Sep 8 04:28:11 2022 ] Training epoch: 57
248
+ [ Thu Sep 8 04:28:11 2022 ] Learning rate: 0.015
249
+ [ Thu Sep 8 04:34:50 2022 ] Mean training loss: 0.1436.
250
+ [ Thu Sep 8 04:34:50 2022 ] Time consumption: [Data]01%, [Network]99%
251
+ [ Thu Sep 8 04:34:51 2022 ] Eval epoch: 57
252
+ [ Thu Sep 8 04:41:38 2022 ] Epoch 57 Curr Acc: (28130/50919)55.24%
253
+ [ Thu Sep 8 04:41:38 2022 ] Epoch 54 Best Acc 56.52%
254
+ [ Thu Sep 8 04:41:38 2022 ] Training epoch: 58
255
+ [ Thu Sep 8 04:41:38 2022 ] Learning rate: 0.015
256
+ [ Thu Sep 8 04:48:17 2022 ] Mean training loss: 0.1257.
257
+ [ Thu Sep 8 04:48:17 2022 ] Time consumption: [Data]01%, [Network]99%
258
+ [ Thu Sep 8 04:48:17 2022 ] Eval epoch: 58
259
+ [ Thu Sep 8 04:55:04 2022 ] Epoch 58 Curr Acc: (27163/50919)53.35%
260
+ [ Thu Sep 8 04:55:04 2022 ] Epoch 54 Best Acc 56.52%
261
+ [ Thu Sep 8 04:55:04 2022 ] Training epoch: 59
262
+ [ Thu Sep 8 04:55:04 2022 ] Learning rate: 0.015
263
+ [ Thu Sep 8 05:01:44 2022 ] Mean training loss: 0.1122.
264
+ [ Thu Sep 8 05:01:44 2022 ] Time consumption: [Data]01%, [Network]99%
265
+ [ Thu Sep 8 05:01:44 2022 ] Eval epoch: 59
266
+ [ Thu Sep 8 05:08:32 2022 ] Epoch 59 Curr Acc: (28446/50919)55.87%
267
+ [ Thu Sep 8 05:08:32 2022 ] Epoch 54 Best Acc 56.52%
268
+ [ Thu Sep 8 05:08:32 2022 ] Training epoch: 60
269
+ [ Thu Sep 8 05:08:32 2022 ] Learning rate: 0.015
270
+ [ Thu Sep 8 05:15:11 2022 ] Mean training loss: 0.1003.
271
+ [ Thu Sep 8 05:15:11 2022 ] Time consumption: [Data]01%, [Network]99%
272
+ [ Thu Sep 8 05:15:11 2022 ] Eval epoch: 60
273
+ [ Thu Sep 8 05:21:58 2022 ] Epoch 60 Curr Acc: (27848/50919)54.69%
274
+ [ Thu Sep 8 05:21:58 2022 ] Epoch 54 Best Acc 56.52%
275
+ [ Thu Sep 8 05:21:58 2022 ] Training epoch: 61
276
+ [ Thu Sep 8 05:21:58 2022 ] Learning rate: 0.015
277
+ [ Thu Sep 8 05:28:37 2022 ] Mean training loss: 0.0900.
278
+ [ Thu Sep 8 05:28:37 2022 ] Time consumption: [Data]01%, [Network]99%
279
+ [ Thu Sep 8 05:28:37 2022 ] Eval epoch: 61
280
+ [ Thu Sep 8 05:35:24 2022 ] Epoch 61 Curr Acc: (28017/50919)55.02%
281
+ [ Thu Sep 8 05:35:24 2022 ] Epoch 54 Best Acc 56.52%
282
+ [ Thu Sep 8 05:35:24 2022 ] Training epoch: 62
283
+ [ Thu Sep 8 05:35:24 2022 ] Learning rate: 0.015
284
+ [ Thu Sep 8 05:42:03 2022 ] Mean training loss: 0.0846.
285
+ [ Thu Sep 8 05:42:03 2022 ] Time consumption: [Data]01%, [Network]99%
286
+ [ Thu Sep 8 05:42:03 2022 ] Eval epoch: 62
287
+ [ Thu Sep 8 05:48:51 2022 ] Epoch 62 Curr Acc: (27292/50919)53.60%
288
+ [ Thu Sep 8 05:48:51 2022 ] Epoch 54 Best Acc 56.52%
289
+ [ Thu Sep 8 05:48:51 2022 ] Training epoch: 63
290
+ [ Thu Sep 8 05:48:51 2022 ] Learning rate: 0.015
291
+ [ Thu Sep 8 05:55:29 2022 ] Mean training loss: 0.0817.
292
+ [ Thu Sep 8 05:55:29 2022 ] Time consumption: [Data]01%, [Network]99%
293
+ [ Thu Sep 8 05:55:29 2022 ] Eval epoch: 63
294
+ [ Thu Sep 8 06:02:16 2022 ] Epoch 63 Curr Acc: (27251/50919)53.52%
295
+ [ Thu Sep 8 06:02:16 2022 ] Epoch 54 Best Acc 56.52%
296
+ [ Thu Sep 8 06:02:16 2022 ] Training epoch: 64
297
+ [ Thu Sep 8 06:02:16 2022 ] Learning rate: 0.015
298
+ [ Thu Sep 8 06:08:55 2022 ] Mean training loss: 0.0857.
299
+ [ Thu Sep 8 06:08:55 2022 ] Time consumption: [Data]01%, [Network]99%
300
+ [ Thu Sep 8 06:08:55 2022 ] Eval epoch: 64
301
+ [ Thu Sep 8 06:15:42 2022 ] Epoch 64 Curr Acc: (27321/50919)53.66%
302
+ [ Thu Sep 8 06:15:42 2022 ] Epoch 54 Best Acc 56.52%
303
+ [ Thu Sep 8 06:15:42 2022 ] Training epoch: 65
304
+ [ Thu Sep 8 06:15:42 2022 ] Learning rate: 0.015
305
+ [ Thu Sep 8 06:22:21 2022 ] Mean training loss: 0.0840.
306
+ [ Thu Sep 8 06:22:21 2022 ] Time consumption: [Data]01%, [Network]99%
307
+ [ Thu Sep 8 06:22:21 2022 ] Eval epoch: 65
308
+ [ Thu Sep 8 06:29:08 2022 ] Epoch 65 Curr Acc: (27135/50919)53.29%
309
+ [ Thu Sep 8 06:29:08 2022 ] Epoch 54 Best Acc 56.52%
310
+ [ Thu Sep 8 06:29:08 2022 ] Training epoch: 66
311
+ [ Thu Sep 8 06:29:08 2022 ] Learning rate: 0.015
312
+ [ Thu Sep 8 06:35:46 2022 ] Mean training loss: 0.0902.
313
+ [ Thu Sep 8 06:35:46 2022 ] Time consumption: [Data]01%, [Network]99%
314
+ [ Thu Sep 8 06:35:46 2022 ] Eval epoch: 66
315
+ [ Thu Sep 8 06:42:33 2022 ] Epoch 66 Curr Acc: (27501/50919)54.01%
316
+ [ Thu Sep 8 06:42:33 2022 ] Epoch 54 Best Acc 56.52%
317
+ [ Thu Sep 8 06:42:33 2022 ] Training epoch: 67
318
+ [ Thu Sep 8 06:42:33 2022 ] Learning rate: 0.015
319
+ [ Thu Sep 8 06:49:10 2022 ] Mean training loss: 0.0792.
320
+ [ Thu Sep 8 06:49:10 2022 ] Time consumption: [Data]01%, [Network]99%
321
+ [ Thu Sep 8 06:49:10 2022 ] Eval epoch: 67
322
+ [ Thu Sep 8 06:55:57 2022 ] Epoch 67 Curr Acc: (26786/50919)52.61%
323
+ [ Thu Sep 8 06:55:57 2022 ] Epoch 54 Best Acc 56.52%
324
+ [ Thu Sep 8 06:55:57 2022 ] Training epoch: 68
325
+ [ Thu Sep 8 06:55:57 2022 ] Learning rate: 0.015
326
+ [ Thu Sep 8 07:02:36 2022 ] Mean training loss: 0.0829.
327
+ [ Thu Sep 8 07:02:36 2022 ] Time consumption: [Data]01%, [Network]99%
328
+ [ Thu Sep 8 07:02:36 2022 ] Eval epoch: 68
329
+ [ Thu Sep 8 07:09:23 2022 ] Epoch 68 Curr Acc: (27327/50919)53.67%
330
+ [ Thu Sep 8 07:09:23 2022 ] Epoch 54 Best Acc 56.52%
331
+ [ Thu Sep 8 07:09:23 2022 ] Training epoch: 69
332
+ [ Thu Sep 8 07:09:23 2022 ] Learning rate: 0.015
333
+ [ Thu Sep 8 07:16:02 2022 ] Mean training loss: 0.0866.
334
+ [ Thu Sep 8 07:16:02 2022 ] Time consumption: [Data]01%, [Network]99%
335
+ [ Thu Sep 8 07:16:02 2022 ] Eval epoch: 69
336
+ [ Thu Sep 8 07:22:49 2022 ] Epoch 69 Curr Acc: (27766/50919)54.53%
337
+ [ Thu Sep 8 07:22:49 2022 ] Epoch 54 Best Acc 56.52%
338
+ [ Thu Sep 8 07:22:49 2022 ] Training epoch: 70
339
+ [ Thu Sep 8 07:22:49 2022 ] Learning rate: 0.015
340
+ [ Thu Sep 8 07:29:28 2022 ] Mean training loss: 0.0811.
341
+ [ Thu Sep 8 07:29:28 2022 ] Time consumption: [Data]01%, [Network]99%
342
+ [ Thu Sep 8 07:29:28 2022 ] Eval epoch: 70
343
+ [ Thu Sep 8 07:36:14 2022 ] Epoch 70 Curr Acc: (26193/50919)51.44%
344
+ [ Thu Sep 8 07:36:14 2022 ] Epoch 54 Best Acc 56.52%
345
+ [ Thu Sep 8 07:36:14 2022 ] Training epoch: 71
346
+ [ Thu Sep 8 07:36:14 2022 ] Learning rate: 0.0015000000000000002
347
+ [ Thu Sep 8 07:42:53 2022 ] Mean training loss: 0.0408.
348
+ [ Thu Sep 8 07:42:53 2022 ] Time consumption: [Data]01%, [Network]99%
349
+ [ Thu Sep 8 07:42:53 2022 ] Eval epoch: 71
350
+ [ Thu Sep 8 07:49:40 2022 ] Epoch 71 Curr Acc: (28004/50919)55.00%
351
+ [ Thu Sep 8 07:49:40 2022 ] Epoch 54 Best Acc 56.52%
352
+ [ Thu Sep 8 07:49:40 2022 ] Training epoch: 72
353
+ [ Thu Sep 8 07:49:40 2022 ] Learning rate: 0.0015000000000000002
354
+ [ Thu Sep 8 07:56:18 2022 ] Mean training loss: 0.0294.
355
+ [ Thu Sep 8 07:56:18 2022 ] Time consumption: [Data]01%, [Network]99%
356
+ [ Thu Sep 8 07:56:18 2022 ] Eval epoch: 72
357
+ [ Thu Sep 8 08:03:04 2022 ] Epoch 72 Curr Acc: (28081/50919)55.15%
358
+ [ Thu Sep 8 08:03:04 2022 ] Epoch 54 Best Acc 56.52%
359
+ [ Thu Sep 8 08:03:04 2022 ] Training epoch: 73
360
+ [ Thu Sep 8 08:03:04 2022 ] Learning rate: 0.0015000000000000002
361
+ [ Thu Sep 8 08:09:43 2022 ] Mean training loss: 0.0240.
362
+ [ Thu Sep 8 08:09:43 2022 ] Time consumption: [Data]01%, [Network]99%
363
+ [ Thu Sep 8 08:09:43 2022 ] Eval epoch: 73
364
+ [ Thu Sep 8 08:16:29 2022 ] Epoch 73 Curr Acc: (28268/50919)55.52%
365
+ [ Thu Sep 8 08:16:29 2022 ] Epoch 54 Best Acc 56.52%
366
+ [ Thu Sep 8 08:16:29 2022 ] Training epoch: 74
367
+ [ Thu Sep 8 08:16:29 2022 ] Learning rate: 0.0015000000000000002
368
+ [ Thu Sep 8 08:23:07 2022 ] Mean training loss: 0.0197.
369
+ [ Thu Sep 8 08:23:07 2022 ] Time consumption: [Data]01%, [Network]99%
370
+ [ Thu Sep 8 08:23:07 2022 ] Eval epoch: 74
371
+ [ Thu Sep 8 08:29:54 2022 ] Epoch 74 Curr Acc: (27956/50919)54.90%
372
+ [ Thu Sep 8 08:29:54 2022 ] Epoch 54 Best Acc 56.52%
373
+ [ Thu Sep 8 08:29:54 2022 ] Training epoch: 75
374
+ [ Thu Sep 8 08:29:54 2022 ] Learning rate: 0.0015000000000000002
375
+ [ Thu Sep 8 08:36:32 2022 ] Mean training loss: 0.0185.
376
+ [ Thu Sep 8 08:36:32 2022 ] Time consumption: [Data]01%, [Network]99%
377
+ [ Thu Sep 8 08:36:32 2022 ] Eval epoch: 75
378
+ [ Thu Sep 8 08:43:19 2022 ] Epoch 75 Curr Acc: (28479/50919)55.93%
379
+ [ Thu Sep 8 08:43:19 2022 ] Epoch 54 Best Acc 56.52%
380
+ [ Thu Sep 8 08:43:19 2022 ] Training epoch: 76
381
+ [ Thu Sep 8 08:43:19 2022 ] Learning rate: 0.0015000000000000002
382
+ [ Thu Sep 8 08:49:58 2022 ] Mean training loss: 0.0187.
383
+ [ Thu Sep 8 08:49:58 2022 ] Time consumption: [Data]01%, [Network]99%
384
+ [ Thu Sep 8 08:49:58 2022 ] Eval epoch: 76
385
+ [ Thu Sep 8 08:56:45 2022 ] Epoch 76 Curr Acc: (27736/50919)54.47%
386
+ [ Thu Sep 8 08:56:45 2022 ] Epoch 54 Best Acc 56.52%
387
+ [ Thu Sep 8 08:56:45 2022 ] Training epoch: 77
388
+ [ Thu Sep 8 08:56:45 2022 ] Learning rate: 0.0015000000000000002
389
+ [ Thu Sep 8 09:03:23 2022 ] Mean training loss: 0.0174.
390
+ [ Thu Sep 8 09:03:23 2022 ] Time consumption: [Data]01%, [Network]99%
391
+ [ Thu Sep 8 09:03:23 2022 ] Eval epoch: 77
392
+ [ Thu Sep 8 09:10:10 2022 ] Epoch 77 Curr Acc: (28292/50919)55.56%
393
+ [ Thu Sep 8 09:10:10 2022 ] Epoch 54 Best Acc 56.52%
394
+ [ Thu Sep 8 09:10:10 2022 ] Training epoch: 78
395
+ [ Thu Sep 8 09:10:10 2022 ] Learning rate: 0.0015000000000000002
396
+ [ Thu Sep 8 09:16:49 2022 ] Mean training loss: 0.0167.
397
+ [ Thu Sep 8 09:16:49 2022 ] Time consumption: [Data]01%, [Network]99%
398
+ [ Thu Sep 8 09:16:50 2022 ] Eval epoch: 78
399
+ [ Thu Sep 8 09:23:37 2022 ] Epoch 78 Curr Acc: (28095/50919)55.18%
400
+ [ Thu Sep 8 09:23:37 2022 ] Epoch 54 Best Acc 56.52%
401
+ [ Thu Sep 8 09:23:37 2022 ] Training epoch: 79
402
+ [ Thu Sep 8 09:23:37 2022 ] Learning rate: 0.0015000000000000002
403
+ [ Thu Sep 8 09:30:16 2022 ] Mean training loss: 0.0166.
404
+ [ Thu Sep 8 09:30:16 2022 ] Time consumption: [Data]01%, [Network]99%
405
+ [ Thu Sep 8 09:30:16 2022 ] Eval epoch: 79
406
+ [ Thu Sep 8 09:37:03 2022 ] Epoch 79 Curr Acc: (27909/50919)54.81%
407
+ [ Thu Sep 8 09:37:03 2022 ] Epoch 54 Best Acc 56.52%
408
+ [ Thu Sep 8 09:37:03 2022 ] Training epoch: 80
409
+ [ Thu Sep 8 09:37:03 2022 ] Learning rate: 0.0015000000000000002
410
+ [ Thu Sep 8 09:43:41 2022 ] Mean training loss: 0.0145.
411
+ [ Thu Sep 8 09:43:41 2022 ] Time consumption: [Data]01%, [Network]99%
412
+ [ Thu Sep 8 09:43:41 2022 ] Eval epoch: 80
413
+ [ Thu Sep 8 09:50:28 2022 ] Epoch 80 Curr Acc: (27822/50919)54.64%
414
+ [ Thu Sep 8 09:50:28 2022 ] Epoch 54 Best Acc 56.52%
415
+ [ Thu Sep 8 09:50:28 2022 ] Training epoch: 81
416
+ [ Thu Sep 8 09:50:28 2022 ] Learning rate: 0.0015000000000000002
417
+ [ Thu Sep 8 09:57:07 2022 ] Mean training loss: 0.0152.
418
+ [ Thu Sep 8 09:57:07 2022 ] Time consumption: [Data]01%, [Network]99%
419
+ [ Thu Sep 8 09:57:07 2022 ] Eval epoch: 81
420
+ [ Thu Sep 8 10:03:54 2022 ] Epoch 81 Curr Acc: (28262/50919)55.50%
421
+ [ Thu Sep 8 10:03:54 2022 ] Epoch 54 Best Acc 56.52%
422
+ [ Thu Sep 8 10:03:54 2022 ] Training epoch: 82
423
+ [ Thu Sep 8 10:03:54 2022 ] Learning rate: 0.0015000000000000002
424
+ [ Thu Sep 8 10:10:33 2022 ] Mean training loss: 0.0138.
425
+ [ Thu Sep 8 10:10:33 2022 ] Time consumption: [Data]01%, [Network]99%
426
+ [ Thu Sep 8 10:10:33 2022 ] Eval epoch: 82
427
+ [ Thu Sep 8 10:17:21 2022 ] Epoch 82 Curr Acc: (28551/50919)56.07%
428
+ [ Thu Sep 8 10:17:21 2022 ] Epoch 54 Best Acc 56.52%
429
+ [ Thu Sep 8 10:17:21 2022 ] Training epoch: 83
430
+ [ Thu Sep 8 10:17:21 2022 ] Learning rate: 0.0015000000000000002
431
+ [ Thu Sep 8 10:24:00 2022 ] Mean training loss: 0.0138.
432
+ [ Thu Sep 8 10:24:00 2022 ] Time consumption: [Data]01%, [Network]99%
433
+ [ Thu Sep 8 10:24:00 2022 ] Eval epoch: 83
434
+ [ Thu Sep 8 10:30:48 2022 ] Epoch 83 Curr Acc: (28096/50919)55.18%
435
+ [ Thu Sep 8 10:30:48 2022 ] Epoch 54 Best Acc 56.52%
436
+ [ Thu Sep 8 10:30:48 2022 ] Training epoch: 84
437
+ [ Thu Sep 8 10:30:48 2022 ] Learning rate: 0.0015000000000000002
438
+ [ Thu Sep 8 10:37:27 2022 ] Mean training loss: 0.0137.
439
+ [ Thu Sep 8 10:37:27 2022 ] Time consumption: [Data]01%, [Network]99%
440
+ [ Thu Sep 8 10:37:27 2022 ] Eval epoch: 84
441
+ [ Thu Sep 8 10:44:14 2022 ] Epoch 84 Curr Acc: (27911/50919)54.81%
442
+ [ Thu Sep 8 10:44:14 2022 ] Epoch 54 Best Acc 56.52%
443
+ [ Thu Sep 8 10:44:14 2022 ] Training epoch: 85
444
+ [ Thu Sep 8 10:44:14 2022 ] Learning rate: 0.0015000000000000002
445
+ [ Thu Sep 8 10:50:53 2022 ] Mean training loss: 0.0149.
446
+ [ Thu Sep 8 10:50:53 2022 ] Time consumption: [Data]01%, [Network]99%
447
+ [ Thu Sep 8 10:50:53 2022 ] Eval epoch: 85
448
+ [ Thu Sep 8 10:57:41 2022 ] Epoch 85 Curr Acc: (28660/50919)56.29%
449
+ [ Thu Sep 8 10:57:41 2022 ] Epoch 54 Best Acc 56.52%
450
+ [ Thu Sep 8 10:57:41 2022 ] Training epoch: 86
451
+ [ Thu Sep 8 10:57:41 2022 ] Learning rate: 0.0015000000000000002
452
+ [ Thu Sep 8 11:04:18 2022 ] Mean training loss: 0.0134.
453
+ [ Thu Sep 8 11:04:18 2022 ] Time consumption: [Data]01%, [Network]99%
454
+ [ Thu Sep 8 11:04:18 2022 ] Eval epoch: 86
455
+ [ Thu Sep 8 11:11:05 2022 ] Epoch 86 Curr Acc: (28392/50919)55.76%
456
+ [ Thu Sep 8 11:11:05 2022 ] Epoch 54 Best Acc 56.52%
457
+ [ Thu Sep 8 11:11:05 2022 ] Training epoch: 87
458
+ [ Thu Sep 8 11:11:05 2022 ] Learning rate: 0.0015000000000000002
459
+ [ Thu Sep 8 11:17:44 2022 ] Mean training loss: 0.0134.
460
+ [ Thu Sep 8 11:17:44 2022 ] Time consumption: [Data]01%, [Network]99%
461
+ [ Thu Sep 8 11:17:44 2022 ] Eval epoch: 87
462
+ [ Thu Sep 8 11:24:32 2022 ] Epoch 87 Curr Acc: (28551/50919)56.07%
463
+ [ Thu Sep 8 11:24:32 2022 ] Epoch 54 Best Acc 56.52%
464
+ [ Thu Sep 8 11:24:32 2022 ] Training epoch: 88
465
+ [ Thu Sep 8 11:24:32 2022 ] Learning rate: 0.0015000000000000002
466
+ [ Thu Sep 8 11:31:12 2022 ] Mean training loss: 0.0114.
467
+ [ Thu Sep 8 11:31:12 2022 ] Time consumption: [Data]01%, [Network]99%
468
+ [ Thu Sep 8 11:31:12 2022 ] Eval epoch: 88
469
+ [ Thu Sep 8 11:38:00 2022 ] Epoch 88 Curr Acc: (28432/50919)55.84%
470
+ [ Thu Sep 8 11:38:00 2022 ] Epoch 54 Best Acc 56.52%
471
+ [ Thu Sep 8 11:38:00 2022 ] Training epoch: 89
472
+ [ Thu Sep 8 11:38:00 2022 ] Learning rate: 0.0015000000000000002
473
+ [ Thu Sep 8 11:44:39 2022 ] Mean training loss: 0.0120.
474
+ [ Thu Sep 8 11:44:39 2022 ] Time consumption: [Data]01%, [Network]99%
475
+ [ Thu Sep 8 11:44:39 2022 ] Eval epoch: 89
476
+ [ Thu Sep 8 11:51:27 2022 ] Epoch 89 Curr Acc: (28606/50919)56.18%
477
+ [ Thu Sep 8 11:51:27 2022 ] Epoch 54 Best Acc 56.52%
478
+ [ Thu Sep 8 11:51:27 2022 ] Training epoch: 90
479
+ [ Thu Sep 8 11:51:27 2022 ] Learning rate: 0.0015000000000000002
480
+ [ Thu Sep 8 11:58:07 2022 ] Mean training loss: 0.0120.
481
+ [ Thu Sep 8 11:58:07 2022 ] Time consumption: [Data]01%, [Network]99%
482
+ [ Thu Sep 8 11:58:07 2022 ] Eval epoch: 90
483
+ [ Thu Sep 8 12:04:55 2022 ] Epoch 90 Curr Acc: (28496/50919)55.96%
484
+ [ Thu Sep 8 12:04:55 2022 ] Epoch 54 Best Acc 56.52%
485
+ [ Thu Sep 8 12:04:55 2022 ] Training epoch: 91
486
+ [ Thu Sep 8 12:04:55 2022 ] Learning rate: 0.00015000000000000004
487
+ [ Thu Sep 8 12:11:36 2022 ] Mean training loss: 0.0114.
488
+ [ Thu Sep 8 12:11:36 2022 ] Time consumption: [Data]01%, [Network]99%
489
+ [ Thu Sep 8 12:11:36 2022 ] Eval epoch: 91
490
+ [ Thu Sep 8 12:18:24 2022 ] Epoch 91 Curr Acc: (28335/50919)55.65%
491
+ [ Thu Sep 8 12:18:24 2022 ] Epoch 54 Best Acc 56.52%
492
+ [ Thu Sep 8 12:18:24 2022 ] Training epoch: 92
493
+ [ Thu Sep 8 12:18:24 2022 ] Learning rate: 0.00015000000000000004
494
+ [ Thu Sep 8 12:25:03 2022 ] Mean training loss: 0.0117.
495
+ [ Thu Sep 8 12:25:03 2022 ] Time consumption: [Data]01%, [Network]99%
496
+ [ Thu Sep 8 12:25:03 2022 ] Eval epoch: 92
497
+ [ Thu Sep 8 12:31:52 2022 ] Epoch 92 Curr Acc: (28666/50919)56.30%
498
+ [ Thu Sep 8 12:31:52 2022 ] Epoch 54 Best Acc 56.52%
499
+ [ Thu Sep 8 12:31:52 2022 ] Training epoch: 93
500
+ [ Thu Sep 8 12:31:52 2022 ] Learning rate: 0.00015000000000000004
501
+ [ Thu Sep 8 12:38:30 2022 ] Mean training loss: 0.0121.
502
+ [ Thu Sep 8 12:38:30 2022 ] Time consumption: [Data]01%, [Network]99%
503
+ [ Thu Sep 8 12:38:31 2022 ] Eval epoch: 93
504
+ [ Thu Sep 8 12:45:19 2022 ] Epoch 93 Curr Acc: (28146/50919)55.28%
505
+ [ Thu Sep 8 12:45:19 2022 ] Epoch 54 Best Acc 56.52%
506
+ [ Thu Sep 8 12:45:19 2022 ] Training epoch: 94
507
+ [ Thu Sep 8 12:45:19 2022 ] Learning rate: 0.00015000000000000004
508
+ [ Thu Sep 8 12:51:58 2022 ] Mean training loss: 0.0111.
509
+ [ Thu Sep 8 12:51:58 2022 ] Time consumption: [Data]01%, [Network]99%
510
+ [ Thu Sep 8 12:51:58 2022 ] Eval epoch: 94
511
+ [ Thu Sep 8 12:58:47 2022 ] Epoch 94 Curr Acc: (28741/50919)56.44%
512
+ [ Thu Sep 8 12:58:47 2022 ] Epoch 54 Best Acc 56.52%
513
+ [ Thu Sep 8 12:58:47 2022 ] Training epoch: 95
514
+ [ Thu Sep 8 12:58:47 2022 ] Learning rate: 0.00015000000000000004
515
+ [ Thu Sep 8 13:05:26 2022 ] Mean training loss: 0.0116.
516
+ [ Thu Sep 8 13:05:26 2022 ] Time consumption: [Data]01%, [Network]99%
517
+ [ Thu Sep 8 13:05:26 2022 ] Eval epoch: 95
518
+ [ Thu Sep 8 13:12:14 2022 ] Epoch 95 Curr Acc: (28011/50919)55.01%
519
+ [ Thu Sep 8 13:12:14 2022 ] Epoch 54 Best Acc 56.52%
520
+ [ Thu Sep 8 13:12:14 2022 ] Training epoch: 96
521
+ [ Thu Sep 8 13:12:14 2022 ] Learning rate: 0.00015000000000000004
522
+ [ Thu Sep 8 13:18:54 2022 ] Mean training loss: 0.0116.
523
+ [ Thu Sep 8 13:18:54 2022 ] Time consumption: [Data]01%, [Network]99%
524
+ [ Thu Sep 8 13:18:54 2022 ] Eval epoch: 96
525
+ [ Thu Sep 8 13:25:42 2022 ] Epoch 96 Curr Acc: (27446/50919)53.90%
526
+ [ Thu Sep 8 13:25:42 2022 ] Epoch 54 Best Acc 56.52%
527
+ [ Thu Sep 8 13:25:42 2022 ] Training epoch: 97
528
+ [ Thu Sep 8 13:25:42 2022 ] Learning rate: 0.00015000000000000004
529
+ [ Thu Sep 8 13:32:23 2022 ] Mean training loss: 0.0113.
530
+ [ Thu Sep 8 13:32:23 2022 ] Time consumption: [Data]01%, [Network]99%
531
+ [ Thu Sep 8 13:32:23 2022 ] Eval epoch: 97
532
+ [ Thu Sep 8 13:39:11 2022 ] Epoch 97 Curr Acc: (28261/50919)55.50%
533
+ [ Thu Sep 8 13:39:11 2022 ] Epoch 54 Best Acc 56.52%
534
+ [ Thu Sep 8 13:39:11 2022 ] Training epoch: 98
535
+ [ Thu Sep 8 13:39:11 2022 ] Learning rate: 0.00015000000000000004
536
+ [ Thu Sep 8 13:45:52 2022 ] Mean training loss: 0.0113.
537
+ [ Thu Sep 8 13:45:52 2022 ] Time consumption: [Data]01%, [Network]99%
538
+ [ Thu Sep 8 13:45:52 2022 ] Eval epoch: 98
539
+ [ Thu Sep 8 13:52:39 2022 ] Epoch 98 Curr Acc: (28415/50919)55.80%
540
+ [ Thu Sep 8 13:52:39 2022 ] Epoch 54 Best Acc 56.52%
541
+ [ Thu Sep 8 13:52:39 2022 ] Training epoch: 99
542
+ [ Thu Sep 8 13:52:39 2022 ] Learning rate: 0.00015000000000000004
543
+ [ Thu Sep 8 13:59:17 2022 ] Mean training loss: 0.0111.
544
+ [ Thu Sep 8 13:59:17 2022 ] Time consumption: [Data]01%, [Network]99%
545
+ [ Thu Sep 8 13:59:17 2022 ] Eval epoch: 99
546
+ [ Thu Sep 8 14:06:04 2022 ] Epoch 99 Curr Acc: (28017/50919)55.02%
547
+ [ Thu Sep 8 14:06:04 2022 ] Epoch 54 Best Acc 56.52%
548
+ [ Thu Sep 8 14:06:04 2022 ] Training epoch: 100
549
+ [ Thu Sep 8 14:06:04 2022 ] Learning rate: 0.00015000000000000004
550
+ [ Thu Sep 8 14:12:42 2022 ] Mean training loss: 0.0104.
551
+ [ Thu Sep 8 14:12:42 2022 ] Time consumption: [Data]01%, [Network]99%
552
+ [ Thu Sep 8 14:12:42 2022 ] Eval epoch: 100
553
+ [ Thu Sep 8 14:19:29 2022 ] Epoch 100 Curr Acc: (28442/50919)55.86%
554
+ [ Thu Sep 8 14:19:29 2022 ] Epoch 54 Best Acc 56.52%
555
+ [ Thu Sep 8 14:19:29 2022 ] Training epoch: 101
556
+ [ Thu Sep 8 14:19:29 2022 ] Learning rate: 0.00015000000000000004
557
+ [ Thu Sep 8 14:26:07 2022 ] Mean training loss: 0.0109.
558
+ [ Thu Sep 8 14:26:07 2022 ] Time consumption: [Data]01%, [Network]99%
559
+ [ Thu Sep 8 14:26:07 2022 ] Eval epoch: 101
560
+ [ Thu Sep 8 14:32:54 2022 ] Epoch 101 Curr Acc: (28236/50919)55.45%
561
+ [ Thu Sep 8 14:32:54 2022 ] Epoch 54 Best Acc 56.52%
562
+ [ Thu Sep 8 14:32:54 2022 ] Training epoch: 102
563
+ [ Thu Sep 8 14:32:54 2022 ] Learning rate: 0.00015000000000000004
564
+ [ Thu Sep 8 14:39:33 2022 ] Mean training loss: 0.0116.
565
+ [ Thu Sep 8 14:39:33 2022 ] Time consumption: [Data]01%, [Network]99%
566
+ [ Thu Sep 8 14:39:33 2022 ] Eval epoch: 102
567
+ [ Thu Sep 8 14:46:20 2022 ] Epoch 102 Curr Acc: (28441/50919)55.86%
568
+ [ Thu Sep 8 14:46:20 2022 ] Epoch 54 Best Acc 56.52%
569
+ [ Thu Sep 8 14:46:20 2022 ] Training epoch: 103
570
+ [ Thu Sep 8 14:46:20 2022 ] Learning rate: 0.00015000000000000004
571
+ [ Thu Sep 8 14:52:58 2022 ] Mean training loss: 0.0122.
572
+ [ Thu Sep 8 14:52:58 2022 ] Time consumption: [Data]01%, [Network]99%
573
+ [ Thu Sep 8 14:52:58 2022 ] Eval epoch: 103
574
+ [ Thu Sep 8 14:59:45 2022 ] Epoch 103 Curr Acc: (28322/50919)55.62%
575
+ [ Thu Sep 8 14:59:45 2022 ] Epoch 54 Best Acc 56.52%
576
+ [ Thu Sep 8 14:59:45 2022 ] Training epoch: 104
577
+ [ Thu Sep 8 14:59:45 2022 ] Learning rate: 0.00015000000000000004
578
+ [ Thu Sep 8 15:06:24 2022 ] Mean training loss: 0.0116.
579
+ [ Thu Sep 8 15:06:24 2022 ] Time consumption: [Data]01%, [Network]99%
580
+ [ Thu Sep 8 15:06:24 2022 ] Eval epoch: 104
581
+ [ Thu Sep 8 15:13:11 2022 ] Epoch 104 Curr Acc: (28575/50919)56.12%
582
+ [ Thu Sep 8 15:13:11 2022 ] Epoch 54 Best Acc 56.52%
583
+ [ Thu Sep 8 15:13:11 2022 ] Training epoch: 105
584
+ [ Thu Sep 8 15:13:11 2022 ] Learning rate: 0.00015000000000000004
585
+ [ Thu Sep 8 15:19:49 2022 ] Mean training loss: 0.0110.
586
+ [ Thu Sep 8 15:19:49 2022 ] Time consumption: [Data]01%, [Network]99%
587
+ [ Thu Sep 8 15:19:49 2022 ] Eval epoch: 105
588
+ [ Thu Sep 8 15:26:35 2022 ] Epoch 105 Curr Acc: (28136/50919)55.26%
589
+ [ Thu Sep 8 15:26:35 2022 ] Epoch 54 Best Acc 56.52%
590
+ [ Thu Sep 8 15:26:35 2022 ] Training epoch: 106
591
+ [ Thu Sep 8 15:26:35 2022 ] Learning rate: 0.00015000000000000004
592
+ [ Thu Sep 8 15:33:14 2022 ] Mean training loss: 0.0110.
593
+ [ Thu Sep 8 15:33:14 2022 ] Time consumption: [Data]01%, [Network]99%
594
+ [ Thu Sep 8 15:33:14 2022 ] Eval epoch: 106
595
+ [ Thu Sep 8 15:40:00 2022 ] Epoch 106 Curr Acc: (28410/50919)55.79%
596
+ [ Thu Sep 8 15:40:00 2022 ] Epoch 54 Best Acc 56.52%
597
+ [ Thu Sep 8 15:40:00 2022 ] Training epoch: 107
598
+ [ Thu Sep 8 15:40:00 2022 ] Learning rate: 0.00015000000000000004
599
+ [ Thu Sep 8 15:46:37 2022 ] Mean training loss: 0.0112.
600
+ [ Thu Sep 8 15:46:37 2022 ] Time consumption: [Data]01%, [Network]99%
601
+ [ Thu Sep 8 15:46:37 2022 ] Eval epoch: 107
602
+ [ Thu Sep 8 15:53:24 2022 ] Epoch 107 Curr Acc: (28708/50919)56.38%
603
+ [ Thu Sep 8 15:53:24 2022 ] Epoch 54 Best Acc 56.52%
604
+ [ Thu Sep 8 15:53:24 2022 ] Training epoch: 108
605
+ [ Thu Sep 8 15:53:24 2022 ] Learning rate: 0.00015000000000000004
606
+ [ Thu Sep 8 16:00:02 2022 ] Mean training loss: 0.0112.
607
+ [ Thu Sep 8 16:00:02 2022 ] Time consumption: [Data]01%, [Network]99%
608
+ [ Thu Sep 8 16:00:02 2022 ] Eval epoch: 108
609
+ [ Thu Sep 8 16:06:49 2022 ] Epoch 108 Curr Acc: (28446/50919)55.87%
610
+ [ Thu Sep 8 16:06:49 2022 ] Epoch 54 Best Acc 56.52%
611
+ [ Thu Sep 8 16:06:49 2022 ] Training epoch: 109
612
+ [ Thu Sep 8 16:06:49 2022 ] Learning rate: 0.00015000000000000004
613
+ [ Thu Sep 8 16:13:28 2022 ] Mean training loss: 0.0108.
614
+ [ Thu Sep 8 16:13:28 2022 ] Time consumption: [Data]01%, [Network]99%
615
+ [ Thu Sep 8 16:13:28 2022 ] Eval epoch: 109
616
+ [ Thu Sep 8 16:20:14 2022 ] Epoch 109 Curr Acc: (28427/50919)55.83%
617
+ [ Thu Sep 8 16:20:14 2022 ] Epoch 54 Best Acc 56.52%
618
+ [ Thu Sep 8 16:20:14 2022 ] Training epoch: 110
619
+ [ Thu Sep 8 16:20:14 2022 ] Learning rate: 0.00015000000000000004
620
+ [ Thu Sep 8 16:26:53 2022 ] Mean training loss: 0.0121.
621
+ [ Thu Sep 8 16:26:53 2022 ] Time consumption: [Data]01%, [Network]99%
622
+ [ Thu Sep 8 16:26:53 2022 ] Eval epoch: 110
623
+ [ Thu Sep 8 16:33:39 2022 ] Epoch 110 Curr Acc: (28147/50919)55.28%
624
+ [ Thu Sep 8 16:33:39 2022 ] Epoch 54 Best Acc 56.52%
625
+ [ Thu Sep 8 16:33:39 2022 ] epoch: 54, best accuracy: 0.5651917751723325
626
+ [ Thu Sep 8 16:33:39 2022 ] Experiment: ./work_dir/ntu120/xsub_bm
627
+ [ Thu Sep 8 16:33:39 2022 ] # generator parameters: 2.922995 M.
628
+ [ Thu Sep 8 16:33:39 2022 ] Load weights from ./runs/ntu120/xsub_bm/runs-53-105300.pt.
629
+ [ Thu Sep 8 16:33:39 2022 ] Eval epoch: 1
630
+ [ Thu Sep 8 16:40:24 2022 ] Epoch 1 Curr Acc: (28779/50919)56.52%
631
+ [ Thu Sep 8 16:40:24 2022 ] Epoch 54 Best Acc 56.52%
ckpt/Others/MST-GCN/ntu120_xsub/xsub_j/AEMST_GCN.py ADDED
@@ -0,0 +1,168 @@
1
+ import torch
2
+ import torch.nn as nn
3
+ import torch.nn.functional as F
4
+
5
+ import numpy as np
6
+ import math
7
+
8
+ import sys
9
+ sys.path.append('../')
10
+ from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \
11
+ MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \
12
+ MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer
13
+ from model.activations import Activations
14
+ from model.utils import import_class, conv_branch_init, conv_init, bn_init
15
+ from model.attentions import Attention_Layer
16
+
17
+ # import model.attentions
18
+
19
+ __block_type__ = {
20
+ 'basic': (Basic_GCN_layer, Basic_TCN_layer),
21
+ 'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer),
22
+ 'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer),
23
+ 'ms': (MS_GCN_layer, MS_TCN_layer),
24
+ 'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer),
25
+ }
26
+
27
+
28
+ class Model(nn.Module):
29
+ def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten,
30
+ **kwargs):
31
+ super(Model, self).__init__()
32
+ kwargs['act'] = Activations(kwargs['act'])
33
+ atten = None if atten == 'None' else atten
34
+ if graph is None:
35
+ raise ValueError()
36
+ else:
37
+ Graph = import_class(graph)
38
+ self.graph = Graph(**graph_args)
39
+ A = self.graph.A
40
+
41
+ self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point)
42
+
43
+ self.layers = nn.ModuleList()
44
+
45
+ for i, block in enumerate(block_args):
46
+ if i == 0:
47
+ self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2],
48
+ kernel_size=kernel_size, stride=block[3], A=A, block_type='basic',
49
+ atten=None, **kwargs))
50
+ else:
51
+ self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2],
52
+ kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type,
53
+ atten=atten, **kwargs))
54
+
55
+ self.gap = nn.AdaptiveAvgPool2d(1)
56
+ self.fc = nn.Linear(block_args[-1][1], num_class)
57
+
58
+ for m in self.modules():
59
+ if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer):
60
+ for mm in m.modules():
61
+ if isinstance(mm, nn.Conv2d):
62
+ conv_branch_init(mm, self.graph.A.shape[0])
63
+ if isinstance(mm, nn.BatchNorm2d):
64
+ bn_init(mm, 1)
65
+ elif isinstance(m, nn.Conv2d):
66
+ conv_init(m)
67
+ elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d):
68
+ bn_init(m, 1)
69
+ elif isinstance(m, nn.Linear):
70
+ nn.init.normal_(m.weight, 0, math.sqrt(2. / num_class))
71
+
72
+ def forward(self, x):
73
+ N, C, T, V, M = x.size()
74
+
75
+ x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T) # N C T V M --> N M V C T
76
+ x = self.data_bn(x)
77
+ x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
78
+
79
+ for i, layer in enumerate(self.layers):
80
+ x = layer(x)
81
+
82
+ features = x
83
+
84
+ x = self.gap(x).view(N, M, -1).mean(dim=1)
85
+ x = self.fc(x)
86
+
87
+ return features, x
88
+
89
+
90
+ class MST_GCN_block(nn.Module):
91
+ def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs):
92
+ super(MST_GCN_block, self).__init__()
93
+ self.atten = atten
94
+ self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A,
95
+ residual=residual, **kwargs)
96
+ self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride,
97
+ residual=residual, **kwargs)
98
+ if atten is not None:
99
+ self.att = Attention_Layer(out_channels, atten, **kwargs)
100
+
101
+ def forward(self, x):
102
+ return self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x))
103
+
104
+
105
+ if __name__ == '__main__':
106
+ import sys
107
+ import time
108
+
109
+ parts = [
110
+ np.array([5, 6, 7, 8, 22, 23]) - 1, # left_arm
111
+ np.array([9, 10, 11, 12, 24, 25]) - 1, # right_arm
112
+ np.array([13, 14, 15, 16]) - 1, # left_leg
113
+ np.array([17, 18, 19, 20]) - 1, # right_leg
114
+ np.array([1, 2, 3, 4, 21]) - 1 # torso
115
+ ]
116
+
117
+ warmup_iter = 3
118
+ test_iter = 10
119
+ sys.path.append('/home/chenzhan/mywork/MST-GCN/')
120
+ from thop import profile
121
+ basic_channels = 112
122
+ cfgs = {
123
+ 'num_class': 2,
124
+ 'num_point': 25,
125
+ 'num_person': 1,
126
+ 'block_args': [[2, basic_channels, False, 1],
127
+ [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1],
128
+ [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1],
129
+ [basic_channels*2, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]],
130
+ 'graph': 'graph.ntu_rgb_d.Graph',
131
+ 'graph_args': {'labeling_mode': 'spatial'},
132
+ 'kernel_size': 9,
133
+ 'block_type': 'ms',
134
+ 'reduct_ratio': 2,
135
+ 'expand_ratio': 0,
136
+ 't_scale': 4,
137
+ 'layer_type': 'sep',
138
+ 'act': 'relu',
139
+ 's_scale': 4,
140
+ 'atten': 'stcja',
141
+ 'bias': True,
142
+ 'parts': parts
143
+ }
144
+
145
+ model = Model(**cfgs)
146
+
147
+ N, C, T, V, M = 4, 2, 16, 25, 1
148
+ inputs = torch.rand(N, C, T, V, M)
149
+
150
+ for i in range(warmup_iter + test_iter):
151
+ if i == warmup_iter:
152
+ start_time = time.time()
153
+ outputs = model(inputs)
154
+ end_time = time.time()
155
+
156
+ total_time = end_time - start_time
157
+ print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format(
158
+ total_time, test_iter, total_time/test_iter/N))
159
+
160
+ print(outputs[1].size())  # forward returns (features, logits); print the logits shape
161
+
162
+ hereflops, params = profile(model, inputs=(inputs,), verbose=False)
163
+ print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N))
164
+ print('# Params is {} M'.format(sum(param.numel() for param in model.parameters()) / 10 ** 6))
165
+
166
+
167
+
168
+
ckpt/Others/MST-GCN/ntu120_xsub/xsub_j/config.yaml ADDED
@@ -0,0 +1,107 @@
1
+ base_lr: 0.15
2
+ batch_size: 8
3
+ config: config/ntu120/xsub_j.yaml
4
+ device:
5
+ - 0
6
+ eval_interval: 5
7
+ feeder: feeders.feeder.Feeder
8
+ ignore_weights: []
9
+ local_rank: 0
10
+ log_interval: 100
11
+ model: model.AEMST_GCN.Model
12
+ model_args:
13
+ act: relu
14
+ atten: None
15
+ bias: true
16
+ block_args:
17
+ - - 3
18
+ - 112
19
+ - false
20
+ - 1
21
+ - - 112
22
+ - 112
23
+ - true
24
+ - 1
25
+ - - 112
26
+ - 112
27
+ - true
28
+ - 1
29
+ - - 112
30
+ - 112
31
+ - true
32
+ - 1
33
+ - - 112
34
+ - 224
35
+ - true
36
+ - 2
37
+ - - 224
38
+ - 224
39
+ - true
40
+ - 1
41
+ - - 224
42
+ - 224
43
+ - true
44
+ - 1
45
+ - - 224
46
+ - 448
47
+ - true
48
+ - 2
49
+ - - 448
50
+ - 448
51
+ - true
52
+ - 1
53
+ - - 448
54
+ - 448
55
+ - true
56
+ - 1
57
+ block_type: ms
58
+ expand_ratio: 0
59
+ graph: graph.ntu_rgb_d.Graph
60
+ graph_args:
61
+ labeling_mode: spatial
62
+ kernel_size: 9
63
+ layer_type: basic
64
+ num_class: 120
65
+ num_person: 2
66
+ num_point: 25
67
+ reduct_ratio: 2
68
+ s_scale: 4
69
+ t_scale: 4
70
+ model_path: ''
71
+ model_saved_name: ./runs/ntu120/xsub_j/runs
72
+ nesterov: true
73
+ num_epoch: 110
74
+ num_worker: 32
75
+ only_train_epoch: 0
76
+ only_train_part: false
77
+ optimizer: SGD
78
+ phase: train
79
+ print_log: true
80
+ save_interval: 1
81
+ save_score: true
82
+ seed: 1
83
+ show_topk:
84
+ - 1
85
+ - 5
86
+ start_epoch: 0
87
+ step:
88
+ - 50
89
+ - 70
90
+ - 90
91
+ test_batch_size: 64
92
+ test_feeder_args:
93
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_joint.npy
94
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl
95
+ train_feeder_args:
96
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_joint.npy
97
+ debug: false
98
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl
99
+ normalization: false
100
+ random_choose: false
101
+ random_move: false
102
+ random_shift: false
103
+ window_size: -1
104
+ warm_up_epoch: 10
105
+ weight_decay: 0.0001
106
+ weights: null
107
+ work_dir: ./work_dir/ntu120/xsub_j
ckpt/Others/MST-GCN/ntu120_xsub/xsub_j/epoch1_test_score.pkl ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:970fb96c694e69cc7b3de27dda5a9160f196839288f4f24f73d3500159078621
3
+ size 29946137
ckpt/Others/MST-GCN/ntu120_xsub/xsub_j/log.txt ADDED
@@ -0,0 +1,631 @@
1
+ [ Wed Sep 7 21:34:48 2022 ] # generator parameters: 2.922995 M.
2
+ [ Wed Sep 7 21:34:48 2022 ] Parameters:
3
+ {'work_dir': './work_dir/ntu120/xsub_j', 'model_saved_name': './runs/ntu120/xsub_j/runs', 'config': 'config/ntu120/xsub_j.yaml', 'phase': 'train', 'save_score': True, 'seed': 1, 'log_interval': 100, 'save_interval': 1, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl'}, 'model': 'model.AEMST_GCN.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'block_args': [[3, 112, False, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 224, True, 2], [224, 224, True, 1], [224, 224, True, 1], [224, 448, True, 2], [448, 448, True, 1], [448, 448, True, 1]], 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}, 'kernel_size': 9, 'block_type': 'ms', 'reduct_ratio': 2, 'expand_ratio': 0, 's_scale': 4, 't_scale': 4, 'layer_type': 'basic', 'act': 'relu', 'atten': 'None', 'bias': True}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.15, 'step': [50, 70, 90], 'device': [0], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 8, 'test_batch_size': 64, 'start_epoch': 0, 'model_path': '', 'num_epoch': 110, 'weight_decay': 0.0001, 'only_train_part': False, 'only_train_epoch': 0, 'warm_up_epoch': 10, 'local_rank': 0}
4
+
5
+ [ Wed Sep 7 21:34:48 2022 ] Training epoch: 1
6
+ [ Wed Sep 7 21:34:48 2022 ] Learning rate: 0.015
7
+ [ Wed Sep 7 21:41:24 2022 ] Mean training loss: 3.4110.
8
+ [ Wed Sep 7 21:41:24 2022 ] Time consumption: [Data]01%, [Network]99%
9
+ [ Wed Sep 7 21:41:24 2022 ] Training epoch: 2
10
+ [ Wed Sep 7 21:41:24 2022 ] Learning rate: 0.03
11
+ [ Wed Sep 7 21:47:59 2022 ] Mean training loss: 2.4547.
12
+ [ Wed Sep 7 21:47:59 2022 ] Time consumption: [Data]01%, [Network]99%
13
+ [ Wed Sep 7 21:47:59 2022 ] Training epoch: 3
14
+ [ Wed Sep 7 21:47:59 2022 ] Learning rate: 0.045
15
+ [ Wed Sep 7 21:54:36 2022 ] Mean training loss: 1.9976.
16
+ [ Wed Sep 7 21:54:36 2022 ] Time consumption: [Data]01%, [Network]99%
17
+ [ Wed Sep 7 21:54:36 2022 ] Training epoch: 4
18
+ [ Wed Sep 7 21:54:36 2022 ] Learning rate: 0.06
19
+ [ Wed Sep 7 22:01:13 2022 ] Mean training loss: 1.7330.
20
+ [ Wed Sep 7 22:01:13 2022 ] Time consumption: [Data]01%, [Network]99%
21
+ [ Wed Sep 7 22:01:13 2022 ] Training epoch: 5
22
+ [ Wed Sep 7 22:01:13 2022 ] Learning rate: 0.075
23
+ [ Wed Sep 7 22:07:49 2022 ] Mean training loss: 1.5684.
24
+ [ Wed Sep 7 22:07:49 2022 ] Time consumption: [Data]01%, [Network]99%
25
+ [ Wed Sep 7 22:07:49 2022 ] Training epoch: 6
26
+ [ Wed Sep 7 22:07:49 2022 ] Learning rate: 0.09
27
+ [ Wed Sep 7 22:14:24 2022 ] Mean training loss: 1.4553.
28
+ [ Wed Sep 7 22:14:24 2022 ] Time consumption: [Data]01%, [Network]99%
29
+ [ Wed Sep 7 22:14:24 2022 ] Training epoch: 7
30
+ [ Wed Sep 7 22:14:24 2022 ] Learning rate: 0.10500000000000001
31
+ [ Wed Sep 7 22:21:00 2022 ] Mean training loss: 1.4066.
32
+ [ Wed Sep 7 22:21:00 2022 ] Time consumption: [Data]01%, [Network]99%
33
+ [ Wed Sep 7 22:21:00 2022 ] Training epoch: 8
34
+ [ Wed Sep 7 22:21:00 2022 ] Learning rate: 0.12
35
+ [ Wed Sep 7 22:27:36 2022 ] Mean training loss: 1.3621.
36
+ [ Wed Sep 7 22:27:36 2022 ] Time consumption: [Data]01%, [Network]99%
37
+ [ Wed Sep 7 22:27:36 2022 ] Training epoch: 9
38
+ [ Wed Sep 7 22:27:36 2022 ] Learning rate: 0.13499999999999998
39
+ [ Wed Sep 7 22:34:12 2022 ] Mean training loss: 1.3254.
40
+ [ Wed Sep 7 22:34:12 2022 ] Time consumption: [Data]01%, [Network]99%
41
+ [ Wed Sep 7 22:34:12 2022 ] Training epoch: 10
42
+ [ Wed Sep 7 22:34:12 2022 ] Learning rate: 0.15
43
+ [ Wed Sep 7 22:40:48 2022 ] Mean training loss: 1.3225.
44
+ [ Wed Sep 7 22:40:48 2022 ] Time consumption: [Data]01%, [Network]99%
45
+ [ Wed Sep 7 22:40:48 2022 ] Training epoch: 11
46
+ [ Wed Sep 7 22:40:48 2022 ] Learning rate: 0.15
47
+ [ Wed Sep 7 22:47:25 2022 ] Mean training loss: 1.2530.
48
+ [ Wed Sep 7 22:47:25 2022 ] Time consumption: [Data]01%, [Network]99%
49
+ [ Wed Sep 7 22:47:25 2022 ] Training epoch: 12
50
+ [ Wed Sep 7 22:47:25 2022 ] Learning rate: 0.15
51
+ [ Wed Sep 7 22:53:59 2022 ] Mean training loss: 1.1999.
52
+ [ Wed Sep 7 22:53:59 2022 ] Time consumption: [Data]01%, [Network]99%
53
+ [ Wed Sep 7 22:53:59 2022 ] Training epoch: 13
54
+ [ Wed Sep 7 22:53:59 2022 ] Learning rate: 0.15
55
+ [ Wed Sep 7 23:00:34 2022 ] Mean training loss: 1.1760.
56
+ [ Wed Sep 7 23:00:34 2022 ] Time consumption: [Data]01%, [Network]99%
57
+ [ Wed Sep 7 23:00:34 2022 ] Training epoch: 14
58
+ [ Wed Sep 7 23:00:34 2022 ] Learning rate: 0.15
59
+ [ Wed Sep 7 23:07:09 2022 ] Mean training loss: 1.1365.
60
+ [ Wed Sep 7 23:07:09 2022 ] Time consumption: [Data]01%, [Network]99%
61
+ [ Wed Sep 7 23:07:09 2022 ] Training epoch: 15
62
+ [ Wed Sep 7 23:07:09 2022 ] Learning rate: 0.15
63
+ [ Wed Sep 7 23:13:44 2022 ] Mean training loss: 1.1145.
64
+ [ Wed Sep 7 23:13:44 2022 ] Time consumption: [Data]01%, [Network]99%
65
+ [ Wed Sep 7 23:13:44 2022 ] Training epoch: 16
66
+ [ Wed Sep 7 23:13:44 2022 ] Learning rate: 0.15
67
+ [ Wed Sep 7 23:20:21 2022 ] Mean training loss: 1.0964.
68
+ [ Wed Sep 7 23:20:21 2022 ] Time consumption: [Data]01%, [Network]99%
69
+ [ Wed Sep 7 23:20:21 2022 ] Training epoch: 17
70
+ [ Wed Sep 7 23:20:21 2022 ] Learning rate: 0.15
71
+ [ Wed Sep 7 23:26:58 2022 ] Mean training loss: 1.0684.
72
+ [ Wed Sep 7 23:26:58 2022 ] Time consumption: [Data]01%, [Network]99%
73
+ [ Wed Sep 7 23:26:58 2022 ] Training epoch: 18
74
+ [ Wed Sep 7 23:26:58 2022 ] Learning rate: 0.15
75
+ [ Wed Sep 7 23:33:34 2022 ] Mean training loss: 1.0638.
76
+ [ Wed Sep 7 23:33:34 2022 ] Time consumption: [Data]01%, [Network]99%
77
+ [ Wed Sep 7 23:33:34 2022 ] Training epoch: 19
78
+ [ Wed Sep 7 23:33:34 2022 ] Learning rate: 0.15
79
+ [ Wed Sep 7 23:40:10 2022 ] Mean training loss: 1.0479.
80
+ [ Wed Sep 7 23:40:10 2022 ] Time consumption: [Data]01%, [Network]99%
81
+ [ Wed Sep 7 23:40:10 2022 ] Training epoch: 20
82
+ [ Wed Sep 7 23:40:10 2022 ] Learning rate: 0.15
83
+ [ Wed Sep 7 23:46:45 2022 ] Mean training loss: 1.0294.
84
+ [ Wed Sep 7 23:46:45 2022 ] Time consumption: [Data]01%, [Network]99%
85
+ [ Wed Sep 7 23:46:45 2022 ] Training epoch: 21
86
+ [ Wed Sep 7 23:46:45 2022 ] Learning rate: 0.15
87
+ [ Wed Sep 7 23:53:21 2022 ] Mean training loss: 1.0288.
88
+ [ Wed Sep 7 23:53:21 2022 ] Time consumption: [Data]01%, [Network]99%
89
+ [ Wed Sep 7 23:53:21 2022 ] Training epoch: 22
90
+ [ Wed Sep 7 23:53:21 2022 ] Learning rate: 0.15
91
+ [ Wed Sep 7 23:59:56 2022 ] Mean training loss: 1.0129.
92
+ [ Wed Sep 7 23:59:56 2022 ] Time consumption: [Data]01%, [Network]99%
93
+ [ Wed Sep 7 23:59:56 2022 ] Training epoch: 23
94
+ [ Wed Sep 7 23:59:56 2022 ] Learning rate: 0.15
95
+ [ Thu Sep 8 00:06:33 2022 ] Mean training loss: 1.0015.
96
+ [ Thu Sep 8 00:06:33 2022 ] Time consumption: [Data]01%, [Network]99%
97
+ [ Thu Sep 8 00:06:33 2022 ] Training epoch: 24
98
+ [ Thu Sep 8 00:06:33 2022 ] Learning rate: 0.15
99
+ [ Thu Sep 8 00:13:08 2022 ] Mean training loss: 1.0029.
100
+ [ Thu Sep 8 00:13:08 2022 ] Time consumption: [Data]01%, [Network]99%
101
+ [ Thu Sep 8 00:13:08 2022 ] Training epoch: 25
102
+ [ Thu Sep 8 00:13:08 2022 ] Learning rate: 0.15
103
+ [ Thu Sep 8 00:19:44 2022 ] Mean training loss: 0.9837.
104
+ [ Thu Sep 8 00:19:44 2022 ] Time consumption: [Data]01%, [Network]99%
105
+ [ Thu Sep 8 00:19:44 2022 ] Training epoch: 26
106
+ [ Thu Sep 8 00:19:44 2022 ] Learning rate: 0.15
107
+ [ Thu Sep 8 00:26:20 2022 ] Mean training loss: 0.9914.
108
+ [ Thu Sep 8 00:26:20 2022 ] Time consumption: [Data]01%, [Network]99%
109
+ [ Thu Sep 8 00:26:20 2022 ] Training epoch: 27
110
+ [ Thu Sep 8 00:26:20 2022 ] Learning rate: 0.15
111
+ [ Thu Sep 8 00:32:56 2022 ] Mean training loss: 0.9702.
112
+ [ Thu Sep 8 00:32:56 2022 ] Time consumption: [Data]01%, [Network]99%
113
+ [ Thu Sep 8 00:32:56 2022 ] Training epoch: 28
114
+ [ Thu Sep 8 00:32:56 2022 ] Learning rate: 0.15
115
+ [ Thu Sep 8 00:39:32 2022 ] Mean training loss: 0.9798.
116
+ [ Thu Sep 8 00:39:32 2022 ] Time consumption: [Data]01%, [Network]99%
117
+ [ Thu Sep 8 00:39:32 2022 ] Training epoch: 29
118
+ [ Thu Sep 8 00:39:32 2022 ] Learning rate: 0.15
119
+ [ Thu Sep 8 00:46:08 2022 ] Mean training loss: 0.9441.
120
+ [ Thu Sep 8 00:46:08 2022 ] Time consumption: [Data]01%, [Network]99%
121
+ [ Thu Sep 8 00:46:08 2022 ] Training epoch: 30
122
+ [ Thu Sep 8 00:46:08 2022 ] Learning rate: 0.15
123
+ [ Thu Sep 8 00:52:43 2022 ] Mean training loss: 0.9694.
124
+ [ Thu Sep 8 00:52:43 2022 ] Time consumption: [Data]01%, [Network]99%
125
+ [ Thu Sep 8 00:52:43 2022 ] Training epoch: 31
126
+ [ Thu Sep 8 00:52:43 2022 ] Learning rate: 0.15
127
+ [ Thu Sep 8 00:59:18 2022 ] Mean training loss: 0.9698.
128
+ [ Thu Sep 8 00:59:18 2022 ] Time consumption: [Data]01%, [Network]99%
129
+ [ Thu Sep 8 00:59:18 2022 ] Training epoch: 32
130
+ [ Thu Sep 8 00:59:18 2022 ] Learning rate: 0.15
131
+ [ Thu Sep 8 01:05:53 2022 ] Mean training loss: 0.9586.
132
+ [ Thu Sep 8 01:05:53 2022 ] Time consumption: [Data]01%, [Network]99%
133
+ [ Thu Sep 8 01:05:53 2022 ] Training epoch: 33
134
+ [ Thu Sep 8 01:05:53 2022 ] Learning rate: 0.15
135
+ [ Thu Sep 8 01:12:28 2022 ] Mean training loss: 0.9430.
136
+ [ Thu Sep 8 01:12:28 2022 ] Time consumption: [Data]01%, [Network]99%
137
+ [ Thu Sep 8 01:12:28 2022 ] Training epoch: 34
138
+ [ Thu Sep 8 01:12:28 2022 ] Learning rate: 0.15
139
+ [ Thu Sep 8 01:19:05 2022 ] Mean training loss: 0.9436.
140
+ [ Thu Sep 8 01:19:05 2022 ] Time consumption: [Data]01%, [Network]99%
141
+ [ Thu Sep 8 01:19:05 2022 ] Training epoch: 35
142
+ [ Thu Sep 8 01:19:05 2022 ] Learning rate: 0.15
143
+ [ Thu Sep 8 01:25:42 2022 ] Mean training loss: 0.9375.
144
+ [ Thu Sep 8 01:25:42 2022 ] Time consumption: [Data]01%, [Network]99%
145
+ [ Thu Sep 8 01:25:42 2022 ] Training epoch: 36
146
+ [ Thu Sep 8 01:25:42 2022 ] Learning rate: 0.15
147
+ [ Thu Sep 8 01:32:18 2022 ] Mean training loss: 0.9467.
148
+ [ Thu Sep 8 01:32:18 2022 ] Time consumption: [Data]01%, [Network]99%
149
+ [ Thu Sep 8 01:32:18 2022 ] Training epoch: 37
150
+ [ Thu Sep 8 01:32:18 2022 ] Learning rate: 0.15
151
+ [ Thu Sep 8 01:38:54 2022 ] Mean training loss: 0.9447.
152
+ [ Thu Sep 8 01:38:54 2022 ] Time consumption: [Data]01%, [Network]99%
153
+ [ Thu Sep 8 01:38:54 2022 ] Training epoch: 38
154
+ [ Thu Sep 8 01:38:54 2022 ] Learning rate: 0.15
155
+ [ Thu Sep 8 01:45:30 2022 ] Mean training loss: 0.9448.
156
+ [ Thu Sep 8 01:45:30 2022 ] Time consumption: [Data]01%, [Network]99%
157
+ [ Thu Sep 8 01:45:30 2022 ] Training epoch: 39
158
+ [ Thu Sep 8 01:45:30 2022 ] Learning rate: 0.15
159
+ [ Thu Sep 8 01:52:06 2022 ] Mean training loss: 0.9296.
160
+ [ Thu Sep 8 01:52:06 2022 ] Time consumption: [Data]01%, [Network]99%
161
+ [ Thu Sep 8 01:52:06 2022 ] Training epoch: 40
162
+ [ Thu Sep 8 01:52:06 2022 ] Learning rate: 0.15
163
+ [ Thu Sep 8 01:58:40 2022 ] Mean training loss: 0.9398.
164
+ [ Thu Sep 8 01:58:40 2022 ] Time consumption: [Data]01%, [Network]99%
165
+ [ Thu Sep 8 01:58:40 2022 ] Training epoch: 41
166
+ [ Thu Sep 8 01:58:40 2022 ] Learning rate: 0.15
167
+ [ Thu Sep 8 02:05:14 2022 ] Mean training loss: 0.9259.
168
+ [ Thu Sep 8 02:05:14 2022 ] Time consumption: [Data]01%, [Network]99%
169
+ [ Thu Sep 8 02:05:14 2022 ] Training epoch: 42
170
+ [ Thu Sep 8 02:05:14 2022 ] Learning rate: 0.15
171
+ [ Thu Sep 8 02:11:49 2022 ] Mean training loss: 0.9358.
172
+ [ Thu Sep 8 02:11:49 2022 ] Time consumption: [Data]01%, [Network]99%
173
+ [ Thu Sep 8 02:11:49 2022 ] Training epoch: 43
174
+ [ Thu Sep 8 02:11:49 2022 ] Learning rate: 0.15
175
+ [ Thu Sep 8 02:18:24 2022 ] Mean training loss: 0.9339.
176
+ [ Thu Sep 8 02:18:24 2022 ] Time consumption: [Data]01%, [Network]99%
177
+ [ Thu Sep 8 02:18:24 2022 ] Training epoch: 44
178
+ [ Thu Sep 8 02:18:24 2022 ] Learning rate: 0.15
179
+ [ Thu Sep 8 02:25:00 2022 ] Mean training loss: 0.9247.
180
+ [ Thu Sep 8 02:25:00 2022 ] Time consumption: [Data]01%, [Network]99%
181
+ [ Thu Sep 8 02:25:00 2022 ] Training epoch: 45
182
+ [ Thu Sep 8 02:25:00 2022 ] Learning rate: 0.15
183
+ [ Thu Sep 8 02:31:37 2022 ] Mean training loss: 0.9417.
184
+ [ Thu Sep 8 02:31:37 2022 ] Time consumption: [Data]01%, [Network]99%
185
+ [ Thu Sep 8 02:31:37 2022 ] Training epoch: 46
186
+ [ Thu Sep 8 02:31:37 2022 ] Learning rate: 0.15
187
+ [ Thu Sep 8 02:38:12 2022 ] Mean training loss: 0.9303.
188
+ [ Thu Sep 8 02:38:12 2022 ] Time consumption: [Data]01%, [Network]99%
189
+ [ Thu Sep 8 02:38:12 2022 ] Training epoch: 47
190
+ [ Thu Sep 8 02:38:12 2022 ] Learning rate: 0.15
191
+ [ Thu Sep 8 02:44:47 2022 ] Mean training loss: 0.9310.
192
+ [ Thu Sep 8 02:44:47 2022 ] Time consumption: [Data]01%, [Network]99%
193
+ [ Thu Sep 8 02:44:47 2022 ] Training epoch: 48
194
+ [ Thu Sep 8 02:44:47 2022 ] Learning rate: 0.15
195
+ [ Thu Sep 8 02:51:23 2022 ] Mean training loss: 0.9202.
196
+ [ Thu Sep 8 02:51:23 2022 ] Time consumption: [Data]01%, [Network]99%
197
+ [ Thu Sep 8 02:51:23 2022 ] Training epoch: 49
198
+ [ Thu Sep 8 02:51:23 2022 ] Learning rate: 0.15
199
+ [ Thu Sep 8 02:57:59 2022 ] Mean training loss: 0.9208.
200
+ [ Thu Sep 8 02:57:59 2022 ] Time consumption: [Data]01%, [Network]99%
201
+ [ Thu Sep 8 02:57:59 2022 ] Training epoch: 50
202
+ [ Thu Sep 8 02:57:59 2022 ] Learning rate: 0.15
203
+ [ Thu Sep 8 03:04:35 2022 ] Mean training loss: 0.9195.
204
+ [ Thu Sep 8 03:04:35 2022 ] Time consumption: [Data]01%, [Network]99%
205
+ [ Thu Sep 8 03:04:35 2022 ] Training epoch: 51
206
+ [ Thu Sep 8 03:04:35 2022 ] Learning rate: 0.015
207
+ [ Thu Sep 8 03:11:11 2022 ] Mean training loss: 0.4928.
208
+ [ Thu Sep 8 03:11:11 2022 ] Time consumption: [Data]01%, [Network]99%
209
+ [ Thu Sep 8 03:11:11 2022 ] Eval epoch: 51
210
+ [ Thu Sep 8 03:17:55 2022 ] Epoch 51 Curr Acc: (27550/50919)54.11%
211
+ [ Thu Sep 8 03:17:55 2022 ] Epoch 51 Best Acc 54.11%
212
+ [ Thu Sep 8 03:17:55 2022 ] Training epoch: 52
213
+ [ Thu Sep 8 03:17:55 2022 ] Learning rate: 0.015
214
+ [ Thu Sep 8 03:24:31 2022 ] Mean training loss: 0.3719.
215
+ [ Thu Sep 8 03:24:31 2022 ] Time consumption: [Data]01%, [Network]99%
216
+ [ Thu Sep 8 03:24:31 2022 ] Eval epoch: 52
217
+ [ Thu Sep 8 03:31:14 2022 ] Epoch 52 Curr Acc: (28520/50919)56.01%
218
+ [ Thu Sep 8 03:31:14 2022 ] Epoch 52 Best Acc 56.01%
219
+ [ Thu Sep 8 03:31:14 2022 ] Training epoch: 53
220
+ [ Thu Sep 8 03:31:14 2022 ] Learning rate: 0.015
221
+ [ Thu Sep 8 03:37:51 2022 ] Mean training loss: 0.3150.
222
+ [ Thu Sep 8 03:37:51 2022 ] Time consumption: [Data]01%, [Network]99%
223
+ [ Thu Sep 8 03:37:51 2022 ] Eval epoch: 53
224
+ [ Thu Sep 8 03:44:35 2022 ] Epoch 53 Curr Acc: (28941/50919)56.84%
225
+ [ Thu Sep 8 03:44:35 2022 ] Epoch 53 Best Acc 56.84%
226
+ [ Thu Sep 8 03:44:35 2022 ] Training epoch: 54
227
+ [ Thu Sep 8 03:44:35 2022 ] Learning rate: 0.015
228
+ [ Thu Sep 8 03:51:10 2022 ] Mean training loss: 0.2692.
229
+ [ Thu Sep 8 03:51:10 2022 ] Time consumption: [Data]01%, [Network]99%
230
+ [ Thu Sep 8 03:51:10 2022 ] Eval epoch: 54
231
+ [ Thu Sep 8 03:57:54 2022 ] Epoch 54 Curr Acc: (29213/50919)57.37%
232
+ [ Thu Sep 8 03:57:54 2022 ] Epoch 54 Best Acc 57.37%
233
+ [ Thu Sep 8 03:57:54 2022 ] Training epoch: 55
234
+ [ Thu Sep 8 03:57:54 2022 ] Learning rate: 0.015
235
+ [ Thu Sep 8 04:04:30 2022 ] Mean training loss: 0.2441.
236
+ [ Thu Sep 8 04:04:30 2022 ] Time consumption: [Data]01%, [Network]99%
237
+ [ Thu Sep 8 04:04:30 2022 ] Eval epoch: 55
238
+ [ Thu Sep 8 04:11:13 2022 ] Epoch 55 Curr Acc: (28870/50919)56.70%
239
+ [ Thu Sep 8 04:11:13 2022 ] Epoch 54 Best Acc 57.37%
240
+ [ Thu Sep 8 04:11:13 2022 ] Training epoch: 56
241
+ [ Thu Sep 8 04:11:13 2022 ] Learning rate: 0.015
242
+ [ Thu Sep 8 04:17:49 2022 ] Mean training loss: 0.2166.
243
+ [ Thu Sep 8 04:17:49 2022 ] Time consumption: [Data]01%, [Network]99%
244
+ [ Thu Sep 8 04:17:49 2022 ] Eval epoch: 56
245
+ [ Thu Sep 8 04:24:32 2022 ] Epoch 56 Curr Acc: (29086/50919)57.12%
246
+ [ Thu Sep 8 04:24:32 2022 ] Epoch 54 Best Acc 57.37%
247
+ [ Thu Sep 8 04:24:32 2022 ] Training epoch: 57
248
+ [ Thu Sep 8 04:24:32 2022 ] Learning rate: 0.015
249
+ [ Thu Sep 8 04:31:09 2022 ] Mean training loss: 0.1935.
250
+ [ Thu Sep 8 04:31:09 2022 ] Time consumption: [Data]01%, [Network]99%
251
+ [ Thu Sep 8 04:31:09 2022 ] Eval epoch: 57
252
+ [ Thu Sep 8 04:37:52 2022 ] Epoch 57 Curr Acc: (28702/50919)56.37%
253
+ [ Thu Sep 8 04:37:52 2022 ] Epoch 54 Best Acc 57.37%
254
+ [ Thu Sep 8 04:37:52 2022 ] Training epoch: 58
255
+ [ Thu Sep 8 04:37:52 2022 ] Learning rate: 0.015
256
+ [ Thu Sep 8 04:44:29 2022 ] Mean training loss: 0.1774.
257
+ [ Thu Sep 8 04:44:29 2022 ] Time consumption: [Data]01%, [Network]99%
258
+ [ Thu Sep 8 04:44:29 2022 ] Eval epoch: 58
259
+ [ Thu Sep 8 04:51:12 2022 ] Epoch 58 Curr Acc: (29216/50919)57.38%
260
+ [ Thu Sep 8 04:51:12 2022 ] Epoch 58 Best Acc 57.38%
261
+ [ Thu Sep 8 04:51:12 2022 ] Training epoch: 59
262
+ [ Thu Sep 8 04:51:12 2022 ] Learning rate: 0.015
263
+ [ Thu Sep 8 04:57:49 2022 ] Mean training loss: 0.1613.
264
+ [ Thu Sep 8 04:57:49 2022 ] Time consumption: [Data]01%, [Network]99%
265
+ [ Thu Sep 8 04:57:49 2022 ] Eval epoch: 59
266
+ [ Thu Sep 8 05:04:32 2022 ] Epoch 59 Curr Acc: (29268/50919)57.48%
267
+ [ Thu Sep 8 05:04:32 2022 ] Epoch 59 Best Acc 57.48%
268
+ [ Thu Sep 8 05:04:32 2022 ] Training epoch: 60
269
+ [ Thu Sep 8 05:04:32 2022 ] Learning rate: 0.015
270
+ [ Thu Sep 8 05:11:08 2022 ] Mean training loss: 0.1465.
271
+ [ Thu Sep 8 05:11:08 2022 ] Time consumption: [Data]01%, [Network]99%
272
+ [ Thu Sep 8 05:11:08 2022 ] Eval epoch: 60
273
+ [ Thu Sep 8 05:17:51 2022 ] Epoch 60 Curr Acc: (27961/50919)54.91%
274
+ [ Thu Sep 8 05:17:51 2022 ] Epoch 59 Best Acc 57.48%
275
+ [ Thu Sep 8 05:17:51 2022 ] Training epoch: 61
276
+ [ Thu Sep 8 05:17:51 2022 ] Learning rate: 0.015
277
+ [ Thu Sep 8 05:24:28 2022 ] Mean training loss: 0.1338.
278
+ [ Thu Sep 8 05:24:28 2022 ] Time consumption: [Data]01%, [Network]99%
279
+ [ Thu Sep 8 05:24:28 2022 ] Eval epoch: 61
280
+ [ Thu Sep 8 05:31:11 2022 ] Epoch 61 Curr Acc: (28617/50919)56.20%
281
+ [ Thu Sep 8 05:31:11 2022 ] Epoch 59 Best Acc 57.48%
282
+ [ Thu Sep 8 05:31:11 2022 ] Training epoch: 62
283
+ [ Thu Sep 8 05:31:11 2022 ] Learning rate: 0.015
284
+ [ Thu Sep 8 05:37:47 2022 ] Mean training loss: 0.1262.
285
+ [ Thu Sep 8 05:37:47 2022 ] Time consumption: [Data]01%, [Network]99%
286
+ [ Thu Sep 8 05:37:47 2022 ] Eval epoch: 62
287
+ [ Thu Sep 8 05:44:30 2022 ] Epoch 62 Curr Acc: (28582/50919)56.13%
288
+ [ Thu Sep 8 05:44:30 2022 ] Epoch 59 Best Acc 57.48%
289
+ [ Thu Sep 8 05:44:30 2022 ] Training epoch: 63
290
+ [ Thu Sep 8 05:44:30 2022 ] Learning rate: 0.015
291
+ [ Thu Sep 8 05:51:07 2022 ] Mean training loss: 0.1247.
292
+ [ Thu Sep 8 05:51:07 2022 ] Time consumption: [Data]01%, [Network]99%
293
+ [ Thu Sep 8 05:51:07 2022 ] Eval epoch: 63
294
+ [ Thu Sep 8 05:57:50 2022 ] Epoch 63 Curr Acc: (28114/50919)55.21%
295
+ [ Thu Sep 8 05:57:50 2022 ] Epoch 59 Best Acc 57.48%
296
+ [ Thu Sep 8 05:57:50 2022 ] Training epoch: 64
297
+ [ Thu Sep 8 05:57:50 2022 ] Learning rate: 0.015
298
+ [ Thu Sep 8 06:04:26 2022 ] Mean training loss: 0.1191.
299
+ [ Thu Sep 8 06:04:26 2022 ] Time consumption: [Data]01%, [Network]99%
300
+ [ Thu Sep 8 06:04:26 2022 ] Eval epoch: 64
301
+ [ Thu Sep 8 06:11:09 2022 ] Epoch 64 Curr Acc: (28186/50919)55.35%
302
+ [ Thu Sep 8 06:11:09 2022 ] Epoch 59 Best Acc 57.48%
303
+ [ Thu Sep 8 06:11:09 2022 ] Training epoch: 65
304
+ [ Thu Sep 8 06:11:09 2022 ] Learning rate: 0.015
305
+ [ Thu Sep 8 06:17:45 2022 ] Mean training loss: 0.1178.
306
+ [ Thu Sep 8 06:17:45 2022 ] Time consumption: [Data]01%, [Network]99%
307
+ [ Thu Sep 8 06:17:45 2022 ] Eval epoch: 65
308
+ [ Thu Sep 8 06:24:28 2022 ] Epoch 65 Curr Acc: (27422/50919)53.85%
309
+ [ Thu Sep 8 06:24:28 2022 ] Epoch 59 Best Acc 57.48%
310
+ [ Thu Sep 8 06:24:28 2022 ] Training epoch: 66
311
+ [ Thu Sep 8 06:24:28 2022 ] Learning rate: 0.015
312
+ [ Thu Sep 8 06:31:04 2022 ] Mean training loss: 0.1247.
313
+ [ Thu Sep 8 06:31:04 2022 ] Time consumption: [Data]01%, [Network]99%
314
+ [ Thu Sep 8 06:31:04 2022 ] Eval epoch: 66
315
+ [ Thu Sep 8 06:37:47 2022 ] Epoch 66 Curr Acc: (27001/50919)53.03%
316
+ [ Thu Sep 8 06:37:47 2022 ] Epoch 59 Best Acc 57.48%
317
+ [ Thu Sep 8 06:37:47 2022 ] Training epoch: 67
318
+ [ Thu Sep 8 06:37:47 2022 ] Learning rate: 0.015
319
+ [ Thu Sep 8 06:44:23 2022 ] Mean training loss: 0.1185.
320
+ [ Thu Sep 8 06:44:23 2022 ] Time consumption: [Data]01%, [Network]99%
321
+ [ Thu Sep 8 06:44:24 2022 ] Eval epoch: 67
322
+ [ Thu Sep 8 06:51:06 2022 ] Epoch 67 Curr Acc: (27867/50919)54.73%
323
+ [ Thu Sep 8 06:51:06 2022 ] Epoch 59 Best Acc 57.48%
324
+ [ Thu Sep 8 06:51:06 2022 ] Training epoch: 68
325
+ [ Thu Sep 8 06:51:06 2022 ] Learning rate: 0.015
326
+ [ Thu Sep 8 06:57:43 2022 ] Mean training loss: 0.1110.
327
+ [ Thu Sep 8 06:57:43 2022 ] Time consumption: [Data]01%, [Network]99%
328
+ [ Thu Sep 8 06:57:43 2022 ] Eval epoch: 68
329
+ [ Thu Sep 8 07:04:26 2022 ] Epoch 68 Curr Acc: (28220/50919)55.42%
330
+ [ Thu Sep 8 07:04:26 2022 ] Epoch 59 Best Acc 57.48%
331
+ [ Thu Sep 8 07:04:26 2022 ] Training epoch: 69
332
+ [ Thu Sep 8 07:04:26 2022 ] Learning rate: 0.015
333
+ [ Thu Sep 8 07:11:01 2022 ] Mean training loss: 0.1122.
334
+ [ Thu Sep 8 07:11:01 2022 ] Time consumption: [Data]01%, [Network]99%
335
+ [ Thu Sep 8 07:11:01 2022 ] Eval epoch: 69
336
+ [ Thu Sep 8 07:17:44 2022 ] Epoch 69 Curr Acc: (27883/50919)54.76%
337
+ [ Thu Sep 8 07:17:44 2022 ] Epoch 59 Best Acc 57.48%
338
+ [ Thu Sep 8 07:17:44 2022 ] Training epoch: 70
339
+ [ Thu Sep 8 07:17:44 2022 ] Learning rate: 0.015
340
+ [ Thu Sep 8 07:24:21 2022 ] Mean training loss: 0.1229.
341
+ [ Thu Sep 8 07:24:21 2022 ] Time consumption: [Data]01%, [Network]99%
342
+ [ Thu Sep 8 07:24:21 2022 ] Eval epoch: 70
343
+ [ Thu Sep 8 07:31:04 2022 ] Epoch 70 Curr Acc: (27158/50919)53.34%
344
+ [ Thu Sep 8 07:31:04 2022 ] Epoch 59 Best Acc 57.48%
345
+ [ Thu Sep 8 07:31:04 2022 ] Training epoch: 71
346
+ [ Thu Sep 8 07:31:04 2022 ] Learning rate: 0.0015000000000000002
347
+ [ Thu Sep 8 07:37:41 2022 ] Mean training loss: 0.0659.
348
+ [ Thu Sep 8 07:37:41 2022 ] Time consumption: [Data]01%, [Network]99%
349
+ [ Thu Sep 8 07:37:41 2022 ] Eval epoch: 71
350
+ [ Thu Sep 8 07:44:24 2022 ] Epoch 71 Curr Acc: (28399/50919)55.77%
351
+ [ Thu Sep 8 07:44:24 2022 ] Epoch 59 Best Acc 57.48%
352
+ [ Thu Sep 8 07:44:24 2022 ] Training epoch: 72
353
+ [ Thu Sep 8 07:44:24 2022 ] Learning rate: 0.0015000000000000002
354
+ [ Thu Sep 8 07:50:58 2022 ] Mean training loss: 0.0415.
355
+ [ Thu Sep 8 07:50:58 2022 ] Time consumption: [Data]01%, [Network]99%
356
+ [ Thu Sep 8 07:50:58 2022 ] Eval epoch: 72
357
+ [ Thu Sep 8 07:57:41 2022 ] Epoch 72 Curr Acc: (28925/50919)56.81%
358
+ [ Thu Sep 8 07:57:41 2022 ] Epoch 59 Best Acc 57.48%
359
+ [ Thu Sep 8 07:57:41 2022 ] Training epoch: 73
360
+ [ Thu Sep 8 07:57:41 2022 ] Learning rate: 0.0015000000000000002
361
+ [ Thu Sep 8 08:04:17 2022 ] Mean training loss: 0.0350.
362
+ [ Thu Sep 8 08:04:17 2022 ] Time consumption: [Data]01%, [Network]99%
363
+ [ Thu Sep 8 08:04:17 2022 ] Eval epoch: 73
364
+ [ Thu Sep 8 08:11:00 2022 ] Epoch 73 Curr Acc: (28805/50919)56.57%
365
+ [ Thu Sep 8 08:11:00 2022 ] Epoch 59 Best Acc 57.48%
366
+ [ Thu Sep 8 08:11:00 2022 ] Training epoch: 74
367
+ [ Thu Sep 8 08:11:00 2022 ] Learning rate: 0.0015000000000000002
368
+ [ Thu Sep 8 08:17:37 2022 ] Mean training loss: 0.0290.
369
+ [ Thu Sep 8 08:17:37 2022 ] Time consumption: [Data]01%, [Network]99%
370
+ [ Thu Sep 8 08:17:37 2022 ] Eval epoch: 74
371
+ [ Thu Sep 8 08:24:20 2022 ] Epoch 74 Curr Acc: (29052/50919)57.06%
372
+ [ Thu Sep 8 08:24:20 2022 ] Epoch 59 Best Acc 57.48%
373
+ [ Thu Sep 8 08:24:20 2022 ] Training epoch: 75
374
+ [ Thu Sep 8 08:24:20 2022 ] Learning rate: 0.0015000000000000002
375
+ [ Thu Sep 8 08:30:56 2022 ] Mean training loss: 0.0276.
376
+ [ Thu Sep 8 08:30:56 2022 ] Time consumption: [Data]01%, [Network]99%
377
+ [ Thu Sep 8 08:30:56 2022 ] Eval epoch: 75
378
+ [ Thu Sep 8 08:37:39 2022 ] Epoch 75 Curr Acc: (28863/50919)56.68%
379
+ [ Thu Sep 8 08:37:39 2022 ] Epoch 59 Best Acc 57.48%
380
+ [ Thu Sep 8 08:37:39 2022 ] Training epoch: 76
381
+ [ Thu Sep 8 08:37:39 2022 ] Learning rate: 0.0015000000000000002
382
+ [ Thu Sep 8 08:44:16 2022 ] Mean training loss: 0.0257.
383
+ [ Thu Sep 8 08:44:16 2022 ] Time consumption: [Data]01%, [Network]99%
384
+ [ Thu Sep 8 08:44:16 2022 ] Eval epoch: 76
385
+ [ Thu Sep 8 08:50:58 2022 ] Epoch 76 Curr Acc: (28568/50919)56.10%
386
+ [ Thu Sep 8 08:50:58 2022 ] Epoch 59 Best Acc 57.48%
387
+ [ Thu Sep 8 08:50:58 2022 ] Training epoch: 77
388
+ [ Thu Sep 8 08:50:58 2022 ] Learning rate: 0.0015000000000000002
389
+ [ Thu Sep 8 08:57:35 2022 ] Mean training loss: 0.0261.
390
+ [ Thu Sep 8 08:57:35 2022 ] Time consumption: [Data]01%, [Network]99%
391
+ [ Thu Sep 8 08:57:35 2022 ] Eval epoch: 77
392
+ [ Thu Sep 8 09:04:18 2022 ] Epoch 77 Curr Acc: (28749/50919)56.46%
393
+ [ Thu Sep 8 09:04:18 2022 ] Epoch 59 Best Acc 57.48%
394
+ [ Thu Sep 8 09:04:18 2022 ] Training epoch: 78
395
+ [ Thu Sep 8 09:04:18 2022 ] Learning rate: 0.0015000000000000002
396
+ [ Thu Sep 8 09:10:55 2022 ] Mean training loss: 0.0242.
397
+ [ Thu Sep 8 09:10:55 2022 ] Time consumption: [Data]01%, [Network]99%
398
+ [ Thu Sep 8 09:10:55 2022 ] Eval epoch: 78
399
+ [ Thu Sep 8 09:17:38 2022 ] Epoch 78 Curr Acc: (28752/50919)56.47%
400
+ [ Thu Sep 8 09:17:38 2022 ] Epoch 59 Best Acc 57.48%
401
+ [ Thu Sep 8 09:17:38 2022 ] Training epoch: 79
402
+ [ Thu Sep 8 09:17:38 2022 ] Learning rate: 0.0015000000000000002
403
+ [ Thu Sep 8 09:24:15 2022 ] Mean training loss: 0.0220.
404
+ [ Thu Sep 8 09:24:15 2022 ] Time consumption: [Data]01%, [Network]99%
405
+ [ Thu Sep 8 09:24:15 2022 ] Eval epoch: 79
406
+ [ Thu Sep 8 09:30:58 2022 ] Epoch 79 Curr Acc: (28827/50919)56.61%
407
+ [ Thu Sep 8 09:30:58 2022 ] Epoch 59 Best Acc 57.48%
408
+ [ Thu Sep 8 09:30:58 2022 ] Training epoch: 80
409
+ [ Thu Sep 8 09:30:58 2022 ] Learning rate: 0.0015000000000000002
410
+ [ Thu Sep 8 09:37:34 2022 ] Mean training loss: 0.0207.
411
+ [ Thu Sep 8 09:37:34 2022 ] Time consumption: [Data]01%, [Network]99%
412
+ [ Thu Sep 8 09:37:34 2022 ] Eval epoch: 80
413
+ [ Thu Sep 8 09:44:16 2022 ] Epoch 80 Curr Acc: (28801/50919)56.56%
414
+ [ Thu Sep 8 09:44:16 2022 ] Epoch 59 Best Acc 57.48%
415
+ [ Thu Sep 8 09:44:17 2022 ] Training epoch: 81
416
+ [ Thu Sep 8 09:44:17 2022 ] Learning rate: 0.0015000000000000002
417
+ [ Thu Sep 8 09:50:54 2022 ] Mean training loss: 0.0207.
418
+ [ Thu Sep 8 09:50:54 2022 ] Time consumption: [Data]01%, [Network]99%
419
+ [ Thu Sep 8 09:50:54 2022 ] Eval epoch: 81
420
+ [ Thu Sep 8 09:57:37 2022 ] Epoch 81 Curr Acc: (28660/50919)56.29%
421
+ [ Thu Sep 8 09:57:37 2022 ] Epoch 59 Best Acc 57.48%
422
+ [ Thu Sep 8 09:57:37 2022 ] Training epoch: 82
423
+ [ Thu Sep 8 09:57:37 2022 ] Learning rate: 0.0015000000000000002
424
+ [ Thu Sep 8 10:04:13 2022 ] Mean training loss: 0.0191.
425
+ [ Thu Sep 8 10:04:13 2022 ] Time consumption: [Data]01%, [Network]99%
426
+ [ Thu Sep 8 10:04:13 2022 ] Eval epoch: 82
427
+ [ Thu Sep 8 10:10:56 2022 ] Epoch 82 Curr Acc: (29050/50919)57.05%
428
+ [ Thu Sep 8 10:10:56 2022 ] Epoch 59 Best Acc 57.48%
429
+ [ Thu Sep 8 10:10:56 2022 ] Training epoch: 83
430
+ [ Thu Sep 8 10:10:56 2022 ] Learning rate: 0.0015000000000000002
431
+ [ Thu Sep 8 10:17:32 2022 ] Mean training loss: 0.0185.
432
+ [ Thu Sep 8 10:17:32 2022 ] Time consumption: [Data]01%, [Network]99%
433
+ [ Thu Sep 8 10:17:33 2022 ] Eval epoch: 83
434
+ [ Thu Sep 8 10:24:16 2022 ] Epoch 83 Curr Acc: (28827/50919)56.61%
435
+ [ Thu Sep 8 10:24:16 2022 ] Epoch 59 Best Acc 57.48%
436
+ [ Thu Sep 8 10:24:16 2022 ] Training epoch: 84
437
+ [ Thu Sep 8 10:24:16 2022 ] Learning rate: 0.0015000000000000002
438
+ [ Thu Sep 8 10:30:52 2022 ] Mean training loss: 0.0185.
439
+ [ Thu Sep 8 10:30:52 2022 ] Time consumption: [Data]01%, [Network]99%
440
+ [ Thu Sep 8 10:30:52 2022 ] Eval epoch: 84
441
+ [ Thu Sep 8 10:37:35 2022 ] Epoch 84 Curr Acc: (29072/50919)57.09%
442
+ [ Thu Sep 8 10:37:35 2022 ] Epoch 59 Best Acc 57.48%
443
+ [ Thu Sep 8 10:37:35 2022 ] Training epoch: 85
444
+ [ Thu Sep 8 10:37:35 2022 ] Learning rate: 0.0015000000000000002
445
+ [ Thu Sep 8 10:44:12 2022 ] Mean training loss: 0.0185.
446
+ [ Thu Sep 8 10:44:12 2022 ] Time consumption: [Data]01%, [Network]99%
447
+ [ Thu Sep 8 10:44:12 2022 ] Eval epoch: 85
448
+ [ Thu Sep 8 10:50:55 2022 ] Epoch 85 Curr Acc: (28934/50919)56.82%
449
+ [ Thu Sep 8 10:50:55 2022 ] Epoch 59 Best Acc 57.48%
450
+ [ Thu Sep 8 10:50:55 2022 ] Training epoch: 86
451
+ [ Thu Sep 8 10:50:55 2022 ] Learning rate: 0.0015000000000000002
452
+ [ Thu Sep 8 10:57:33 2022 ] Mean training loss: 0.0179.
453
+ [ Thu Sep 8 10:57:33 2022 ] Time consumption: [Data]01%, [Network]99%
454
+ [ Thu Sep 8 10:57:33 2022 ] Eval epoch: 86
455
+ [ Thu Sep 8 11:04:16 2022 ] Epoch 86 Curr Acc: (29233/50919)57.41%
456
+ [ Thu Sep 8 11:04:16 2022 ] Epoch 59 Best Acc 57.48%
457
+ [ Thu Sep 8 11:04:16 2022 ] Training epoch: 87
458
+ [ Thu Sep 8 11:04:16 2022 ] Learning rate: 0.0015000000000000002
459
+ [ Thu Sep 8 11:10:53 2022 ] Mean training loss: 0.0172.
460
+ [ Thu Sep 8 11:10:53 2022 ] Time consumption: [Data]01%, [Network]99%
461
+ [ Thu Sep 8 11:10:53 2022 ] Eval epoch: 87
462
+ [ Thu Sep 8 11:17:36 2022 ] Epoch 87 Curr Acc: (29174/50919)57.29%
463
+ [ Thu Sep 8 11:17:36 2022 ] Epoch 59 Best Acc 57.48%
464
+ [ Thu Sep 8 11:17:36 2022 ] Training epoch: 88
465
+ [ Thu Sep 8 11:17:36 2022 ] Learning rate: 0.0015000000000000002
466
+ [ Thu Sep 8 11:24:13 2022 ] Mean training loss: 0.0166.
467
+ [ Thu Sep 8 11:24:13 2022 ] Time consumption: [Data]01%, [Network]99%
468
+ [ Thu Sep 8 11:24:13 2022 ] Eval epoch: 88
469
+ [ Thu Sep 8 11:30:56 2022 ] Epoch 88 Curr Acc: (28578/50919)56.12%
470
+ [ Thu Sep 8 11:30:56 2022 ] Epoch 59 Best Acc 57.48%
471
+ [ Thu Sep 8 11:30:56 2022 ] Training epoch: 89
472
+ [ Thu Sep 8 11:30:56 2022 ] Learning rate: 0.0015000000000000002
473
+ [ Thu Sep 8 11:37:33 2022 ] Mean training loss: 0.0164.
474
+ [ Thu Sep 8 11:37:33 2022 ] Time consumption: [Data]01%, [Network]99%
475
+ [ Thu Sep 8 11:37:33 2022 ] Eval epoch: 89
476
+ [ Thu Sep 8 11:44:16 2022 ] Epoch 89 Curr Acc: (29062/50919)57.07%
477
+ [ Thu Sep 8 11:44:16 2022 ] Epoch 59 Best Acc 57.48%
478
+ [ Thu Sep 8 11:44:16 2022 ] Training epoch: 90
479
+ [ Thu Sep 8 11:44:16 2022 ] Learning rate: 0.0015000000000000002
480
+ [ Thu Sep 8 11:50:52 2022 ] Mean training loss: 0.0162.
481
+ [ Thu Sep 8 11:50:52 2022 ] Time consumption: [Data]01%, [Network]99%
482
+ [ Thu Sep 8 11:50:52 2022 ] Eval epoch: 90
483
+ [ Thu Sep 8 11:57:36 2022 ] Epoch 90 Curr Acc: (28627/50919)56.22%
484
+ [ Thu Sep 8 11:57:36 2022 ] Epoch 59 Best Acc 57.48%
485
+ [ Thu Sep 8 11:57:36 2022 ] Training epoch: 91
486
+ [ Thu Sep 8 11:57:36 2022 ] Learning rate: 0.00015000000000000004
487
+ [ Thu Sep 8 12:04:11 2022 ] Mean training loss: 0.0158.
488
+ [ Thu Sep 8 12:04:11 2022 ] Time consumption: [Data]01%, [Network]99%
489
+ [ Thu Sep 8 12:04:11 2022 ] Eval epoch: 91
490
+ [ Thu Sep 8 12:10:55 2022 ] Epoch 91 Curr Acc: (29127/50919)57.20%
491
+ [ Thu Sep 8 12:10:55 2022 ] Epoch 59 Best Acc 57.48%
492
+ [ Thu Sep 8 12:10:55 2022 ] Training epoch: 92
493
+ [ Thu Sep 8 12:10:55 2022 ] Learning rate: 0.00015000000000000004
494
+ [ Thu Sep 8 12:17:32 2022 ] Mean training loss: 0.0150.
495
+ [ Thu Sep 8 12:17:32 2022 ] Time consumption: [Data]01%, [Network]99%
496
+ [ Thu Sep 8 12:17:32 2022 ] Eval epoch: 92
497
+ [ Thu Sep 8 12:24:15 2022 ] Epoch 92 Curr Acc: (28910/50919)56.78%
498
+ [ Thu Sep 8 12:24:15 2022 ] Epoch 59 Best Acc 57.48%
499
+ [ Thu Sep 8 12:24:15 2022 ] Training epoch: 93
500
+ [ Thu Sep 8 12:24:15 2022 ] Learning rate: 0.00015000000000000004
501
+ [ Thu Sep 8 12:30:52 2022 ] Mean training loss: 0.0155.
502
+ [ Thu Sep 8 12:30:52 2022 ] Time consumption: [Data]01%, [Network]99%
503
+ [ Thu Sep 8 12:30:52 2022 ] Eval epoch: 93
504
+ [ Thu Sep 8 12:37:35 2022 ] Epoch 93 Curr Acc: (29038/50919)57.03%
505
+ [ Thu Sep 8 12:37:35 2022 ] Epoch 59 Best Acc 57.48%
506
+ [ Thu Sep 8 12:37:35 2022 ] Training epoch: 94
507
+ [ Thu Sep 8 12:37:35 2022 ] Learning rate: 0.00015000000000000004
508
+ [ Thu Sep 8 12:44:12 2022 ] Mean training loss: 0.0147.
509
+ [ Thu Sep 8 12:44:12 2022 ] Time consumption: [Data]01%, [Network]99%
510
+ [ Thu Sep 8 12:44:12 2022 ] Eval epoch: 94
511
+ [ Thu Sep 8 12:50:56 2022 ] Epoch 94 Curr Acc: (28978/50919)56.91%
512
+ [ Thu Sep 8 12:50:56 2022 ] Epoch 59 Best Acc 57.48%
513
+ [ Thu Sep 8 12:50:56 2022 ] Training epoch: 95
514
+ [ Thu Sep 8 12:50:56 2022 ] Learning rate: 0.00015000000000000004
515
+ [ Thu Sep 8 12:57:31 2022 ] Mean training loss: 0.0147.
516
+ [ Thu Sep 8 12:57:31 2022 ] Time consumption: [Data]01%, [Network]99%
517
+ [ Thu Sep 8 12:57:31 2022 ] Eval epoch: 95
518
+ [ Thu Sep 8 13:04:14 2022 ] Epoch 95 Curr Acc: (29125/50919)57.20%
519
+ [ Thu Sep 8 13:04:14 2022 ] Epoch 59 Best Acc 57.48%
520
+ [ Thu Sep 8 13:04:14 2022 ] Training epoch: 96
521
+ [ Thu Sep 8 13:04:14 2022 ] Learning rate: 0.00015000000000000004
522
+ [ Thu Sep 8 13:10:51 2022 ] Mean training loss: 0.0153.
523
+ [ Thu Sep 8 13:10:51 2022 ] Time consumption: [Data]01%, [Network]99%
524
+ [ Thu Sep 8 13:10:51 2022 ] Eval epoch: 96
525
+ [ Thu Sep 8 13:17:34 2022 ] Epoch 96 Curr Acc: (28496/50919)55.96%
526
+ [ Thu Sep 8 13:17:34 2022 ] Epoch 59 Best Acc 57.48%
527
+ [ Thu Sep 8 13:17:34 2022 ] Training epoch: 97
528
+ [ Thu Sep 8 13:17:34 2022 ] Learning rate: 0.00015000000000000004
529
+ [ Thu Sep 8 13:24:11 2022 ] Mean training loss: 0.0149.
530
+ [ Thu Sep 8 13:24:11 2022 ] Time consumption: [Data]01%, [Network]99%
531
+ [ Thu Sep 8 13:24:11 2022 ] Eval epoch: 97
532
+ [ Thu Sep 8 13:30:54 2022 ] Epoch 97 Curr Acc: (29098/50919)57.15%
533
+ [ Thu Sep 8 13:30:54 2022 ] Epoch 59 Best Acc 57.48%
534
+ [ Thu Sep 8 13:30:54 2022 ] Training epoch: 98
535
+ [ Thu Sep 8 13:30:54 2022 ] Learning rate: 0.00015000000000000004
536
+ [ Thu Sep 8 13:37:31 2022 ] Mean training loss: 0.0138.
537
+ [ Thu Sep 8 13:37:31 2022 ] Time consumption: [Data]01%, [Network]99%
538
+ [ Thu Sep 8 13:37:31 2022 ] Eval epoch: 98
539
+ [ Thu Sep 8 13:44:14 2022 ] Epoch 98 Curr Acc: (28776/50919)56.51%
540
+ [ Thu Sep 8 13:44:14 2022 ] Epoch 59 Best Acc 57.48%
541
+ [ Thu Sep 8 13:44:15 2022 ] Training epoch: 99
542
+ [ Thu Sep 8 13:44:15 2022 ] Learning rate: 0.00015000000000000004
543
+ [ Thu Sep 8 13:50:51 2022 ] Mean training loss: 0.0146.
544
+ [ Thu Sep 8 13:50:51 2022 ] Time consumption: [Data]01%, [Network]99%
545
+ [ Thu Sep 8 13:50:51 2022 ] Eval epoch: 99
546
+ [ Thu Sep 8 13:57:34 2022 ] Epoch 99 Curr Acc: (28686/50919)56.34%
547
+ [ Thu Sep 8 13:57:34 2022 ] Epoch 59 Best Acc 57.48%
548
+ [ Thu Sep 8 13:57:34 2022 ] Training epoch: 100
549
+ [ Thu Sep 8 13:57:34 2022 ] Learning rate: 0.00015000000000000004
550
+ [ Thu Sep 8 14:04:11 2022 ] Mean training loss: 0.0147.
551
+ [ Thu Sep 8 14:04:11 2022 ] Time consumption: [Data]01%, [Network]99%
552
+ [ Thu Sep 8 14:04:11 2022 ] Eval epoch: 100
553
+ [ Thu Sep 8 14:10:55 2022 ] Epoch 100 Curr Acc: (29201/50919)57.35%
554
+ [ Thu Sep 8 14:10:55 2022 ] Epoch 59 Best Acc 57.48%
555
+ [ Thu Sep 8 14:10:55 2022 ] Training epoch: 101
556
+ [ Thu Sep 8 14:10:55 2022 ] Learning rate: 0.00015000000000000004
557
+ [ Thu Sep 8 14:17:32 2022 ] Mean training loss: 0.0139.
558
+ [ Thu Sep 8 14:17:32 2022 ] Time consumption: [Data]01%, [Network]99%
559
+ [ Thu Sep 8 14:17:32 2022 ] Eval epoch: 101
560
+ [ Thu Sep 8 14:24:15 2022 ] Epoch 101 Curr Acc: (28936/50919)56.83%
561
+ [ Thu Sep 8 14:24:15 2022 ] Epoch 59 Best Acc 57.48%
562
+ [ Thu Sep 8 14:24:15 2022 ] Training epoch: 102
563
+ [ Thu Sep 8 14:24:15 2022 ] Learning rate: 0.00015000000000000004
564
+ [ Thu Sep 8 14:30:51 2022 ] Mean training loss: 0.0150.
565
+ [ Thu Sep 8 14:30:51 2022 ] Time consumption: [Data]01%, [Network]99%
566
+ [ Thu Sep 8 14:30:52 2022 ] Eval epoch: 102
567
+ [ Thu Sep 8 14:37:35 2022 ] Epoch 102 Curr Acc: (28802/50919)56.56%
568
+ [ Thu Sep 8 14:37:35 2022 ] Epoch 59 Best Acc 57.48%
569
+ [ Thu Sep 8 14:37:35 2022 ] Training epoch: 103
570
+ [ Thu Sep 8 14:37:35 2022 ] Learning rate: 0.00015000000000000004
571
+ [ Thu Sep 8 14:44:13 2022 ] Mean training loss: 0.0158.
572
+ [ Thu Sep 8 14:44:13 2022 ] Time consumption: [Data]01%, [Network]99%
573
+ [ Thu Sep 8 14:44:13 2022 ] Eval epoch: 103
574
+ [ Thu Sep 8 14:50:56 2022 ] Epoch 103 Curr Acc: (28958/50919)56.87%
575
+ [ Thu Sep 8 14:50:56 2022 ] Epoch 59 Best Acc 57.48%
576
+ [ Thu Sep 8 14:50:56 2022 ] Training epoch: 104
577
+ [ Thu Sep 8 14:50:56 2022 ] Learning rate: 0.00015000000000000004
578
+ [ Thu Sep 8 14:57:32 2022 ] Mean training loss: 0.0145.
579
+ [ Thu Sep 8 14:57:32 2022 ] Time consumption: [Data]01%, [Network]99%
580
+ [ Thu Sep 8 14:57:32 2022 ] Eval epoch: 104
581
+ [ Thu Sep 8 15:04:15 2022 ] Epoch 104 Curr Acc: (29202/50919)57.35%
582
+ [ Thu Sep 8 15:04:15 2022 ] Epoch 59 Best Acc 57.48%
583
+ [ Thu Sep 8 15:04:15 2022 ] Training epoch: 105
584
+ [ Thu Sep 8 15:04:15 2022 ] Learning rate: 0.00015000000000000004
585
+ [ Thu Sep 8 15:10:51 2022 ] Mean training loss: 0.0143.
586
+ [ Thu Sep 8 15:10:51 2022 ] Time consumption: [Data]01%, [Network]99%
587
+ [ Thu Sep 8 15:10:51 2022 ] Eval epoch: 105
588
+ [ Thu Sep 8 15:17:35 2022 ] Epoch 105 Curr Acc: (28959/50919)56.87%
589
+ [ Thu Sep 8 15:17:35 2022 ] Epoch 59 Best Acc 57.48%
590
+ [ Thu Sep 8 15:17:35 2022 ] Training epoch: 106
591
+ [ Thu Sep 8 15:17:35 2022 ] Learning rate: 0.00015000000000000004
592
+ [ Thu Sep 8 15:24:10 2022 ] Mean training loss: 0.0137.
593
+ [ Thu Sep 8 15:24:10 2022 ] Time consumption: [Data]01%, [Network]99%
594
+ [ Thu Sep 8 15:24:10 2022 ] Eval epoch: 106
595
+ [ Thu Sep 8 15:30:53 2022 ] Epoch 106 Curr Acc: (29126/50919)57.20%
596
+ [ Thu Sep 8 15:30:53 2022 ] Epoch 59 Best Acc 57.48%
597
+ [ Thu Sep 8 15:30:53 2022 ] Training epoch: 107
598
+ [ Thu Sep 8 15:30:53 2022 ] Learning rate: 0.00015000000000000004
599
+ [ Thu Sep 8 15:37:29 2022 ] Mean training loss: 0.0153.
600
+ [ Thu Sep 8 15:37:29 2022 ] Time consumption: [Data]01%, [Network]99%
601
+ [ Thu Sep 8 15:37:29 2022 ] Eval epoch: 107
602
+ [ Thu Sep 8 15:44:13 2022 ] Epoch 107 Curr Acc: (29225/50919)57.40%
603
+ [ Thu Sep 8 15:44:13 2022 ] Epoch 59 Best Acc 57.48%
604
+ [ Thu Sep 8 15:44:13 2022 ] Training epoch: 108
605
+ [ Thu Sep 8 15:44:13 2022 ] Learning rate: 0.00015000000000000004
606
+ [ Thu Sep 8 15:50:49 2022 ] Mean training loss: 0.0148.
607
+ [ Thu Sep 8 15:50:49 2022 ] Time consumption: [Data]01%, [Network]99%
608
+ [ Thu Sep 8 15:50:49 2022 ] Eval epoch: 108
609
+ [ Thu Sep 8 15:57:32 2022 ] Epoch 108 Curr Acc: (29271/50919)57.49%
610
+ [ Thu Sep 8 15:57:32 2022 ] Epoch 108 Best Acc 57.49%
611
+ [ Thu Sep 8 15:57:32 2022 ] Training epoch: 109
612
+ [ Thu Sep 8 15:57:32 2022 ] Learning rate: 0.00015000000000000004
613
+ [ Thu Sep 8 16:04:07 2022 ] Mean training loss: 0.0138.
614
+ [ Thu Sep 8 16:04:07 2022 ] Time consumption: [Data]01%, [Network]99%
615
+ [ Thu Sep 8 16:04:07 2022 ] Eval epoch: 109
616
+ [ Thu Sep 8 16:10:51 2022 ] Epoch 109 Curr Acc: (28907/50919)56.77%
617
+ [ Thu Sep 8 16:10:51 2022 ] Epoch 108 Best Acc 57.49%
618
+ [ Thu Sep 8 16:10:51 2022 ] Training epoch: 110
619
+ [ Thu Sep 8 16:10:51 2022 ] Learning rate: 0.00015000000000000004
620
+ [ Thu Sep 8 16:17:26 2022 ] Mean training loss: 0.0146.
621
+ [ Thu Sep 8 16:17:26 2022 ] Time consumption: [Data]01%, [Network]99%
622
+ [ Thu Sep 8 16:17:26 2022 ] Eval epoch: 110
623
+ [ Thu Sep 8 16:24:10 2022 ] Epoch 110 Curr Acc: (28645/50919)56.26%
624
+ [ Thu Sep 8 16:24:10 2022 ] Epoch 108 Best Acc 57.49%
625
+ [ Thu Sep 8 16:24:10 2022 ] epoch: 108, best accuracy: 0.5748541801685029
626
+ [ Thu Sep 8 16:24:10 2022 ] Experiment: ./work_dir/ntu120/xsub_j
627
+ [ Thu Sep 8 16:24:10 2022 ] # generator parameters: 2.922995 M.
628
+ [ Thu Sep 8 16:24:10 2022 ] Load weights from ./runs/ntu120/xsub_j/runs-107-210600.pt.
629
+ [ Thu Sep 8 16:24:10 2022 ] Eval epoch: 1
630
+ [ Thu Sep 8 16:30:53 2022 ] Epoch 1 Curr Acc: (29271/50919)57.49%
631
+ [ Thu Sep 8 16:30:53 2022 ] Epoch 108 Best Acc 57.49%
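The learning rates recorded in the log above (0.015 up to epoch 70, 0.0015000000000000002 up to epoch 90, 0.00015000000000000004 afterwards) come from base_lr: 0.15 decayed by 0.1 at the step milestones [50, 70, 90] listed in the accompanying config files, with a 10-epoch linear warm-up at the start of training. A minimal sketch of that schedule, assuming the usual linear warm-up plus 0.1x step decay; the actual rule lives in the training script, which is not part of this upload:

def learning_rate(epoch, base_lr=0.15, steps=(50, 70, 90), warm_up_epoch=10):
    # Linear warm-up over the first warm_up_epoch epochs (1-indexed).
    if epoch <= warm_up_epoch:
        return base_lr * epoch / warm_up_epoch
    # Afterwards, multiply by 0.1 once for every milestone already passed.
    return base_lr * 0.1 ** sum(epoch > s for s in steps)

for e in (1, 10, 50, 51, 71, 91):
    print(e, learning_rate(e))
# The long trailing digits seen in the log (e.g. 0.0015000000000000002) are
# ordinary binary floating-point rounding from repeated multiplication by 0.1.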
ckpt/Others/MST-GCN/ntu120_xsub/xsub_jm/AEMST_GCN.py ADDED
@@ -0,0 +1,168 @@
+ import torch
+ import torch.nn as nn
+ import torch.nn.functional as F
+
+ import numpy as np
+ import math
+
+ import sys
+ sys.path.append('../')
+ from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \
+     MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \
+     MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer
+ from model.activations import Activations
+ from model.utils import import_class, conv_branch_init, conv_init, bn_init
+ from model.attentions import Attention_Layer
+
+ # import model.attentions
+
+ __block_type__ = {
+     'basic': (Basic_GCN_layer, Basic_TCN_layer),
+     'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer),
+     'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer),
+     'ms': (MS_GCN_layer, MS_TCN_layer),
+     'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer),
+ }
+
+
+ class Model(nn.Module):
+     def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten,
+                  **kwargs):
+         super(Model, self).__init__()
+         kwargs['act'] = Activations(kwargs['act'])
+         atten = None if atten == 'None' else atten
+         if graph is None:
+             raise ValueError()
+         else:
+             Graph = import_class(graph)
+             self.graph = Graph(**graph_args)
+         A = self.graph.A
+
+         self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point)
+
+         self.layers = nn.ModuleList()
+
+         for i, block in enumerate(block_args):
+             if i == 0:
+                 self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2],
+                                                  kernel_size=kernel_size, stride=block[3], A=A, block_type='basic',
+                                                  atten=None, **kwargs))
+             else:
+                 self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2],
+                                                  kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type,
+                                                  atten=atten, **kwargs))
+
+         self.gap = nn.AdaptiveAvgPool2d(1)
+         self.fc = nn.Linear(block_args[-1][1], num_class)
+
+         for m in self.modules():
+             if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer):
+                 for mm in m.modules():
+                     if isinstance(mm, nn.Conv2d):
+                         conv_branch_init(mm, self.graph.A.shape[0])
+                     if isinstance(mm, nn.BatchNorm2d):
+                         bn_init(mm, 1)
+             elif isinstance(m, nn.Conv2d):
+                 conv_init(m)
+             elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d):
+                 bn_init(m, 1)
+             elif isinstance(m, nn.Linear):
+                 nn.init.normal_(m.weight, 0, math.sqrt(2. / num_class))
+
+     def forward(self, x):
+         N, C, T, V, M = x.size()
+
+         x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)  # N C T V M --> N M V C T
+         x = self.data_bn(x)
+         x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
+
+         for i, layer in enumerate(self.layers):
+             x = layer(x)
+
+         features = x
+
+         x = self.gap(x).view(N, M, -1).mean(dim=1)
+         x = self.fc(x)
+
+         return features, x
+
+
+ class MST_GCN_block(nn.Module):
+     def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs):
+         super(MST_GCN_block, self).__init__()
+         self.atten = atten
+         self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A,
+                                                    residual=residual, **kwargs)
+         self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride,
+                                                    residual=residual, **kwargs)
+         if atten is not None:
+             self.att = Attention_Layer(out_channels, atten, **kwargs)
+
+     def forward(self, x):
+         return self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x))
+
+
+ if __name__ == '__main__':
+     import sys
+     import time
+
+     parts = [
+         np.array([5, 6, 7, 8, 22, 23]) - 1,  # left_arm
+         np.array([9, 10, 11, 12, 24, 25]) - 1,  # right_arm
+         np.array([13, 14, 15, 16]) - 1,  # left_leg
+         np.array([17, 18, 19, 20]) - 1,  # right_leg
+         np.array([1, 2, 3, 4, 21]) - 1  # torso
+     ]
+
+     warmup_iter = 3
+     test_iter = 10
+     sys.path.append('/home/chenzhan/mywork/MST-GCN/')
+     from thop import profile
+     basic_channels = 112
+     cfgs = {
+         'num_class': 2,
+         'num_point': 25,
+         'num_person': 1,
+         'block_args': [[2, basic_channels, False, 1],
+                        [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1],
+                        [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1],
+                        [basic_channels*2, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]],
+         'graph': 'graph.ntu_rgb_d.Graph',
+         'graph_args': {'labeling_mode': 'spatial'},
+         'kernel_size': 9,
+         'block_type': 'ms',
+         'reduct_ratio': 2,
+         'expand_ratio': 0,
+         't_scale': 4,
+         'layer_type': 'sep',
+         'act': 'relu',
+         's_scale': 4,
+         'atten': 'stcja',
+         'bias': True,
+         'parts': parts
+     }
+
+     model = Model(**cfgs)
+
+     N, C, T, V, M = 4, 2, 16, 25, 1
+     inputs = torch.rand(N, C, T, V, M)
+
+     for i in range(warmup_iter + test_iter):
+         if i == warmup_iter:
+             start_time = time.time()
+         outputs = model(inputs)
+     end_time = time.time()
+
+     total_time = end_time - start_time
+     print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format(
+         total_time, test_iter, total_time/test_iter/N))
+
+     print(outputs.size())
+
+     hereflops, params = profile(model, inputs=(inputs,), verbose=False)
+     print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N))
+     print('# Params is {} M'.format(sum(param.numel() for param in model.parameters()) / 10 ** 6))
+
+
+
+
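The only subtle part of Model.forward above is the reshaping around data_bn: the (N, C, T, V, M) input is flattened to (N, M*V*C, T) so that BatchNorm1d normalises each person/joint/channel series over time, then folded back to (N*M, C, T, V) so each body is processed as its own sample by the GCN blocks. A minimal shape check with hypothetical sizes, not tied to this checkpoint:

import torch

N, C, T, V, M = 2, 3, 300, 25, 2  # batch, channels, frames, joints, bodies
x = torch.rand(N, C, T, V, M)

# (N, C, T, V, M) -> (N, M*V*C, T) for BatchNorm1d over the time axis
x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T)
print(x.shape)  # torch.Size([2, 150, 300])

# back to (N*M, C, T, V): one sample per body for the spatial-temporal blocks
x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
print(x.shape)  # torch.Size([4, 3, 300, 25])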
ckpt/Others/MST-GCN/ntu120_xsub/xsub_jm/config.yaml ADDED
@@ -0,0 +1,107 @@
+ base_lr: 0.15
+ batch_size: 8
+ config: config/ntu120/xsub_jm.yaml
+ device:
+ - 0
+ eval_interval: 5
+ feeder: feeders.feeder.Feeder
+ ignore_weights: []
+ local_rank: 0
+ log_interval: 100
+ model: model.AEMST_GCN.Model
+ model_args:
+   act: relu
+   atten: None
+   bias: true
+   block_args:
+   - - 3
+     - 112
+     - false
+     - 1
+   - - 112
+     - 112
+     - true
+     - 1
+   - - 112
+     - 112
+     - true
+     - 1
+   - - 112
+     - 112
+     - true
+     - 1
+   - - 112
+     - 224
+     - true
+     - 2
+   - - 224
+     - 224
+     - true
+     - 1
+   - - 224
+     - 224
+     - true
+     - 1
+   - - 224
+     - 448
+     - true
+     - 2
+   - - 448
+     - 448
+     - true
+     - 1
+   - - 448
+     - 448
+     - true
+     - 1
+   block_type: ms
+   expand_ratio: 0
+   graph: graph.ntu_rgb_d.Graph
+   graph_args:
+     labeling_mode: spatial
+   kernel_size: 9
+   layer_type: basic
+   num_class: 120
+   num_person: 2
+   num_point: 25
+   reduct_ratio: 2
+   s_scale: 4
+   t_scale: 4
+ model_path: ''
+ model_saved_name: ./runs/ntu120/xsub_jm/runs
+ nesterov: true
+ num_epoch: 110
+ num_worker: 32
+ only_train_epoch: 0
+ only_train_part: false
+ optimizer: SGD
+ phase: train
+ print_log: true
+ save_interval: 1
+ save_score: true
+ seed: 1
+ show_topk:
+ - 1
+ - 5
+ start_epoch: 0
+ step:
+ - 50
+ - 70
+ - 90
+ test_batch_size: 64
+ test_feeder_args:
+   data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_joint_motion.npy
+   label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl
+ train_feeder_args:
+   data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_joint_motion.npy
+   debug: false
+   label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl
+   normalization: false
+   random_choose: false
+   random_move: false
+   random_shift: false
+   window_size: -1
+ warm_up_epoch: 10
+ weight_decay: 0.0001
+ weights: null
+ work_dir: ./work_dir/ntu120/xsub_jm
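The model_args block in the config above maps directly onto the constructor of the Model class in AEMST_GCN.py earlier in this upload. A minimal sketch of wiring the two together, assuming PyYAML is installed, the file is saved locally as config.yaml, and the MST-GCN code base (its model/ and graph/ packages) is on the import path; the repository's own training script is the real entry point:

import torch
import yaml

from model.AEMST_GCN import Model  # the file shown above

with open('config.yaml') as f:
    cfg = yaml.safe_load(f)

# graph.ntu_rgb_d.Graph is resolved inside Model via import_class;
# keys such as reduct_ratio or layer_type travel through **kwargs.
net = Model(**cfg['model_args'])

# joint-motion input: (batch, channels=3, frames, joints=25, bodies=2)
features, logits = net(torch.rand(1, 3, 300, 25, 2))
print(logits.shape)  # torch.Size([1, 120])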
ckpt/Others/MST-GCN/ntu120_xsub/xsub_jm/epoch1_test_score.pkl ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:17ffc874015dadb8c222a343c24de209c0fea9259c1445b38a61d0d35bd29ef7
+ size 29946137
ckpt/Others/MST-GCN/ntu120_xsub/xsub_jm/log.txt ADDED
@@ -0,0 +1,631 @@
1
+ [ Wed Sep 7 21:34:56 2022 ] # generator parameters: 2.922995 M.
2
+ [ Wed Sep 7 21:34:57 2022 ] Parameters:
3
+ {'work_dir': './work_dir/ntu120/xsub_jm', 'model_saved_name': './runs/ntu120/xsub_jm/runs', 'config': 'config/ntu120/xsub_jm.yaml', 'phase': 'train', 'save_score': True, 'seed': 1, 'log_interval': 100, 'save_interval': 1, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_data_joint_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_data_joint_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu120/xsub/val_label.pkl'}, 'model': 'model.AEMST_GCN.Model', 'model_args': {'num_class': 120, 'num_point': 25, 'num_person': 2, 'block_args': [[3, 112, False, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 224, True, 2], [224, 224, True, 1], [224, 224, True, 1], [224, 448, True, 2], [448, 448, True, 1], [448, 448, True, 1]], 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}, 'kernel_size': 9, 'block_type': 'ms', 'reduct_ratio': 2, 'expand_ratio': 0, 's_scale': 4, 't_scale': 4, 'layer_type': 'basic', 'act': 'relu', 'atten': 'None', 'bias': True}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.15, 'step': [50, 70, 90], 'device': [0], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 8, 'test_batch_size': 64, 'start_epoch': 0, 'model_path': '', 'num_epoch': 110, 'weight_decay': 0.0001, 'only_train_part': False, 'only_train_epoch': 0, 'warm_up_epoch': 10, 'local_rank': 0}
4
+
5
+ [ Wed Sep 7 21:34:57 2022 ] Training epoch: 1
6
+ [ Wed Sep 7 21:34:57 2022 ] Learning rate: 0.015
7
+ [ Wed Sep 7 21:41:33 2022 ] Mean training loss: 3.5505.
8
+ [ Wed Sep 7 21:41:33 2022 ] Time consumption: [Data]01%, [Network]99%
9
+ [ Wed Sep 7 21:41:33 2022 ] Training epoch: 2
10
+ [ Wed Sep 7 21:41:33 2022 ] Learning rate: 0.03
11
+ [ Wed Sep 7 21:48:10 2022 ] Mean training loss: 2.5463.
12
+ [ Wed Sep 7 21:48:10 2022 ] Time consumption: [Data]01%, [Network]99%
13
+ [ Wed Sep 7 21:48:10 2022 ] Training epoch: 3
14
+ [ Wed Sep 7 21:48:10 2022 ] Learning rate: 0.045
15
+ [ Wed Sep 7 21:54:48 2022 ] Mean training loss: 1.9928.
16
+ [ Wed Sep 7 21:54:48 2022 ] Time consumption: [Data]01%, [Network]99%
17
+ [ Wed Sep 7 21:54:48 2022 ] Training epoch: 4
18
+ [ Wed Sep 7 21:54:48 2022 ] Learning rate: 0.06
19
+ [ Wed Sep 7 22:01:25 2022 ] Mean training loss: 1.6971.
20
+ [ Wed Sep 7 22:01:25 2022 ] Time consumption: [Data]01%, [Network]99%
21
+ [ Wed Sep 7 22:01:25 2022 ] Training epoch: 5
22
+ [ Wed Sep 7 22:01:25 2022 ] Learning rate: 0.075
23
+ [ Wed Sep 7 22:08:02 2022 ] Mean training loss: 1.5555.
24
+ [ Wed Sep 7 22:08:02 2022 ] Time consumption: [Data]01%, [Network]99%
25
+ [ Wed Sep 7 22:08:02 2022 ] Training epoch: 6
26
+ [ Wed Sep 7 22:08:02 2022 ] Learning rate: 0.09
27
+ [ Wed Sep 7 22:14:38 2022 ] Mean training loss: 1.4570.
28
+ [ Wed Sep 7 22:14:38 2022 ] Time consumption: [Data]01%, [Network]99%
29
+ [ Wed Sep 7 22:14:38 2022 ] Training epoch: 7
30
+ [ Wed Sep 7 22:14:38 2022 ] Learning rate: 0.10500000000000001
31
+ [ Wed Sep 7 22:21:16 2022 ] Mean training loss: 1.3999.
32
+ [ Wed Sep 7 22:21:16 2022 ] Time consumption: [Data]01%, [Network]99%
33
+ [ Wed Sep 7 22:21:16 2022 ] Training epoch: 8
34
+ [ Wed Sep 7 22:21:16 2022 ] Learning rate: 0.12
35
+ [ Wed Sep 7 22:27:54 2022 ] Mean training loss: 1.3780.
36
+ [ Wed Sep 7 22:27:54 2022 ] Time consumption: [Data]01%, [Network]99%
37
+ [ Wed Sep 7 22:27:54 2022 ] Training epoch: 9
38
+ [ Wed Sep 7 22:27:54 2022 ] Learning rate: 0.13499999999999998
39
+ [ Wed Sep 7 22:34:31 2022 ] Mean training loss: 1.3462.
40
+ [ Wed Sep 7 22:34:31 2022 ] Time consumption: [Data]01%, [Network]99%
41
+ [ Wed Sep 7 22:34:31 2022 ] Training epoch: 10
42
+ [ Wed Sep 7 22:34:31 2022 ] Learning rate: 0.15
43
+ [ Wed Sep 7 22:41:08 2022 ] Mean training loss: 1.3458.
44
+ [ Wed Sep 7 22:41:08 2022 ] Time consumption: [Data]01%, [Network]99%
45
+ [ Wed Sep 7 22:41:08 2022 ] Training epoch: 11
46
+ [ Wed Sep 7 22:41:08 2022 ] Learning rate: 0.15
47
+ [ Wed Sep 7 22:47:45 2022 ] Mean training loss: 1.2974.
48
+ [ Wed Sep 7 22:47:45 2022 ] Time consumption: [Data]01%, [Network]99%
49
+ [ Wed Sep 7 22:47:45 2022 ] Training epoch: 12
50
+ [ Wed Sep 7 22:47:45 2022 ] Learning rate: 0.15
51
+ [ Wed Sep 7 22:54:22 2022 ] Mean training loss: 1.2424.
52
+ [ Wed Sep 7 22:54:22 2022 ] Time consumption: [Data]01%, [Network]99%
53
+ [ Wed Sep 7 22:54:22 2022 ] Training epoch: 13
54
+ [ Wed Sep 7 22:54:22 2022 ] Learning rate: 0.15
55
+ [ Wed Sep 7 23:00:59 2022 ] Mean training loss: 1.2103.
56
+ [ Wed Sep 7 23:00:59 2022 ] Time consumption: [Data]01%, [Network]99%
57
+ [ Wed Sep 7 23:00:59 2022 ] Training epoch: 14
58
+ [ Wed Sep 7 23:00:59 2022 ] Learning rate: 0.15
59
+ [ Wed Sep 7 23:07:37 2022 ] Mean training loss: 1.1999.
60
+ [ Wed Sep 7 23:07:37 2022 ] Time consumption: [Data]01%, [Network]99%
61
+ [ Wed Sep 7 23:07:37 2022 ] Training epoch: 15
62
+ [ Wed Sep 7 23:07:37 2022 ] Learning rate: 0.15
63
+ [ Wed Sep 7 23:14:14 2022 ] Mean training loss: 1.1591.
64
+ [ Wed Sep 7 23:14:14 2022 ] Time consumption: [Data]01%, [Network]99%
65
+ [ Wed Sep 7 23:14:14 2022 ] Training epoch: 16
66
+ [ Wed Sep 7 23:14:14 2022 ] Learning rate: 0.15
67
+ [ Wed Sep 7 23:20:51 2022 ] Mean training loss: 1.1549.
68
+ [ Wed Sep 7 23:20:51 2022 ] Time consumption: [Data]01%, [Network]99%
69
+ [ Wed Sep 7 23:20:51 2022 ] Training epoch: 17
70
+ [ Wed Sep 7 23:20:51 2022 ] Learning rate: 0.15
71
+ [ Wed Sep 7 23:27:28 2022 ] Mean training loss: 1.1448.
72
+ [ Wed Sep 7 23:27:28 2022 ] Time consumption: [Data]01%, [Network]99%
73
+ [ Wed Sep 7 23:27:28 2022 ] Training epoch: 18
74
+ [ Wed Sep 7 23:27:28 2022 ] Learning rate: 0.15
75
+ [ Wed Sep 7 23:34:05 2022 ] Mean training loss: 1.1075.
76
+ [ Wed Sep 7 23:34:05 2022 ] Time consumption: [Data]01%, [Network]99%
77
+ [ Wed Sep 7 23:34:05 2022 ] Training epoch: 19
78
+ [ Wed Sep 7 23:34:05 2022 ] Learning rate: 0.15
79
+ [ Wed Sep 7 23:40:40 2022 ] Mean training loss: 1.1084.
80
+ [ Wed Sep 7 23:40:40 2022 ] Time consumption: [Data]01%, [Network]99%
81
+ [ Wed Sep 7 23:40:40 2022 ] Training epoch: 20
82
+ [ Wed Sep 7 23:40:40 2022 ] Learning rate: 0.15
83
+ [ Wed Sep 7 23:47:16 2022 ] Mean training loss: 1.0991.
84
+ [ Wed Sep 7 23:47:16 2022 ] Time consumption: [Data]01%, [Network]99%
85
+ [ Wed Sep 7 23:47:16 2022 ] Training epoch: 21
86
+ [ Wed Sep 7 23:47:16 2022 ] Learning rate: 0.15
87
+ [ Wed Sep 7 23:53:54 2022 ] Mean training loss: 1.0842.
88
+ [ Wed Sep 7 23:53:54 2022 ] Time consumption: [Data]01%, [Network]99%
89
+ [ Wed Sep 7 23:53:54 2022 ] Training epoch: 22
90
+ [ Wed Sep 7 23:53:54 2022 ] Learning rate: 0.15
91
+ [ Thu Sep 8 00:00:32 2022 ] Mean training loss: 1.0714.
92
+ [ Thu Sep 8 00:00:32 2022 ] Time consumption: [Data]01%, [Network]99%
93
+ [ Thu Sep 8 00:00:32 2022 ] Training epoch: 23
94
+ [ Thu Sep 8 00:00:32 2022 ] Learning rate: 0.15
95
+ [ Thu Sep 8 00:07:10 2022 ] Mean training loss: 1.0559.
96
+ [ Thu Sep 8 00:07:10 2022 ] Time consumption: [Data]01%, [Network]99%
97
+ [ Thu Sep 8 00:07:10 2022 ] Training epoch: 24
98
+ [ Thu Sep 8 00:07:10 2022 ] Learning rate: 0.15
99
+ [ Thu Sep 8 00:13:46 2022 ] Mean training loss: 1.0471.
100
+ [ Thu Sep 8 00:13:46 2022 ] Time consumption: [Data]01%, [Network]99%
101
+ [ Thu Sep 8 00:13:46 2022 ] Training epoch: 25
102
+ [ Thu Sep 8 00:13:46 2022 ] Learning rate: 0.15
103
+ [ Thu Sep 8 00:20:23 2022 ] Mean training loss: 1.0418.
104
+ [ Thu Sep 8 00:20:23 2022 ] Time consumption: [Data]01%, [Network]99%
105
+ [ Thu Sep 8 00:20:23 2022 ] Training epoch: 26
106
+ [ Thu Sep 8 00:20:23 2022 ] Learning rate: 0.15
107
+ [ Thu Sep 8 00:27:00 2022 ] Mean training loss: 1.0377.
108
+ [ Thu Sep 8 00:27:00 2022 ] Time consumption: [Data]01%, [Network]99%
109
+ [ Thu Sep 8 00:27:00 2022 ] Training epoch: 27
110
+ [ Thu Sep 8 00:27:00 2022 ] Learning rate: 0.15
111
+ [ Thu Sep 8 00:33:37 2022 ] Mean training loss: 1.0365.
112
+ [ Thu Sep 8 00:33:37 2022 ] Time consumption: [Data]01%, [Network]99%
113
+ [ Thu Sep 8 00:33:37 2022 ] Training epoch: 28
114
+ [ Thu Sep 8 00:33:37 2022 ] Learning rate: 0.15
115
+ [ Thu Sep 8 00:40:14 2022 ] Mean training loss: 1.0270.
116
+ [ Thu Sep 8 00:40:14 2022 ] Time consumption: [Data]01%, [Network]99%
117
+ [ Thu Sep 8 00:40:14 2022 ] Training epoch: 29
118
+ [ Thu Sep 8 00:40:14 2022 ] Learning rate: 0.15
119
+ [ Thu Sep 8 00:46:52 2022 ] Mean training loss: 1.0189.
120
+ [ Thu Sep 8 00:46:52 2022 ] Time consumption: [Data]01%, [Network]99%
121
+ [ Thu Sep 8 00:46:52 2022 ] Training epoch: 30
122
+ [ Thu Sep 8 00:46:52 2022 ] Learning rate: 0.15
123
+ [ Thu Sep 8 00:53:28 2022 ] Mean training loss: 1.0129.
124
+ [ Thu Sep 8 00:53:28 2022 ] Time consumption: [Data]01%, [Network]99%
125
+ [ Thu Sep 8 00:53:28 2022 ] Training epoch: 31
126
+ [ Thu Sep 8 00:53:28 2022 ] Learning rate: 0.15
127
+ [ Thu Sep 8 01:00:05 2022 ] Mean training loss: 1.0095.
128
+ [ Thu Sep 8 01:00:05 2022 ] Time consumption: [Data]01%, [Network]99%
129
+ [ Thu Sep 8 01:00:05 2022 ] Training epoch: 32
130
+ [ Thu Sep 8 01:00:05 2022 ] Learning rate: 0.15
131
+ [ Thu Sep 8 01:06:41 2022 ] Mean training loss: 1.0034.
132
+ [ Thu Sep 8 01:06:41 2022 ] Time consumption: [Data]01%, [Network]99%
133
+ [ Thu Sep 8 01:06:41 2022 ] Training epoch: 33
134
+ [ Thu Sep 8 01:06:41 2022 ] Learning rate: 0.15
135
+ [ Thu Sep 8 01:13:16 2022 ] Mean training loss: 1.0090.
136
+ [ Thu Sep 8 01:13:16 2022 ] Time consumption: [Data]01%, [Network]99%
137
+ [ Thu Sep 8 01:13:16 2022 ] Training epoch: 34
138
+ [ Thu Sep 8 01:13:16 2022 ] Learning rate: 0.15
139
+ [ Thu Sep 8 01:19:53 2022 ] Mean training loss: 0.9960.
140
+ [ Thu Sep 8 01:19:53 2022 ] Time consumption: [Data]01%, [Network]99%
141
+ [ Thu Sep 8 01:19:53 2022 ] Training epoch: 35
142
+ [ Thu Sep 8 01:19:53 2022 ] Learning rate: 0.15
143
+ [ Thu Sep 8 01:26:30 2022 ] Mean training loss: 1.0126.
144
+ [ Thu Sep 8 01:26:30 2022 ] Time consumption: [Data]01%, [Network]99%
145
+ [ Thu Sep 8 01:26:30 2022 ] Training epoch: 36
146
+ [ Thu Sep 8 01:26:30 2022 ] Learning rate: 0.15
147
+ [ Thu Sep 8 01:33:08 2022 ] Mean training loss: 0.9907.
148
+ [ Thu Sep 8 01:33:08 2022 ] Time consumption: [Data]01%, [Network]99%
149
+ [ Thu Sep 8 01:33:08 2022 ] Training epoch: 37
150
+ [ Thu Sep 8 01:33:08 2022 ] Learning rate: 0.15
151
+ [ Thu Sep 8 01:39:47 2022 ] Mean training loss: 0.9857.
152
+ [ Thu Sep 8 01:39:47 2022 ] Time consumption: [Data]01%, [Network]99%
153
+ [ Thu Sep 8 01:39:47 2022 ] Training epoch: 38
154
+ [ Thu Sep 8 01:39:47 2022 ] Learning rate: 0.15
155
+ [ Thu Sep 8 01:46:26 2022 ] Mean training loss: 0.9833.
156
+ [ Thu Sep 8 01:46:26 2022 ] Time consumption: [Data]01%, [Network]99%
157
+ [ Thu Sep 8 01:46:26 2022 ] Training epoch: 39
158
+ [ Thu Sep 8 01:46:26 2022 ] Learning rate: 0.15
159
+ [ Thu Sep 8 01:53:05 2022 ] Mean training loss: 0.9808.
160
+ [ Thu Sep 8 01:53:05 2022 ] Time consumption: [Data]01%, [Network]99%
161
+ [ Thu Sep 8 01:53:05 2022 ] Training epoch: 40
162
+ [ Thu Sep 8 01:53:05 2022 ] Learning rate: 0.15
163
+ [ Thu Sep 8 01:59:44 2022 ] Mean training loss: 0.9796.
164
+ [ Thu Sep 8 01:59:44 2022 ] Time consumption: [Data]01%, [Network]99%
165
+ [ Thu Sep 8 01:59:44 2022 ] Training epoch: 41
166
+ [ Thu Sep 8 01:59:44 2022 ] Learning rate: 0.15
167
+ [ Thu Sep 8 02:06:23 2022 ] Mean training loss: 0.9781.
168
+ [ Thu Sep 8 02:06:23 2022 ] Time consumption: [Data]01%, [Network]99%
169
+ [ Thu Sep 8 02:06:23 2022 ] Training epoch: 42
170
+ [ Thu Sep 8 02:06:23 2022 ] Learning rate: 0.15
171
+ [ Thu Sep 8 02:13:02 2022 ] Mean training loss: 0.9829.
172
+ [ Thu Sep 8 02:13:02 2022 ] Time consumption: [Data]01%, [Network]99%
173
+ [ Thu Sep 8 02:13:02 2022 ] Training epoch: 43
174
+ [ Thu Sep 8 02:13:02 2022 ] Learning rate: 0.15
175
+ [ Thu Sep 8 02:19:40 2022 ] Mean training loss: 0.9875.
176
+ [ Thu Sep 8 02:19:40 2022 ] Time consumption: [Data]01%, [Network]99%
177
+ [ Thu Sep 8 02:19:40 2022 ] Training epoch: 44
178
+ [ Thu Sep 8 02:19:40 2022 ] Learning rate: 0.15
179
+ [ Thu Sep 8 02:26:17 2022 ] Mean training loss: 0.9765.
180
+ [ Thu Sep 8 02:26:17 2022 ] Time consumption: [Data]01%, [Network]99%
181
+ [ Thu Sep 8 02:26:17 2022 ] Training epoch: 45
182
+ [ Thu Sep 8 02:26:17 2022 ] Learning rate: 0.15
183
+ [ Thu Sep 8 02:32:55 2022 ] Mean training loss: 0.9734.
184
+ [ Thu Sep 8 02:32:55 2022 ] Time consumption: [Data]01%, [Network]99%
185
+ [ Thu Sep 8 02:32:55 2022 ] Training epoch: 46
186
+ [ Thu Sep 8 02:32:55 2022 ] Learning rate: 0.15
187
+ [ Thu Sep 8 02:39:33 2022 ] Mean training loss: 0.9846.
188
+ [ Thu Sep 8 02:39:33 2022 ] Time consumption: [Data]01%, [Network]99%
189
+ [ Thu Sep 8 02:39:33 2022 ] Training epoch: 47
190
+ [ Thu Sep 8 02:39:33 2022 ] Learning rate: 0.15
191
+ [ Thu Sep 8 02:46:11 2022 ] Mean training loss: 0.9679.
192
+ [ Thu Sep 8 02:46:11 2022 ] Time consumption: [Data]01%, [Network]99%
193
+ [ Thu Sep 8 02:46:11 2022 ] Training epoch: 48
194
+ [ Thu Sep 8 02:46:11 2022 ] Learning rate: 0.15
195
+ [ Thu Sep 8 02:52:48 2022 ] Mean training loss: 0.9770.
196
+ [ Thu Sep 8 02:52:48 2022 ] Time consumption: [Data]01%, [Network]99%
197
+ [ Thu Sep 8 02:52:48 2022 ] Training epoch: 49
198
+ [ Thu Sep 8 02:52:48 2022 ] Learning rate: 0.15
199
+ [ Thu Sep 8 02:59:25 2022 ] Mean training loss: 0.9773.
200
+ [ Thu Sep 8 02:59:25 2022 ] Time consumption: [Data]01%, [Network]99%
201
+ [ Thu Sep 8 02:59:25 2022 ] Training epoch: 50
202
+ [ Thu Sep 8 02:59:25 2022 ] Learning rate: 0.15
203
+ [ Thu Sep 8 03:06:03 2022 ] Mean training loss: 0.9652.
204
+ [ Thu Sep 8 03:06:03 2022 ] Time consumption: [Data]01%, [Network]99%
205
+ [ Thu Sep 8 03:06:03 2022 ] Training epoch: 51
206
+ [ Thu Sep 8 03:06:03 2022 ] Learning rate: 0.015
207
+ [ Thu Sep 8 03:12:39 2022 ] Mean training loss: 0.5024.
208
+ [ Thu Sep 8 03:12:39 2022 ] Time consumption: [Data]01%, [Network]99%
209
+ [ Thu Sep 8 03:12:39 2022 ] Eval epoch: 51
210
+ [ Thu Sep 8 03:19:30 2022 ] Epoch 51 Curr Acc: (26427/50919)51.90%
211
+ [ Thu Sep 8 03:19:30 2022 ] Epoch 51 Best Acc 51.90%
212
+ [ Thu Sep 8 03:19:30 2022 ] Training epoch: 52
213
+ [ Thu Sep 8 03:19:30 2022 ] Learning rate: 0.015
214
+ [ Thu Sep 8 03:26:05 2022 ] Mean training loss: 0.3750.
215
+ [ Thu Sep 8 03:26:05 2022 ] Time consumption: [Data]01%, [Network]99%
216
+ [ Thu Sep 8 03:26:05 2022 ] Eval epoch: 52
217
+ [ Thu Sep 8 03:32:48 2022 ] Epoch 52 Curr Acc: (27217/50919)53.45%
218
+ [ Thu Sep 8 03:32:48 2022 ] Epoch 52 Best Acc 53.45%
219
+ [ Thu Sep 8 03:32:48 2022 ] Training epoch: 53
220
+ [ Thu Sep 8 03:32:48 2022 ] Learning rate: 0.015
221
+ [ Thu Sep 8 03:39:23 2022 ] Mean training loss: 0.3166.
222
+ [ Thu Sep 8 03:39:23 2022 ] Time consumption: [Data]01%, [Network]99%
223
+ [ Thu Sep 8 03:39:24 2022 ] Eval epoch: 53
224
+ [ Thu Sep 8 03:46:06 2022 ] Epoch 53 Curr Acc: (27711/50919)54.42%
225
+ [ Thu Sep 8 03:46:06 2022 ] Epoch 53 Best Acc 54.42%
226
+ [ Thu Sep 8 03:46:06 2022 ] Training epoch: 54
227
+ [ Thu Sep 8 03:46:06 2022 ] Learning rate: 0.015
228
+ [ Thu Sep 8 03:52:42 2022 ] Mean training loss: 0.2698.
229
+ [ Thu Sep 8 03:52:42 2022 ] Time consumption: [Data]01%, [Network]99%
230
+ [ Thu Sep 8 03:52:43 2022 ] Eval epoch: 54
231
+ [ Thu Sep 8 03:59:25 2022 ] Epoch 54 Curr Acc: (27776/50919)54.55%
232
+ [ Thu Sep 8 03:59:25 2022 ] Epoch 54 Best Acc 54.55%
233
+ [ Thu Sep 8 03:59:25 2022 ] Training epoch: 55
234
+ [ Thu Sep 8 03:59:25 2022 ] Learning rate: 0.015
235
+ [ Thu Sep 8 04:06:01 2022 ] Mean training loss: 0.2366.
236
+ [ Thu Sep 8 04:06:01 2022 ] Time consumption: [Data]01%, [Network]99%
237
+ [ Thu Sep 8 04:06:02 2022 ] Eval epoch: 55
238
+ [ Thu Sep 8 04:12:44 2022 ] Epoch 55 Curr Acc: (27484/50919)53.98%
239
+ [ Thu Sep 8 04:12:44 2022 ] Epoch 54 Best Acc 54.55%
240
+ [ Thu Sep 8 04:12:44 2022 ] Training epoch: 56
241
+ [ Thu Sep 8 04:12:44 2022 ] Learning rate: 0.015
242
+ [ Thu Sep 8 04:19:21 2022 ] Mean training loss: 0.2079.
243
+ [ Thu Sep 8 04:19:21 2022 ] Time consumption: [Data]01%, [Network]99%
244
+ [ Thu Sep 8 04:19:21 2022 ] Eval epoch: 56
245
+ [ Thu Sep 8 04:26:03 2022 ] Epoch 56 Curr Acc: (26107/50919)51.27%
246
+ [ Thu Sep 8 04:26:03 2022 ] Epoch 54 Best Acc 54.55%
247
+ [ Thu Sep 8 04:26:03 2022 ] Training epoch: 57
248
+ [ Thu Sep 8 04:26:03 2022 ] Learning rate: 0.015
249
+ [ Thu Sep 8 04:32:38 2022 ] Mean training loss: 0.1758.
250
+ [ Thu Sep 8 04:32:38 2022 ] Time consumption: [Data]01%, [Network]99%
251
+ [ Thu Sep 8 04:32:38 2022 ] Eval epoch: 57
252
+ [ Thu Sep 8 04:39:21 2022 ] Epoch 57 Curr Acc: (27395/50919)53.80%
253
+ [ Thu Sep 8 04:39:21 2022 ] Epoch 54 Best Acc 54.55%
254
+ [ Thu Sep 8 04:39:21 2022 ] Training epoch: 58
255
+ [ Thu Sep 8 04:39:21 2022 ] Learning rate: 0.015
256
+ [ Thu Sep 8 04:45:55 2022 ] Mean training loss: 0.1576.
257
+ [ Thu Sep 8 04:45:55 2022 ] Time consumption: [Data]01%, [Network]99%
258
+ [ Thu Sep 8 04:45:55 2022 ] Eval epoch: 58
259
+ [ Thu Sep 8 04:52:38 2022 ] Epoch 58 Curr Acc: (26128/50919)51.31%
260
+ [ Thu Sep 8 04:52:38 2022 ] Epoch 54 Best Acc 54.55%
261
+ [ Thu Sep 8 04:52:38 2022 ] Training epoch: 59
262
+ [ Thu Sep 8 04:52:38 2022 ] Learning rate: 0.015
263
+ [ Thu Sep 8 04:59:12 2022 ] Mean training loss: 0.1435.
264
+ [ Thu Sep 8 04:59:12 2022 ] Time consumption: [Data]01%, [Network]99%
265
+ [ Thu Sep 8 04:59:12 2022 ] Eval epoch: 59
266
+ [ Thu Sep 8 05:05:55 2022 ] Epoch 59 Curr Acc: (27495/50919)54.00%
267
+ [ Thu Sep 8 05:05:55 2022 ] Epoch 54 Best Acc 54.55%
268
+ [ Thu Sep 8 05:05:55 2022 ] Training epoch: 60
269
+ [ Thu Sep 8 05:05:55 2022 ] Learning rate: 0.015
270
+ [ Thu Sep 8 05:12:31 2022 ] Mean training loss: 0.1249.
271
+ [ Thu Sep 8 05:12:31 2022 ] Time consumption: [Data]01%, [Network]99%
272
+ [ Thu Sep 8 05:12:31 2022 ] Eval epoch: 60
273
+ [ Thu Sep 8 05:19:14 2022 ] Epoch 60 Curr Acc: (26375/50919)51.80%
274
+ [ Thu Sep 8 05:19:14 2022 ] Epoch 54 Best Acc 54.55%
275
+ [ Thu Sep 8 05:19:14 2022 ] Training epoch: 61
276
+ [ Thu Sep 8 05:19:14 2022 ] Learning rate: 0.015
277
+ [ Thu Sep 8 05:25:49 2022 ] Mean training loss: 0.1140.
278
+ [ Thu Sep 8 05:25:49 2022 ] Time consumption: [Data]01%, [Network]99%
279
+ [ Thu Sep 8 05:25:49 2022 ] Eval epoch: 61
280
+ [ Thu Sep 8 05:32:32 2022 ] Epoch 61 Curr Acc: (26778/50919)52.59%
281
+ [ Thu Sep 8 05:32:32 2022 ] Epoch 54 Best Acc 54.55%
282
+ [ Thu Sep 8 05:32:32 2022 ] Training epoch: 62
283
+ [ Thu Sep 8 05:32:32 2022 ] Learning rate: 0.015
284
+ [ Thu Sep 8 05:39:08 2022 ] Mean training loss: 0.1064.
285
+ [ Thu Sep 8 05:39:08 2022 ] Time consumption: [Data]01%, [Network]99%
286
+ [ Thu Sep 8 05:39:08 2022 ] Eval epoch: 62
287
+ [ Thu Sep 8 05:45:51 2022 ] Epoch 62 Curr Acc: (27000/50919)53.03%
288
+ [ Thu Sep 8 05:45:51 2022 ] Epoch 54 Best Acc 54.55%
289
+ [ Thu Sep 8 05:45:51 2022 ] Training epoch: 63
290
+ [ Thu Sep 8 05:45:51 2022 ] Learning rate: 0.015
291
+ [ Thu Sep 8 05:52:26 2022 ] Mean training loss: 0.1195.
292
+ [ Thu Sep 8 05:52:26 2022 ] Time consumption: [Data]01%, [Network]99%
293
+ [ Thu Sep 8 05:52:26 2022 ] Eval epoch: 63
294
+ [ Thu Sep 8 05:59:09 2022 ] Epoch 63 Curr Acc: (26677/50919)52.39%
295
+ [ Thu Sep 8 05:59:09 2022 ] Epoch 54 Best Acc 54.55%
296
+ [ Thu Sep 8 05:59:09 2022 ] Training epoch: 64
297
+ [ Thu Sep 8 05:59:09 2022 ] Learning rate: 0.015
298
+ [ Thu Sep 8 06:05:45 2022 ] Mean training loss: 0.1119.
299
+ [ Thu Sep 8 06:05:45 2022 ] Time consumption: [Data]01%, [Network]99%
300
+ [ Thu Sep 8 06:05:45 2022 ] Eval epoch: 64
301
+ [ Thu Sep 8 06:12:28 2022 ] Epoch 64 Curr Acc: (25515/50919)50.11%
302
+ [ Thu Sep 8 06:12:28 2022 ] Epoch 54 Best Acc 54.55%
303
+ [ Thu Sep 8 06:12:28 2022 ] Training epoch: 65
304
+ [ Thu Sep 8 06:12:28 2022 ] Learning rate: 0.015
305
+ [ Thu Sep 8 06:19:01 2022 ] Mean training loss: 0.1071.
306
+ [ Thu Sep 8 06:19:01 2022 ] Time consumption: [Data]01%, [Network]99%
307
+ [ Thu Sep 8 06:19:01 2022 ] Eval epoch: 65
308
+ [ Thu Sep 8 06:25:43 2022 ] Epoch 65 Curr Acc: (26535/50919)52.11%
309
+ [ Thu Sep 8 06:25:43 2022 ] Epoch 54 Best Acc 54.55%
310
+ [ Thu Sep 8 06:25:44 2022 ] Training epoch: 66
311
+ [ Thu Sep 8 06:25:44 2022 ] Learning rate: 0.015
312
+ [ Thu Sep 8 06:32:19 2022 ] Mean training loss: 0.1063.
313
+ [ Thu Sep 8 06:32:19 2022 ] Time consumption: [Data]01%, [Network]99%
314
+ [ Thu Sep 8 06:32:19 2022 ] Eval epoch: 66
315
+ [ Thu Sep 8 06:39:01 2022 ] Epoch 66 Curr Acc: (26613/50919)52.27%
316
+ [ Thu Sep 8 06:39:01 2022 ] Epoch 54 Best Acc 54.55%
317
+ [ Thu Sep 8 06:39:01 2022 ] Training epoch: 67
318
+ [ Thu Sep 8 06:39:01 2022 ] Learning rate: 0.015
319
+ [ Thu Sep 8 06:45:37 2022 ] Mean training loss: 0.1039.
320
+ [ Thu Sep 8 06:45:37 2022 ] Time consumption: [Data]01%, [Network]99%
321
+ [ Thu Sep 8 06:45:37 2022 ] Eval epoch: 67
322
+ [ Thu Sep 8 06:52:20 2022 ] Epoch 67 Curr Acc: (25165/50919)49.42%
323
+ [ Thu Sep 8 06:52:20 2022 ] Epoch 54 Best Acc 54.55%
324
+ [ Thu Sep 8 06:52:20 2022 ] Training epoch: 68
325
+ [ Thu Sep 8 06:52:20 2022 ] Learning rate: 0.015
326
+ [ Thu Sep 8 06:58:55 2022 ] Mean training loss: 0.1104.
327
+ [ Thu Sep 8 06:58:55 2022 ] Time consumption: [Data]01%, [Network]99%
328
+ [ Thu Sep 8 06:58:55 2022 ] Eval epoch: 68
329
+ [ Thu Sep 8 07:05:38 2022 ] Epoch 68 Curr Acc: (26270/50919)51.59%
330
+ [ Thu Sep 8 07:05:38 2022 ] Epoch 54 Best Acc 54.55%
331
+ [ Thu Sep 8 07:05:38 2022 ] Training epoch: 69
332
+ [ Thu Sep 8 07:05:38 2022 ] Learning rate: 0.015
333
+ [ Thu Sep 8 07:12:13 2022 ] Mean training loss: 0.0960.
334
+ [ Thu Sep 8 07:12:13 2022 ] Time consumption: [Data]01%, [Network]99%
335
+ [ Thu Sep 8 07:12:13 2022 ] Eval epoch: 69
336
+ [ Thu Sep 8 07:18:55 2022 ] Epoch 69 Curr Acc: (26027/50919)51.11%
337
+ [ Thu Sep 8 07:18:55 2022 ] Epoch 54 Best Acc 54.55%
338
+ [ Thu Sep 8 07:18:55 2022 ] Training epoch: 70
339
+ [ Thu Sep 8 07:18:55 2022 ] Learning rate: 0.015
340
+ [ Thu Sep 8 07:25:31 2022 ] Mean training loss: 0.1008.
341
+ [ Thu Sep 8 07:25:31 2022 ] Time consumption: [Data]01%, [Network]99%
342
+ [ Thu Sep 8 07:25:31 2022 ] Eval epoch: 70
343
+ [ Thu Sep 8 07:32:13 2022 ] Epoch 70 Curr Acc: (25435/50919)49.95%
344
+ [ Thu Sep 8 07:32:13 2022 ] Epoch 54 Best Acc 54.55%
345
+ [ Thu Sep 8 07:32:13 2022 ] Training epoch: 71
346
+ [ Thu Sep 8 07:32:13 2022 ] Learning rate: 0.0015000000000000002
347
+ [ Thu Sep 8 07:38:48 2022 ] Mean training loss: 0.0533.
348
+ [ Thu Sep 8 07:38:48 2022 ] Time consumption: [Data]01%, [Network]99%
349
+ [ Thu Sep 8 07:38:48 2022 ] Eval epoch: 71
350
+ [ Thu Sep 8 07:45:31 2022 ] Epoch 71 Curr Acc: (27067/50919)53.16%
351
+ [ Thu Sep 8 07:45:31 2022 ] Epoch 54 Best Acc 54.55%
352
+ [ Thu Sep 8 07:45:31 2022 ] Training epoch: 72
353
+ [ Thu Sep 8 07:45:31 2022 ] Learning rate: 0.0015000000000000002
354
+ [ Thu Sep 8 07:52:07 2022 ] Mean training loss: 0.0343.
355
+ [ Thu Sep 8 07:52:07 2022 ] Time consumption: [Data]01%, [Network]99%
356
+ [ Thu Sep 8 07:52:07 2022 ] Eval epoch: 72
357
+ [ Thu Sep 8 07:58:50 2022 ] Epoch 72 Curr Acc: (27244/50919)53.50%
358
+ [ Thu Sep 8 07:58:50 2022 ] Epoch 54 Best Acc 54.55%
359
+ [ Thu Sep 8 07:58:50 2022 ] Training epoch: 73
360
+ [ Thu Sep 8 07:58:50 2022 ] Learning rate: 0.0015000000000000002
361
+ [ Thu Sep 8 08:05:26 2022 ] Mean training loss: 0.0275.
362
+ [ Thu Sep 8 08:05:26 2022 ] Time consumption: [Data]01%, [Network]99%
363
+ [ Thu Sep 8 08:05:26 2022 ] Eval epoch: 73
364
+ [ Thu Sep 8 08:12:08 2022 ] Epoch 73 Curr Acc: (27387/50919)53.79%
365
+ [ Thu Sep 8 08:12:08 2022 ] Epoch 54 Best Acc 54.55%
366
+ [ Thu Sep 8 08:12:08 2022 ] Training epoch: 74
367
+ [ Thu Sep 8 08:12:08 2022 ] Learning rate: 0.0015000000000000002
368
+ [ Thu Sep 8 08:18:44 2022 ] Mean training loss: 0.0236.
369
+ [ Thu Sep 8 08:18:44 2022 ] Time consumption: [Data]01%, [Network]99%
370
+ [ Thu Sep 8 08:18:44 2022 ] Eval epoch: 74
371
+ [ Thu Sep 8 08:25:27 2022 ] Epoch 74 Curr Acc: (26749/50919)52.53%
372
+ [ Thu Sep 8 08:25:27 2022 ] Epoch 54 Best Acc 54.55%
373
+ [ Thu Sep 8 08:25:27 2022 ] Training epoch: 75
374
+ [ Thu Sep 8 08:25:27 2022 ] Learning rate: 0.0015000000000000002
375
+ [ Thu Sep 8 08:32:03 2022 ] Mean training loss: 0.0219.
376
+ [ Thu Sep 8 08:32:03 2022 ] Time consumption: [Data]01%, [Network]99%
377
+ [ Thu Sep 8 08:32:03 2022 ] Eval epoch: 75
378
+ [ Thu Sep 8 08:38:45 2022 ] Epoch 75 Curr Acc: (27462/50919)53.93%
379
+ [ Thu Sep 8 08:38:45 2022 ] Epoch 54 Best Acc 54.55%
380
+ [ Thu Sep 8 08:38:45 2022 ] Training epoch: 76
381
+ [ Thu Sep 8 08:38:45 2022 ] Learning rate: 0.0015000000000000002
382
+ [ Thu Sep 8 08:45:21 2022 ] Mean training loss: 0.0225.
383
+ [ Thu Sep 8 08:45:21 2022 ] Time consumption: [Data]01%, [Network]99%
384
+ [ Thu Sep 8 08:45:21 2022 ] Eval epoch: 76
385
+ [ Thu Sep 8 08:52:04 2022 ] Epoch 76 Curr Acc: (26647/50919)52.33%
386
+ [ Thu Sep 8 08:52:04 2022 ] Epoch 54 Best Acc 54.55%
387
+ [ Thu Sep 8 08:52:04 2022 ] Training epoch: 77
388
+ [ Thu Sep 8 08:52:04 2022 ] Learning rate: 0.0015000000000000002
389
+ [ Thu Sep 8 08:58:40 2022 ] Mean training loss: 0.0201.
390
+ [ Thu Sep 8 08:58:40 2022 ] Time consumption: [Data]01%, [Network]99%
391
+ [ Thu Sep 8 08:58:40 2022 ] Eval epoch: 77
392
+ [ Thu Sep 8 09:05:22 2022 ] Epoch 77 Curr Acc: (27279/50919)53.57%
393
+ [ Thu Sep 8 09:05:22 2022 ] Epoch 54 Best Acc 54.55%
394
+ [ Thu Sep 8 09:05:22 2022 ] Training epoch: 78
395
+ [ Thu Sep 8 09:05:22 2022 ] Learning rate: 0.0015000000000000002
396
+ [ Thu Sep 8 09:11:58 2022 ] Mean training loss: 0.0191.
397
+ [ Thu Sep 8 09:11:58 2022 ] Time consumption: [Data]01%, [Network]99%
398
+ [ Thu Sep 8 09:11:58 2022 ] Eval epoch: 78
399
+ [ Thu Sep 8 09:18:40 2022 ] Epoch 78 Curr Acc: (27376/50919)53.76%
400
+ [ Thu Sep 8 09:18:40 2022 ] Epoch 54 Best Acc 54.55%
401
+ [ Thu Sep 8 09:18:41 2022 ] Training epoch: 79
402
+ [ Thu Sep 8 09:18:41 2022 ] Learning rate: 0.0015000000000000002
403
+ [ Thu Sep 8 09:25:16 2022 ] Mean training loss: 0.0169.
404
+ [ Thu Sep 8 09:25:16 2022 ] Time consumption: [Data]01%, [Network]99%
405
+ [ Thu Sep 8 09:25:16 2022 ] Eval epoch: 79
406
+ [ Thu Sep 8 09:31:59 2022 ] Epoch 79 Curr Acc: (27226/50919)53.47%
407
+ [ Thu Sep 8 09:31:59 2022 ] Epoch 54 Best Acc 54.55%
408
+ [ Thu Sep 8 09:31:59 2022 ] Training epoch: 80
409
+ [ Thu Sep 8 09:31:59 2022 ] Learning rate: 0.0015000000000000002
410
+ [ Thu Sep 8 09:38:34 2022 ] Mean training loss: 0.0154.
411
+ [ Thu Sep 8 09:38:34 2022 ] Time consumption: [Data]01%, [Network]99%
412
+ [ Thu Sep 8 09:38:34 2022 ] Eval epoch: 80
413
+ [ Thu Sep 8 09:45:16 2022 ] Epoch 80 Curr Acc: (26624/50919)52.29%
414
+ [ Thu Sep 8 09:45:16 2022 ] Epoch 54 Best Acc 54.55%
415
+ [ Thu Sep 8 09:45:16 2022 ] Training epoch: 81
416
+ [ Thu Sep 8 09:45:16 2022 ] Learning rate: 0.0015000000000000002
417
+ [ Thu Sep 8 09:51:52 2022 ] Mean training loss: 0.0160.
418
+ [ Thu Sep 8 09:51:52 2022 ] Time consumption: [Data]01%, [Network]99%
419
+ [ Thu Sep 8 09:51:52 2022 ] Eval epoch: 81
420
+ [ Thu Sep 8 09:58:34 2022 ] Epoch 81 Curr Acc: (27010/50919)53.05%
421
+ [ Thu Sep 8 09:58:34 2022 ] Epoch 54 Best Acc 54.55%
422
+ [ Thu Sep 8 09:58:34 2022 ] Training epoch: 82
423
+ [ Thu Sep 8 09:58:34 2022 ] Learning rate: 0.0015000000000000002
424
+ [ Thu Sep 8 10:05:09 2022 ] Mean training loss: 0.0151.
425
+ [ Thu Sep 8 10:05:09 2022 ] Time consumption: [Data]01%, [Network]99%
426
+ [ Thu Sep 8 10:05:09 2022 ] Eval epoch: 82
427
+ [ Thu Sep 8 10:11:52 2022 ] Epoch 82 Curr Acc: (27009/50919)53.04%
428
+ [ Thu Sep 8 10:11:52 2022 ] Epoch 54 Best Acc 54.55%
429
+ [ Thu Sep 8 10:11:52 2022 ] Training epoch: 83
430
+ [ Thu Sep 8 10:11:52 2022 ] Learning rate: 0.0015000000000000002
431
+ [ Thu Sep 8 10:18:27 2022 ] Mean training loss: 0.0148.
432
+ [ Thu Sep 8 10:18:27 2022 ] Time consumption: [Data]01%, [Network]99%
433
+ [ Thu Sep 8 10:18:27 2022 ] Eval epoch: 83
434
+ [ Thu Sep 8 10:25:10 2022 ] Epoch 83 Curr Acc: (27217/50919)53.45%
435
+ [ Thu Sep 8 10:25:10 2022 ] Epoch 54 Best Acc 54.55%
436
+ [ Thu Sep 8 10:25:10 2022 ] Training epoch: 84
437
+ [ Thu Sep 8 10:25:10 2022 ] Learning rate: 0.0015000000000000002
438
+ [ Thu Sep 8 10:31:44 2022 ] Mean training loss: 0.0149.
439
+ [ Thu Sep 8 10:31:44 2022 ] Time consumption: [Data]01%, [Network]99%
440
+ [ Thu Sep 8 10:31:44 2022 ] Eval epoch: 84
441
+ [ Thu Sep 8 10:38:27 2022 ] Epoch 84 Curr Acc: (27020/50919)53.06%
442
+ [ Thu Sep 8 10:38:27 2022 ] Epoch 54 Best Acc 54.55%
443
+ [ Thu Sep 8 10:38:27 2022 ] Training epoch: 85
444
+ [ Thu Sep 8 10:38:27 2022 ] Learning rate: 0.0015000000000000002
445
+ [ Thu Sep 8 10:45:01 2022 ] Mean training loss: 0.0148.
446
+ [ Thu Sep 8 10:45:01 2022 ] Time consumption: [Data]01%, [Network]99%
447
+ [ Thu Sep 8 10:45:01 2022 ] Eval epoch: 85
448
+ [ Thu Sep 8 10:51:44 2022 ] Epoch 85 Curr Acc: (27510/50919)54.03%
449
+ [ Thu Sep 8 10:51:44 2022 ] Epoch 54 Best Acc 54.55%
450
+ [ Thu Sep 8 10:51:44 2022 ] Training epoch: 86
451
+ [ Thu Sep 8 10:51:44 2022 ] Learning rate: 0.0015000000000000002
452
+ [ Thu Sep 8 10:58:19 2022 ] Mean training loss: 0.0138.
453
+ [ Thu Sep 8 10:58:19 2022 ] Time consumption: [Data]01%, [Network]99%
454
+ [ Thu Sep 8 10:58:19 2022 ] Eval epoch: 86
455
+ [ Thu Sep 8 11:05:02 2022 ] Epoch 86 Curr Acc: (27559/50919)54.12%
456
+ [ Thu Sep 8 11:05:02 2022 ] Epoch 54 Best Acc 54.55%
457
+ [ Thu Sep 8 11:05:02 2022 ] Training epoch: 87
458
+ [ Thu Sep 8 11:05:02 2022 ] Learning rate: 0.0015000000000000002
459
+ [ Thu Sep 8 11:11:38 2022 ] Mean training loss: 0.0150.
460
+ [ Thu Sep 8 11:11:38 2022 ] Time consumption: [Data]01%, [Network]99%
461
+ [ Thu Sep 8 11:11:38 2022 ] Eval epoch: 87
462
+ [ Thu Sep 8 11:18:21 2022 ] Epoch 87 Curr Acc: (27731/50919)54.46%
463
+ [ Thu Sep 8 11:18:21 2022 ] Epoch 54 Best Acc 54.55%
464
+ [ Thu Sep 8 11:18:21 2022 ] Training epoch: 88
465
+ [ Thu Sep 8 11:18:21 2022 ] Learning rate: 0.0015000000000000002
466
+ [ Thu Sep 8 11:24:56 2022 ] Mean training loss: 0.0126.
467
+ [ Thu Sep 8 11:24:56 2022 ] Time consumption: [Data]01%, [Network]99%
468
+ [ Thu Sep 8 11:24:56 2022 ] Eval epoch: 88
469
+ [ Thu Sep 8 11:31:39 2022 ] Epoch 88 Curr Acc: (27229/50919)53.48%
470
+ [ Thu Sep 8 11:31:39 2022 ] Epoch 54 Best Acc 54.55%
471
+ [ Thu Sep 8 11:31:39 2022 ] Training epoch: 89
472
+ [ Thu Sep 8 11:31:39 2022 ] Learning rate: 0.0015000000000000002
473
+ [ Thu Sep 8 11:38:15 2022 ] Mean training loss: 0.0139.
474
+ [ Thu Sep 8 11:38:15 2022 ] Time consumption: [Data]01%, [Network]99%
475
+ [ Thu Sep 8 11:38:15 2022 ] Eval epoch: 89
476
+ [ Thu Sep 8 11:44:58 2022 ] Epoch 89 Curr Acc: (27607/50919)54.22%
477
+ [ Thu Sep 8 11:44:58 2022 ] Epoch 54 Best Acc 54.55%
478
+ [ Thu Sep 8 11:44:58 2022 ] Training epoch: 90
479
+ [ Thu Sep 8 11:44:58 2022 ] Learning rate: 0.0015000000000000002
480
+ [ Thu Sep 8 11:51:34 2022 ] Mean training loss: 0.0133.
481
+ [ Thu Sep 8 11:51:34 2022 ] Time consumption: [Data]01%, [Network]99%
482
+ [ Thu Sep 8 11:51:34 2022 ] Eval epoch: 90
483
+ [ Thu Sep 8 11:58:17 2022 ] Epoch 90 Curr Acc: (27307/50919)53.63%
484
+ [ Thu Sep 8 11:58:17 2022 ] Epoch 54 Best Acc 54.55%
485
+ [ Thu Sep 8 11:58:17 2022 ] Training epoch: 91
486
+ [ Thu Sep 8 11:58:17 2022 ] Learning rate: 0.00015000000000000004
487
+ [ Thu Sep 8 12:04:54 2022 ] Mean training loss: 0.0128.
488
+ [ Thu Sep 8 12:04:54 2022 ] Time consumption: [Data]01%, [Network]99%
489
+ [ Thu Sep 8 12:04:54 2022 ] Eval epoch: 91
490
+ [ Thu Sep 8 12:11:37 2022 ] Epoch 91 Curr Acc: (27251/50919)53.52%
491
+ [ Thu Sep 8 12:11:37 2022 ] Epoch 54 Best Acc 54.55%
492
+ [ Thu Sep 8 12:11:37 2022 ] Training epoch: 92
493
+ [ Thu Sep 8 12:11:37 2022 ] Learning rate: 0.00015000000000000004
494
+ [ Thu Sep 8 12:18:13 2022 ] Mean training loss: 0.0131.
495
+ [ Thu Sep 8 12:18:13 2022 ] Time consumption: [Data]01%, [Network]99%
496
+ [ Thu Sep 8 12:18:13 2022 ] Eval epoch: 92
497
+ [ Thu Sep 8 12:24:56 2022 ] Epoch 92 Curr Acc: (27446/50919)53.90%
498
+ [ Thu Sep 8 12:24:56 2022 ] Epoch 54 Best Acc 54.55%
499
+ [ Thu Sep 8 12:24:56 2022 ] Training epoch: 93
500
+ [ Thu Sep 8 12:24:56 2022 ] Learning rate: 0.00015000000000000004
501
+ [ Thu Sep 8 12:31:32 2022 ] Mean training loss: 0.0135.
502
+ [ Thu Sep 8 12:31:32 2022 ] Time consumption: [Data]01%, [Network]99%
503
+ [ Thu Sep 8 12:31:32 2022 ] Eval epoch: 93
504
+ [ Thu Sep 8 12:38:16 2022 ] Epoch 93 Curr Acc: (26946/50919)52.92%
505
+ [ Thu Sep 8 12:38:16 2022 ] Epoch 54 Best Acc 54.55%
506
+ [ Thu Sep 8 12:38:16 2022 ] Training epoch: 94
507
+ [ Thu Sep 8 12:38:16 2022 ] Learning rate: 0.00015000000000000004
508
+ [ Thu Sep 8 12:44:51 2022 ] Mean training loss: 0.0133.
509
+ [ Thu Sep 8 12:44:51 2022 ] Time consumption: [Data]01%, [Network]99%
510
+ [ Thu Sep 8 12:44:51 2022 ] Eval epoch: 94
511
+ [ Thu Sep 8 12:51:34 2022 ] Epoch 94 Curr Acc: (27578/50919)54.16%
512
+ [ Thu Sep 8 12:51:34 2022 ] Epoch 54 Best Acc 54.55%
513
+ [ Thu Sep 8 12:51:35 2022 ] Training epoch: 95
514
+ [ Thu Sep 8 12:51:35 2022 ] Learning rate: 0.00015000000000000004
515
+ [ Thu Sep 8 12:58:10 2022 ] Mean training loss: 0.0123.
516
+ [ Thu Sep 8 12:58:10 2022 ] Time consumption: [Data]01%, [Network]99%
517
+ [ Thu Sep 8 12:58:10 2022 ] Eval epoch: 95
518
+ [ Thu Sep 8 13:04:53 2022 ] Epoch 95 Curr Acc: (26810/50919)52.65%
519
+ [ Thu Sep 8 13:04:53 2022 ] Epoch 54 Best Acc 54.55%
520
+ [ Thu Sep 8 13:04:53 2022 ] Training epoch: 96
521
+ [ Thu Sep 8 13:04:53 2022 ] Learning rate: 0.00015000000000000004
522
+ [ Thu Sep 8 13:11:27 2022 ] Mean training loss: 0.0134.
523
+ [ Thu Sep 8 13:11:27 2022 ] Time consumption: [Data]01%, [Network]99%
524
+ [ Thu Sep 8 13:11:27 2022 ] Eval epoch: 96
525
+ [ Thu Sep 8 13:18:09 2022 ] Epoch 96 Curr Acc: (25210/50919)49.51%
526
+ [ Thu Sep 8 13:18:09 2022 ] Epoch 54 Best Acc 54.55%
527
+ [ Thu Sep 8 13:18:09 2022 ] Training epoch: 97
528
+ [ Thu Sep 8 13:18:09 2022 ] Learning rate: 0.00015000000000000004
529
+ [ Thu Sep 8 13:24:44 2022 ] Mean training loss: 0.0124.
530
+ [ Thu Sep 8 13:24:44 2022 ] Time consumption: [Data]01%, [Network]99%
531
+ [ Thu Sep 8 13:24:44 2022 ] Eval epoch: 97
532
+ [ Thu Sep 8 13:31:27 2022 ] Epoch 97 Curr Acc: (27151/50919)53.32%
533
+ [ Thu Sep 8 13:31:27 2022 ] Epoch 54 Best Acc 54.55%
534
+ [ Thu Sep 8 13:31:27 2022 ] Training epoch: 98
535
+ [ Thu Sep 8 13:31:27 2022 ] Learning rate: 0.00015000000000000004
536
+ [ Thu Sep 8 13:38:02 2022 ] Mean training loss: 0.0127.
537
+ [ Thu Sep 8 13:38:02 2022 ] Time consumption: [Data]01%, [Network]99%
538
+ [ Thu Sep 8 13:38:02 2022 ] Eval epoch: 98
539
+ [ Thu Sep 8 13:44:45 2022 ] Epoch 98 Curr Acc: (27226/50919)53.47%
540
+ [ Thu Sep 8 13:44:45 2022 ] Epoch 54 Best Acc 54.55%
541
+ [ Thu Sep 8 13:44:45 2022 ] Training epoch: 99
542
+ [ Thu Sep 8 13:44:45 2022 ] Learning rate: 0.00015000000000000004
543
+ [ Thu Sep 8 13:51:18 2022 ] Mean training loss: 0.0125.
544
+ [ Thu Sep 8 13:51:18 2022 ] Time consumption: [Data]01%, [Network]99%
545
+ [ Thu Sep 8 13:51:18 2022 ] Eval epoch: 99
546
+ [ Thu Sep 8 13:58:01 2022 ] Epoch 99 Curr Acc: (26979/50919)52.98%
547
+ [ Thu Sep 8 13:58:01 2022 ] Epoch 54 Best Acc 54.55%
548
+ [ Thu Sep 8 13:58:01 2022 ] Training epoch: 100
549
+ [ Thu Sep 8 13:58:01 2022 ] Learning rate: 0.00015000000000000004
550
+ [ Thu Sep 8 14:04:34 2022 ] Mean training loss: 0.0120.
551
+ [ Thu Sep 8 14:04:34 2022 ] Time consumption: [Data]01%, [Network]99%
552
+ [ Thu Sep 8 14:04:35 2022 ] Eval epoch: 100
553
+ [ Thu Sep 8 14:11:17 2022 ] Epoch 100 Curr Acc: (27486/50919)53.98%
554
+ [ Thu Sep 8 14:11:17 2022 ] Epoch 54 Best Acc 54.55%
555
+ [ Thu Sep 8 14:11:17 2022 ] Training epoch: 101
556
+ [ Thu Sep 8 14:11:17 2022 ] Learning rate: 0.00015000000000000004
557
+ [ Thu Sep 8 14:17:50 2022 ] Mean training loss: 0.0120.
558
+ [ Thu Sep 8 14:17:50 2022 ] Time consumption: [Data]01%, [Network]99%
559
+ [ Thu Sep 8 14:17:50 2022 ] Eval epoch: 101
560
+ [ Thu Sep 8 14:24:33 2022 ] Epoch 101 Curr Acc: (27276/50919)53.57%
561
+ [ Thu Sep 8 14:24:33 2022 ] Epoch 54 Best Acc 54.55%
562
+ [ Thu Sep 8 14:24:33 2022 ] Training epoch: 102
563
+ [ Thu Sep 8 14:24:33 2022 ] Learning rate: 0.00015000000000000004
564
+ [ Thu Sep 8 14:31:06 2022 ] Mean training loss: 0.0124.
565
+ [ Thu Sep 8 14:31:06 2022 ] Time consumption: [Data]01%, [Network]99%
566
+ [ Thu Sep 8 14:31:06 2022 ] Eval epoch: 102
567
+ [ Thu Sep 8 14:37:49 2022 ] Epoch 102 Curr Acc: (27455/50919)53.92%
568
+ [ Thu Sep 8 14:37:49 2022 ] Epoch 54 Best Acc 54.55%
569
+ [ Thu Sep 8 14:37:49 2022 ] Training epoch: 103
570
+ [ Thu Sep 8 14:37:49 2022 ] Learning rate: 0.00015000000000000004
571
+ [ Thu Sep 8 14:44:24 2022 ] Mean training loss: 0.0122.
572
+ [ Thu Sep 8 14:44:24 2022 ] Time consumption: [Data]01%, [Network]99%
573
+ [ Thu Sep 8 14:44:24 2022 ] Eval epoch: 103
574
+ [ Thu Sep 8 14:51:06 2022 ] Epoch 103 Curr Acc: (27485/50919)53.98%
575
+ [ Thu Sep 8 14:51:06 2022 ] Epoch 54 Best Acc 54.55%
576
+ [ Thu Sep 8 14:51:06 2022 ] Training epoch: 104
577
+ [ Thu Sep 8 14:51:06 2022 ] Learning rate: 0.00015000000000000004
578
+ [ Thu Sep 8 14:57:41 2022 ] Mean training loss: 0.0123.
579
+ [ Thu Sep 8 14:57:41 2022 ] Time consumption: [Data]01%, [Network]99%
580
+ [ Thu Sep 8 14:57:41 2022 ] Eval epoch: 104
581
+ [ Thu Sep 8 15:04:23 2022 ] Epoch 104 Curr Acc: (27552/50919)54.11%
582
+ [ Thu Sep 8 15:04:23 2022 ] Epoch 54 Best Acc 54.55%
583
+ [ Thu Sep 8 15:04:23 2022 ] Training epoch: 105
584
+ [ Thu Sep 8 15:04:23 2022 ] Learning rate: 0.00015000000000000004
585
+ [ Thu Sep 8 15:10:58 2022 ] Mean training loss: 0.0117.
586
+ [ Thu Sep 8 15:10:58 2022 ] Time consumption: [Data]01%, [Network]99%
587
+ [ Thu Sep 8 15:10:58 2022 ] Eval epoch: 105
588
+ [ Thu Sep 8 15:17:41 2022 ] Epoch 105 Curr Acc: (27374/50919)53.76%
589
+ [ Thu Sep 8 15:17:41 2022 ] Epoch 54 Best Acc 54.55%
590
+ [ Thu Sep 8 15:17:41 2022 ] Training epoch: 106
591
+ [ Thu Sep 8 15:17:41 2022 ] Learning rate: 0.00015000000000000004
592
+ [ Thu Sep 8 15:24:15 2022 ] Mean training loss: 0.0133.
593
+ [ Thu Sep 8 15:24:15 2022 ] Time consumption: [Data]01%, [Network]99%
594
+ [ Thu Sep 8 15:24:15 2022 ] Eval epoch: 106
595
+ [ Thu Sep 8 15:30:57 2022 ] Epoch 106 Curr Acc: (27505/50919)54.02%
596
+ [ Thu Sep 8 15:30:57 2022 ] Epoch 54 Best Acc 54.55%
597
+ [ Thu Sep 8 15:30:57 2022 ] Training epoch: 107
598
+ [ Thu Sep 8 15:30:57 2022 ] Learning rate: 0.00015000000000000004
599
+ [ Thu Sep 8 15:37:32 2022 ] Mean training loss: 0.0120.
600
+ [ Thu Sep 8 15:37:32 2022 ] Time consumption: [Data]01%, [Network]99%
601
+ [ Thu Sep 8 15:37:32 2022 ] Eval epoch: 107
602
+ [ Thu Sep 8 15:44:15 2022 ] Epoch 107 Curr Acc: (27720/50919)54.44%
603
+ [ Thu Sep 8 15:44:15 2022 ] Epoch 54 Best Acc 54.55%
604
+ [ Thu Sep 8 15:44:15 2022 ] Training epoch: 108
605
+ [ Thu Sep 8 15:44:15 2022 ] Learning rate: 0.00015000000000000004
606
+ [ Thu Sep 8 15:50:48 2022 ] Mean training loss: 0.0119.
607
+ [ Thu Sep 8 15:50:48 2022 ] Time consumption: [Data]01%, [Network]99%
608
+ [ Thu Sep 8 15:50:48 2022 ] Eval epoch: 108
609
+ [ Thu Sep 8 15:57:30 2022 ] Epoch 108 Curr Acc: (27437/50919)53.88%
610
+ [ Thu Sep 8 15:57:30 2022 ] Epoch 54 Best Acc 54.55%
611
+ [ Thu Sep 8 15:57:30 2022 ] Training epoch: 109
612
+ [ Thu Sep 8 15:57:30 2022 ] Learning rate: 0.00015000000000000004
613
+ [ Thu Sep 8 16:04:04 2022 ] Mean training loss: 0.0114.
614
+ [ Thu Sep 8 16:04:04 2022 ] Time consumption: [Data]01%, [Network]99%
615
+ [ Thu Sep 8 16:04:04 2022 ] Eval epoch: 109
616
+ [ Thu Sep 8 16:10:47 2022 ] Epoch 109 Curr Acc: (27373/50919)53.76%
617
+ [ Thu Sep 8 16:10:47 2022 ] Epoch 54 Best Acc 54.55%
618
+ [ Thu Sep 8 16:10:47 2022 ] Training epoch: 110
619
+ [ Thu Sep 8 16:10:47 2022 ] Learning rate: 0.00015000000000000004
620
+ [ Thu Sep 8 16:17:21 2022 ] Mean training loss: 0.0129.
621
+ [ Thu Sep 8 16:17:21 2022 ] Time consumption: [Data]01%, [Network]99%
622
+ [ Thu Sep 8 16:17:21 2022 ] Eval epoch: 110
623
+ [ Thu Sep 8 16:24:03 2022 ] Epoch 110 Curr Acc: (27047/50919)53.12%
624
+ [ Thu Sep 8 16:24:03 2022 ] Epoch 54 Best Acc 54.55%
625
+ [ Thu Sep 8 16:24:03 2022 ] epoch: 54, best accuracy: 0.5454938235236356
626
+ [ Thu Sep 8 16:24:03 2022 ] Experiment: ./work_dir/ntu120/xsub_jm
627
+ [ Thu Sep 8 16:24:04 2022 ] # generator parameters: 2.922995 M.
628
+ [ Thu Sep 8 16:24:04 2022 ] Load weights from ./runs/ntu120/xsub_jm/runs-53-105300.pt.
629
+ [ Thu Sep 8 16:24:04 2022 ] Eval epoch: 1
630
+ [ Thu Sep 8 16:30:46 2022 ] Epoch 1 Curr Acc: (27776/50919)54.55%
631
+ [ Thu Sep 8 16:30:46 2022 ] Epoch 54 Best Acc 54.55%
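The tail of this log shows the pattern used throughout: each evaluation prints the current accuracy next to the best accuracy seen so far (epoch 54 here), and the best checkpoint is finally reloaded for one last "Eval epoch: 1" pass that reproduces 54.55%. A minimal sketch of that bookkeeping, reusing the counts printed above (the loop body stands in for the trainer's own evaluation code and is purely illustrative):

# Best-accuracy tracking as suggested by the "Curr Acc" / "Best Acc" lines above.
# The (correct, total) pairs are copied from the log; everything else is illustrative.
eval_results = {66: (26613, 50919), 67: (25165, 50919), 68: (26270, 50919)}
best_acc, best_epoch = 27776 / 50919, 54            # best result seen before this excerpt
for epoch in sorted(eval_results):
    correct, total = eval_results[epoch]
    acc = correct / total
    print(f'Epoch {epoch} Curr Acc: ({correct}/{total}){acc:.2%}')
    if acc > best_acc:                               # would also trigger saving a checkpoint
        best_acc, best_epoch = acc, epoch
    print(f'Epoch {best_epoch} Best Acc {best_acc:.2%}')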
ckpt/Others/MST-GCN/ntu60_xsub/xsub_b/AEMST_GCN.py ADDED
@@ -0,0 +1,168 @@
1
+ import torch
2
+ import torch.nn as nn
3
+ import torch.nn.functional as F
4
+
5
+ import numpy as np
6
+ import math
7
+
8
+ import sys
9
+ sys.path.append('../')
10
+ from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \
11
+ MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \
12
+ MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer
13
+ from model.activations import Activations
14
+ from model.utils import import_class, conv_branch_init, conv_init, bn_init
15
+ from model.attentions import Attention_Layer
16
+
17
+ # import model.attentions
18
+
19
+ __block_type__ = {
20
+ 'basic': (Basic_GCN_layer, Basic_TCN_layer),
21
+ 'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer),
22
+ 'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer),
23
+ 'ms': (MS_GCN_layer, MS_TCN_layer),
24
+ 'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer),
25
+ }
26
+
27
+
28
+ class Model(nn.Module):
29
+ def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten,
30
+ **kwargs):
31
+ super(Model, self).__init__()
32
+ kwargs['act'] = Activations(kwargs['act'])
33
+ atten = None if atten == 'None' else atten
34
+ if graph is None:
35
+ raise ValueError()
36
+ else:
37
+ Graph = import_class(graph)
38
+ self.graph = Graph(**graph_args)
39
+ A = self.graph.A
40
+
41
+ self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point)
42
+
43
+ self.layers = nn.ModuleList()
44
+
45
+ for i, block in enumerate(block_args):
46
+ if i == 0:
47
+ self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2],
48
+ kernel_size=kernel_size, stride=block[3], A=A, block_type='basic',
49
+ atten=None, **kwargs))
50
+ else:
51
+ self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2],
52
+ kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type,
53
+ atten=atten, **kwargs))
54
+
55
+ self.gap = nn.AdaptiveAvgPool2d(1)
56
+ self.fc = nn.Linear(block_args[-1][1], num_class)
57
+
58
+ for m in self.modules():
59
+ if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer):
60
+ for mm in m.modules():
61
+ if isinstance(mm, nn.Conv2d):
62
+ conv_branch_init(mm, self.graph.A.shape[0])
63
+ if isinstance(mm, nn.BatchNorm2d):
64
+ bn_init(mm, 1)
65
+ elif isinstance(m, nn.Conv2d):
66
+ conv_init(m)
67
+ elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d):
68
+ bn_init(m, 1)
69
+ elif isinstance(m, nn.Linear):
70
+ nn.init.normal_(m.weight, 0, math.sqrt(2. / num_class))
71
+
72
+ def forward(self, x):
73
+ N, C, T, V, M = x.size()
74
+
75
+ x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T) # N C T V M --> N M V C T
76
+ x = self.data_bn(x)
77
+ x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
78
+
79
+ for i, layer in enumerate(self.layers):
80
+ x = layer(x)
81
+
82
+ features = x
83
+
84
+ x = self.gap(x).view(N, M, -1).mean(dim=1)
85
+ x = self.fc(x)
86
+
87
+ return features, x
88
+
89
+
90
+ class MST_GCN_block(nn.Module):
91
+ def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs):
92
+ super(MST_GCN_block, self).__init__()
93
+ self.atten = atten
94
+ self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A,
95
+ residual=residual, **kwargs)
96
+ self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride,
97
+ residual=residual, **kwargs)
98
+ if atten is not None:
99
+ self.att = Attention_Layer(out_channels, atten, **kwargs)
100
+
101
+ def forward(self, x):
102
+ return self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x))
103
+
104
+
105
+ if __name__ == '__main__':
106
+ import sys
107
+ import time
108
+
109
+ parts = [
110
+ np.array([5, 6, 7, 8, 22, 23]) - 1, # left_arm
111
+ np.array([9, 10, 11, 12, 24, 25]) - 1, # right_arm
112
+ np.array([13, 14, 15, 16]) - 1, # left_leg
113
+ np.array([17, 18, 19, 20]) - 1, # right_leg
114
+ np.array([1, 2, 3, 4, 21]) - 1 # torso
115
+ ]
116
+
117
+ warmup_iter = 3
118
+ test_iter = 10
119
+ sys.path.append('/home/chenzhan/mywork/MST-GCN/')
120
+ from thop import profile
121
+ basic_channels = 112
122
+ cfgs = {
123
+ 'num_class': 2,
124
+ 'num_point': 25,
125
+ 'num_person': 1,
126
+ 'block_args': [[2, basic_channels, False, 1],
127
+ [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1],
128
+ [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1],
129
+ [basic_channels*2, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]],
130
+ 'graph': 'graph.ntu_rgb_d.Graph',
131
+ 'graph_args': {'labeling_mode': 'spatial'},
132
+ 'kernel_size': 9,
133
+ 'block_type': 'ms',
134
+ 'reduct_ratio': 2,
135
+ 'expand_ratio': 0,
136
+ 't_scale': 4,
137
+ 'layer_type': 'sep',
138
+ 'act': 'relu',
139
+ 's_scale': 4,
140
+ 'atten': 'stcja',
141
+ 'bias': True,
142
+ 'parts': parts
143
+ }
144
+
145
+ model = Model(**cfgs)
146
+
147
+ N, C, T, V, M = 4, 2, 16, 25, 1
148
+ inputs = torch.rand(N, C, T, V, M)
149
+
150
+ for i in range(warmup_iter + test_iter):
151
+ if i == warmup_iter:
152
+ start_time = time.time()
153
+ features, outputs = model(inputs)  # forward returns (features, logits); keep the logits for the size check below
154
+ end_time = time.time()
155
+
156
+ total_time = end_time - start_time
157
+ print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format(
158
+ total_time, test_iter, total_time/test_iter/N))
159
+
160
+ print(outputs.size())
161
+
162
+ hereflops, params = profile(model, inputs=(inputs,), verbose=False)
163
+ print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N))
164
+ print('# Params is {} M'.format(sum(param.numel() for param in model.parameters()) / 10 ** 6))
165
+
166
+
167
+
168
+
ckpt/Others/MST-GCN/ntu60_xsub/xsub_b/config.yaml ADDED
@@ -0,0 +1,107 @@
1
+ base_lr: 0.15
2
+ batch_size: 8
3
+ config: config/ntu/xsub_b.yaml
4
+ device:
5
+ - 0
6
+ eval_interval: 5
7
+ feeder: feeders.feeder.Feeder
8
+ ignore_weights: []
9
+ local_rank: 0
10
+ log_interval: 100
11
+ model: model.AEMST_GCN.Model
12
+ model_args:
13
+ act: relu
14
+ atten: None
15
+ bias: true
16
+ block_args:
17
+ - - 3
18
+ - 112
19
+ - false
20
+ - 1
21
+ - - 112
22
+ - 112
23
+ - true
24
+ - 1
25
+ - - 112
26
+ - 112
27
+ - true
28
+ - 1
29
+ - - 112
30
+ - 112
31
+ - true
32
+ - 1
33
+ - - 112
34
+ - 224
35
+ - true
36
+ - 2
37
+ - - 224
38
+ - 224
39
+ - true
40
+ - 1
41
+ - - 224
42
+ - 224
43
+ - true
44
+ - 1
45
+ - - 224
46
+ - 448
47
+ - true
48
+ - 2
49
+ - - 448
50
+ - 448
51
+ - true
52
+ - 1
53
+ - - 448
54
+ - 448
55
+ - true
56
+ - 1
57
+ block_type: ms
58
+ expand_ratio: 0
59
+ graph: graph.ntu_rgb_d.Graph
60
+ graph_args:
61
+ labeling_mode: spatial
62
+ kernel_size: 9
63
+ layer_type: basic
64
+ num_class: 60
65
+ num_person: 2
66
+ num_point: 25
67
+ reduct_ratio: 2
68
+ s_scale: 4
69
+ t_scale: 4
70
+ model_path: ''
71
+ model_saved_name: ./runs/ntu/xsub_b/runs
72
+ nesterov: true
73
+ num_epoch: 110
74
+ num_worker: 32
75
+ only_train_epoch: 0
76
+ only_train_part: false
77
+ optimizer: SGD
78
+ phase: train
79
+ print_log: true
80
+ save_interval: 1
81
+ save_score: true
82
+ seed: 1
83
+ show_topk:
84
+ - 1
85
+ - 5
86
+ start_epoch: 0
87
+ step:
88
+ - 50
89
+ - 70
90
+ - 90
91
+ test_batch_size: 64
92
+ test_feeder_args:
93
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_bone.npy
94
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl
95
+ train_feeder_args:
96
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_bone.npy
97
+ debug: false
98
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl
99
+ normalization: false
100
+ random_choose: false
101
+ random_move: false
102
+ random_shift: false
103
+ window_size: -1
104
+ warm_up_epoch: 10
105
+ weight_decay: 0.0001
106
+ weights: null
107
+ work_dir: ./work_dir/ntu/xsub_b
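This config mirrors the Parameters dictionary echoed at the top of the corresponding log.txt. A rough sketch of turning such a file into a model instance, assuming PyYAML is installed and the repo's model/ and graph/ packages are importable (this is not the repo's own launcher, just an illustration):

import importlib
import yaml                                   # PyYAML, assumed available

def import_class(name):
    # same idea as model.utils.import_class: 'model.AEMST_GCN.Model' -> class object
    module_name, class_name = name.rsplit('.', 1)
    return getattr(importlib.import_module(module_name), class_name)

with open('config.yaml') as f:                # illustrative path
    cfg = yaml.safe_load(f)

Model = import_class(cfg['model'])            # model.AEMST_GCN.Model
net = Model(**cfg['model_args'])              # block_args, graph, kernel_size, ... as listed above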
ckpt/Others/MST-GCN/ntu60_xsub/xsub_b/epoch1_test_score.pkl ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c48c593e2b823f8bd9f72279376db1fd09671dcb5378ac83302708c76e5c8882
3
+ size 4979902
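epoch1_test_score.pkl holds the per-sample classification scores written out because save_score is true in the config. Assuming the usual convention in this family of repos, a pickled mapping from sample name to a score vector, two streams could be fused roughly as below (the paths, equal weighting, and the exact pickle layout are assumptions, not taken from this repository):

import pickle
import numpy as np

def load_scores(path):
    with open(path, 'rb') as f:
        return dict(pickle.load(f))           # assumed: {sample_name: array of class scores}

joint = load_scores('xsub_j/epoch1_test_score.pkl')    # illustrative paths
bone = load_scores('xsub_b/epoch1_test_score.pkl')

fused = {name: int(np.argmax(joint[name] + bone[name])) for name in joint}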
ckpt/Others/MST-GCN/ntu60_xsub/xsub_b/log.txt ADDED
@@ -0,0 +1,631 @@
1
+ [ Thu Sep 8 17:07:44 2022 ] # generator parameters: 2.896055 M.
2
+ [ Thu Sep 8 17:07:44 2022 ] Parameters:
3
+ {'work_dir': './work_dir/ntu/xsub_b', 'model_saved_name': './runs/ntu/xsub_b/runs', 'config': 'config/ntu/xsub_b.yaml', 'phase': 'train', 'save_score': True, 'seed': 1, 'log_interval': 100, 'save_interval': 1, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_bone.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_bone.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl'}, 'model': 'model.AEMST_GCN.Model', 'model_args': {'num_class': 60, 'num_point': 25, 'num_person': 2, 'block_args': [[3, 112, False, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 224, True, 2], [224, 224, True, 1], [224, 224, True, 1], [224, 448, True, 2], [448, 448, True, 1], [448, 448, True, 1]], 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}, 'kernel_size': 9, 'block_type': 'ms', 'reduct_ratio': 2, 'expand_ratio': 0, 's_scale': 4, 't_scale': 4, 'layer_type': 'basic', 'act': 'relu', 'atten': 'None', 'bias': True}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.15, 'step': [50, 70, 90], 'device': [0], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 8, 'test_batch_size': 64, 'start_epoch': 0, 'model_path': '', 'num_epoch': 110, 'weight_decay': 0.0001, 'only_train_part': False, 'only_train_epoch': 0, 'warm_up_epoch': 10, 'local_rank': 0}
4
+
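The learning-rate lines below (0.015 at epoch 1, 0.15 from epoch 10 onward, then 0.015 / 0.0015 / 0.00015 after epochs 50, 70 and 90) are consistent with a linear warm-up over warm_up_epoch epochs followed by a 10x step decay at each milestone in step. A small sketch that reproduces the printed values; this is inferred from the log and config, not copied from the repo's trainer:

def scheduled_lr(epoch, base_lr=0.15, steps=(50, 70, 90), warm_up_epoch=10):
    # epoch is 1-indexed, matching the "Training epoch:" lines in this log
    if epoch <= warm_up_epoch:                       # linear warm-up: 0.015, 0.03, ..., 0.15
        return base_lr * epoch / warm_up_epoch
    return base_lr * 0.1 ** sum(epoch > s for s in steps)   # 10x decay after epochs 50/70/90

for e in (1, 7, 10, 51, 71, 91):
    print(e, scheduled_lr(e))    # 0.015, 0.105..., 0.15, 0.015..., 0.0015..., 0.00015...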
5
+ [ Thu Sep 8 17:07:44 2022 ] Training epoch: 1
6
+ [ Thu Sep 8 17:07:44 2022 ] Learning rate: 0.015
7
+ [ Thu Sep 8 17:11:06 2022 ] Mean training loss: 3.1116.
8
+ [ Thu Sep 8 17:11:06 2022 ] Time consumption: [Data]02%, [Network]98%
9
+ [ Thu Sep 8 17:11:06 2022 ] Training epoch: 2
10
+ [ Thu Sep 8 17:11:06 2022 ] Learning rate: 0.03
11
+ [ Thu Sep 8 17:14:28 2022 ] Mean training loss: 2.2707.
12
+ [ Thu Sep 8 17:14:28 2022 ] Time consumption: [Data]01%, [Network]98%
13
+ [ Thu Sep 8 17:14:28 2022 ] Training epoch: 3
14
+ [ Thu Sep 8 17:14:28 2022 ] Learning rate: 0.045
15
+ [ Thu Sep 8 17:17:49 2022 ] Mean training loss: 1.8360.
16
+ [ Thu Sep 8 17:17:49 2022 ] Time consumption: [Data]01%, [Network]98%
17
+ [ Thu Sep 8 17:17:49 2022 ] Training epoch: 4
18
+ [ Thu Sep 8 17:17:49 2022 ] Learning rate: 0.06
19
+ [ Thu Sep 8 17:21:11 2022 ] Mean training loss: 1.6257.
20
+ [ Thu Sep 8 17:21:11 2022 ] Time consumption: [Data]01%, [Network]98%
21
+ [ Thu Sep 8 17:21:11 2022 ] Training epoch: 5
22
+ [ Thu Sep 8 17:21:11 2022 ] Learning rate: 0.075
23
+ [ Thu Sep 8 17:24:32 2022 ] Mean training loss: 1.4463.
24
+ [ Thu Sep 8 17:24:32 2022 ] Time consumption: [Data]01%, [Network]99%
25
+ [ Thu Sep 8 17:24:32 2022 ] Training epoch: 6
26
+ [ Thu Sep 8 17:24:32 2022 ] Learning rate: 0.09
27
+ [ Thu Sep 8 17:27:53 2022 ] Mean training loss: 1.3583.
28
+ [ Thu Sep 8 17:27:53 2022 ] Time consumption: [Data]01%, [Network]99%
29
+ [ Thu Sep 8 17:27:53 2022 ] Training epoch: 7
30
+ [ Thu Sep 8 17:27:53 2022 ] Learning rate: 0.10500000000000001
31
+ [ Thu Sep 8 17:31:14 2022 ] Mean training loss: 1.2543.
32
+ [ Thu Sep 8 17:31:14 2022 ] Time consumption: [Data]01%, [Network]98%
33
+ [ Thu Sep 8 17:31:14 2022 ] Training epoch: 8
34
+ [ Thu Sep 8 17:31:14 2022 ] Learning rate: 0.12
35
+ [ Thu Sep 8 17:34:35 2022 ] Mean training loss: 1.1767.
36
+ [ Thu Sep 8 17:34:35 2022 ] Time consumption: [Data]01%, [Network]99%
37
+ [ Thu Sep 8 17:34:35 2022 ] Training epoch: 9
38
+ [ Thu Sep 8 17:34:35 2022 ] Learning rate: 0.13499999999999998
39
+ [ Thu Sep 8 17:37:56 2022 ] Mean training loss: 1.1567.
40
+ [ Thu Sep 8 17:37:56 2022 ] Time consumption: [Data]01%, [Network]99%
41
+ [ Thu Sep 8 17:37:56 2022 ] Training epoch: 10
42
+ [ Thu Sep 8 17:37:56 2022 ] Learning rate: 0.15
43
+ [ Thu Sep 8 17:41:17 2022 ] Mean training loss: 1.1013.
44
+ [ Thu Sep 8 17:41:17 2022 ] Time consumption: [Data]01%, [Network]99%
45
+ [ Thu Sep 8 17:41:17 2022 ] Training epoch: 11
46
+ [ Thu Sep 8 17:41:17 2022 ] Learning rate: 0.15
47
+ [ Thu Sep 8 17:44:38 2022 ] Mean training loss: 1.0563.
48
+ [ Thu Sep 8 17:44:38 2022 ] Time consumption: [Data]01%, [Network]99%
49
+ [ Thu Sep 8 17:44:38 2022 ] Training epoch: 12
50
+ [ Thu Sep 8 17:44:38 2022 ] Learning rate: 0.15
51
+ [ Thu Sep 8 17:48:00 2022 ] Mean training loss: 1.0204.
52
+ [ Thu Sep 8 17:48:00 2022 ] Time consumption: [Data]01%, [Network]99%
53
+ [ Thu Sep 8 17:48:00 2022 ] Training epoch: 13
54
+ [ Thu Sep 8 17:48:00 2022 ] Learning rate: 0.15
55
+ [ Thu Sep 8 17:51:21 2022 ] Mean training loss: 0.9678.
56
+ [ Thu Sep 8 17:51:21 2022 ] Time consumption: [Data]01%, [Network]99%
57
+ [ Thu Sep 8 17:51:21 2022 ] Training epoch: 14
58
+ [ Thu Sep 8 17:51:21 2022 ] Learning rate: 0.15
59
+ [ Thu Sep 8 17:54:42 2022 ] Mean training loss: 0.9507.
60
+ [ Thu Sep 8 17:54:42 2022 ] Time consumption: [Data]01%, [Network]98%
61
+ [ Thu Sep 8 17:54:42 2022 ] Training epoch: 15
62
+ [ Thu Sep 8 17:54:42 2022 ] Learning rate: 0.15
63
+ [ Thu Sep 8 17:58:03 2022 ] Mean training loss: 0.9140.
64
+ [ Thu Sep 8 17:58:03 2022 ] Time consumption: [Data]01%, [Network]99%
65
+ [ Thu Sep 8 17:58:03 2022 ] Training epoch: 16
66
+ [ Thu Sep 8 17:58:03 2022 ] Learning rate: 0.15
67
+ [ Thu Sep 8 18:01:22 2022 ] Mean training loss: 0.8949.
68
+ [ Thu Sep 8 18:01:22 2022 ] Time consumption: [Data]01%, [Network]99%
69
+ [ Thu Sep 8 18:01:22 2022 ] Training epoch: 17
70
+ [ Thu Sep 8 18:01:22 2022 ] Learning rate: 0.15
71
+ [ Thu Sep 8 18:04:42 2022 ] Mean training loss: 0.8668.
72
+ [ Thu Sep 8 18:04:42 2022 ] Time consumption: [Data]01%, [Network]99%
73
+ [ Thu Sep 8 18:04:42 2022 ] Training epoch: 18
74
+ [ Thu Sep 8 18:04:42 2022 ] Learning rate: 0.15
75
+ [ Thu Sep 8 18:08:03 2022 ] Mean training loss: 0.8622.
76
+ [ Thu Sep 8 18:08:03 2022 ] Time consumption: [Data]01%, [Network]99%
77
+ [ Thu Sep 8 18:08:03 2022 ] Training epoch: 19
78
+ [ Thu Sep 8 18:08:03 2022 ] Learning rate: 0.15
79
+ [ Thu Sep 8 18:11:22 2022 ] Mean training loss: 0.8276.
80
+ [ Thu Sep 8 18:11:22 2022 ] Time consumption: [Data]01%, [Network]99%
81
+ [ Thu Sep 8 18:11:22 2022 ] Training epoch: 20
82
+ [ Thu Sep 8 18:11:22 2022 ] Learning rate: 0.15
83
+ [ Thu Sep 8 18:14:43 2022 ] Mean training loss: 0.8280.
84
+ [ Thu Sep 8 18:14:43 2022 ] Time consumption: [Data]01%, [Network]99%
85
+ [ Thu Sep 8 18:14:43 2022 ] Training epoch: 21
86
+ [ Thu Sep 8 18:14:43 2022 ] Learning rate: 0.15
87
+ [ Thu Sep 8 18:18:04 2022 ] Mean training loss: 0.8205.
88
+ [ Thu Sep 8 18:18:04 2022 ] Time consumption: [Data]01%, [Network]99%
89
+ [ Thu Sep 8 18:18:04 2022 ] Training epoch: 22
90
+ [ Thu Sep 8 18:18:04 2022 ] Learning rate: 0.15
91
+ [ Thu Sep 8 18:21:26 2022 ] Mean training loss: 0.8259.
92
+ [ Thu Sep 8 18:21:26 2022 ] Time consumption: [Data]01%, [Network]98%
93
+ [ Thu Sep 8 18:21:26 2022 ] Training epoch: 23
94
+ [ Thu Sep 8 18:21:26 2022 ] Learning rate: 0.15
95
+ [ Thu Sep 8 18:24:47 2022 ] Mean training loss: 0.8165.
96
+ [ Thu Sep 8 18:24:47 2022 ] Time consumption: [Data]01%, [Network]98%
97
+ [ Thu Sep 8 18:24:47 2022 ] Training epoch: 24
98
+ [ Thu Sep 8 18:24:47 2022 ] Learning rate: 0.15
99
+ [ Thu Sep 8 18:28:09 2022 ] Mean training loss: 0.7833.
100
+ [ Thu Sep 8 18:28:09 2022 ] Time consumption: [Data]01%, [Network]98%
101
+ [ Thu Sep 8 18:28:09 2022 ] Training epoch: 25
102
+ [ Thu Sep 8 18:28:09 2022 ] Learning rate: 0.15
103
+ [ Thu Sep 8 18:31:32 2022 ] Mean training loss: 0.7724.
104
+ [ Thu Sep 8 18:31:32 2022 ] Time consumption: [Data]01%, [Network]98%
105
+ [ Thu Sep 8 18:31:32 2022 ] Training epoch: 26
106
+ [ Thu Sep 8 18:31:32 2022 ] Learning rate: 0.15
107
+ [ Thu Sep 8 18:34:53 2022 ] Mean training loss: 0.7551.
108
+ [ Thu Sep 8 18:34:53 2022 ] Time consumption: [Data]01%, [Network]98%
109
+ [ Thu Sep 8 18:34:53 2022 ] Training epoch: 27
110
+ [ Thu Sep 8 18:34:53 2022 ] Learning rate: 0.15
111
+ [ Thu Sep 8 18:38:13 2022 ] Mean training loss: 0.7380.
112
+ [ Thu Sep 8 18:38:13 2022 ] Time consumption: [Data]01%, [Network]99%
113
+ [ Thu Sep 8 18:38:13 2022 ] Training epoch: 28
114
+ [ Thu Sep 8 18:38:13 2022 ] Learning rate: 0.15
115
+ [ Thu Sep 8 18:41:33 2022 ] Mean training loss: 0.7460.
116
+ [ Thu Sep 8 18:41:33 2022 ] Time consumption: [Data]01%, [Network]99%
117
+ [ Thu Sep 8 18:41:33 2022 ] Training epoch: 29
118
+ [ Thu Sep 8 18:41:33 2022 ] Learning rate: 0.15
119
+ [ Thu Sep 8 18:44:54 2022 ] Mean training loss: 0.7339.
120
+ [ Thu Sep 8 18:44:54 2022 ] Time consumption: [Data]01%, [Network]99%
121
+ [ Thu Sep 8 18:44:54 2022 ] Training epoch: 30
122
+ [ Thu Sep 8 18:44:54 2022 ] Learning rate: 0.15
123
+ [ Thu Sep 8 18:48:15 2022 ] Mean training loss: 0.7232.
124
+ [ Thu Sep 8 18:48:15 2022 ] Time consumption: [Data]01%, [Network]99%
125
+ [ Thu Sep 8 18:48:15 2022 ] Training epoch: 31
126
+ [ Thu Sep 8 18:48:15 2022 ] Learning rate: 0.15
127
+ [ Thu Sep 8 18:51:37 2022 ] Mean training loss: 0.7467.
128
+ [ Thu Sep 8 18:51:37 2022 ] Time consumption: [Data]01%, [Network]99%
129
+ [ Thu Sep 8 18:51:37 2022 ] Training epoch: 32
130
+ [ Thu Sep 8 18:51:37 2022 ] Learning rate: 0.15
131
+ [ Thu Sep 8 18:54:58 2022 ] Mean training loss: 0.7282.
132
+ [ Thu Sep 8 18:54:58 2022 ] Time consumption: [Data]01%, [Network]99%
133
+ [ Thu Sep 8 18:54:58 2022 ] Training epoch: 33
134
+ [ Thu Sep 8 18:54:58 2022 ] Learning rate: 0.15
135
+ [ Thu Sep 8 18:58:19 2022 ] Mean training loss: 0.7132.
136
+ [ Thu Sep 8 18:58:19 2022 ] Time consumption: [Data]01%, [Network]99%
137
+ [ Thu Sep 8 18:58:19 2022 ] Training epoch: 34
138
+ [ Thu Sep 8 18:58:19 2022 ] Learning rate: 0.15
139
+ [ Thu Sep 8 19:01:40 2022 ] Mean training loss: 0.7012.
140
+ [ Thu Sep 8 19:01:40 2022 ] Time consumption: [Data]01%, [Network]99%
141
+ [ Thu Sep 8 19:01:40 2022 ] Training epoch: 35
142
+ [ Thu Sep 8 19:01:40 2022 ] Learning rate: 0.15
143
+ [ Thu Sep 8 19:05:01 2022 ] Mean training loss: 0.7128.
144
+ [ Thu Sep 8 19:05:01 2022 ] Time consumption: [Data]01%, [Network]99%
145
+ [ Thu Sep 8 19:05:01 2022 ] Training epoch: 36
146
+ [ Thu Sep 8 19:05:01 2022 ] Learning rate: 0.15
147
+ [ Thu Sep 8 19:08:22 2022 ] Mean training loss: 0.6946.
148
+ [ Thu Sep 8 19:08:22 2022 ] Time consumption: [Data]01%, [Network]99%
149
+ [ Thu Sep 8 19:08:22 2022 ] Training epoch: 37
150
+ [ Thu Sep 8 19:08:22 2022 ] Learning rate: 0.15
151
+ [ Thu Sep 8 19:11:43 2022 ] Mean training loss: 0.7103.
152
+ [ Thu Sep 8 19:11:43 2022 ] Time consumption: [Data]01%, [Network]99%
153
+ [ Thu Sep 8 19:11:43 2022 ] Training epoch: 38
154
+ [ Thu Sep 8 19:11:43 2022 ] Learning rate: 0.15
155
+ [ Thu Sep 8 19:15:05 2022 ] Mean training loss: 0.6890.
156
+ [ Thu Sep 8 19:15:05 2022 ] Time consumption: [Data]02%, [Network]98%
157
+ [ Thu Sep 8 19:15:05 2022 ] Training epoch: 39
158
+ [ Thu Sep 8 19:15:05 2022 ] Learning rate: 0.15
159
+ [ Thu Sep 8 19:18:24 2022 ] Mean training loss: 0.6835.
160
+ [ Thu Sep 8 19:18:24 2022 ] Time consumption: [Data]01%, [Network]99%
161
+ [ Thu Sep 8 19:18:24 2022 ] Training epoch: 40
162
+ [ Thu Sep 8 19:18:24 2022 ] Learning rate: 0.15
163
+ [ Thu Sep 8 19:21:45 2022 ] Mean training loss: 0.6812.
164
+ [ Thu Sep 8 19:21:45 2022 ] Time consumption: [Data]01%, [Network]99%
165
+ [ Thu Sep 8 19:21:45 2022 ] Training epoch: 41
166
+ [ Thu Sep 8 19:21:45 2022 ] Learning rate: 0.15
167
+ [ Thu Sep 8 19:25:04 2022 ] Mean training loss: 0.6613.
168
+ [ Thu Sep 8 19:25:04 2022 ] Time consumption: [Data]01%, [Network]99%
169
+ [ Thu Sep 8 19:25:04 2022 ] Training epoch: 42
170
+ [ Thu Sep 8 19:25:04 2022 ] Learning rate: 0.15
171
+ [ Thu Sep 8 19:28:24 2022 ] Mean training loss: 0.6751.
172
+ [ Thu Sep 8 19:28:24 2022 ] Time consumption: [Data]01%, [Network]99%
173
+ [ Thu Sep 8 19:28:24 2022 ] Training epoch: 43
174
+ [ Thu Sep 8 19:28:24 2022 ] Learning rate: 0.15
175
+ [ Thu Sep 8 19:31:45 2022 ] Mean training loss: 0.6819.
176
+ [ Thu Sep 8 19:31:45 2022 ] Time consumption: [Data]01%, [Network]98%
177
+ [ Thu Sep 8 19:31:45 2022 ] Training epoch: 44
178
+ [ Thu Sep 8 19:31:45 2022 ] Learning rate: 0.15
179
+ [ Thu Sep 8 19:35:06 2022 ] Mean training loss: 0.6711.
180
+ [ Thu Sep 8 19:35:06 2022 ] Time consumption: [Data]01%, [Network]99%
181
+ [ Thu Sep 8 19:35:06 2022 ] Training epoch: 45
182
+ [ Thu Sep 8 19:35:06 2022 ] Learning rate: 0.15
183
+ [ Thu Sep 8 19:38:26 2022 ] Mean training loss: 0.6602.
184
+ [ Thu Sep 8 19:38:26 2022 ] Time consumption: [Data]01%, [Network]99%
185
+ [ Thu Sep 8 19:38:26 2022 ] Training epoch: 46
186
+ [ Thu Sep 8 19:38:26 2022 ] Learning rate: 0.15
187
+ [ Thu Sep 8 19:41:48 2022 ] Mean training loss: 0.6408.
188
+ [ Thu Sep 8 19:41:48 2022 ] Time consumption: [Data]01%, [Network]98%
189
+ [ Thu Sep 8 19:41:48 2022 ] Training epoch: 47
190
+ [ Thu Sep 8 19:41:48 2022 ] Learning rate: 0.15
191
+ [ Thu Sep 8 19:45:10 2022 ] Mean training loss: 0.6448.
192
+ [ Thu Sep 8 19:45:10 2022 ] Time consumption: [Data]02%, [Network]98%
193
+ [ Thu Sep 8 19:45:10 2022 ] Training epoch: 48
194
+ [ Thu Sep 8 19:45:10 2022 ] Learning rate: 0.15
195
+ [ Thu Sep 8 19:48:33 2022 ] Mean training loss: 0.6538.
196
+ [ Thu Sep 8 19:48:33 2022 ] Time consumption: [Data]02%, [Network]98%
197
+ [ Thu Sep 8 19:48:33 2022 ] Training epoch: 49
198
+ [ Thu Sep 8 19:48:33 2022 ] Learning rate: 0.15
199
+ [ Thu Sep 8 19:51:55 2022 ] Mean training loss: 0.6332.
200
+ [ Thu Sep 8 19:51:55 2022 ] Time consumption: [Data]02%, [Network]98%
201
+ [ Thu Sep 8 19:51:55 2022 ] Training epoch: 50
202
+ [ Thu Sep 8 19:51:55 2022 ] Learning rate: 0.15
203
+ [ Thu Sep 8 19:55:17 2022 ] Mean training loss: 0.6572.
204
+ [ Thu Sep 8 19:55:17 2022 ] Time consumption: [Data]02%, [Network]98%
205
+ [ Thu Sep 8 19:55:17 2022 ] Training epoch: 51
206
+ [ Thu Sep 8 19:55:17 2022 ] Learning rate: 0.015
207
+ [ Thu Sep 8 19:58:39 2022 ] Mean training loss: 0.3288.
208
+ [ Thu Sep 8 19:58:39 2022 ] Time consumption: [Data]02%, [Network]98%
209
+ [ Thu Sep 8 19:58:39 2022 ] Eval epoch: 51
210
+ [ Thu Sep 8 20:00:51 2022 ] Epoch 51 Curr Acc: (11075/16487)67.17%
211
+ [ Thu Sep 8 20:00:51 2022 ] Epoch 51 Best Acc 67.17%
212
+ [ Thu Sep 8 20:00:51 2022 ] Training epoch: 52
213
+ [ Thu Sep 8 20:00:51 2022 ] Learning rate: 0.015
214
+ [ Thu Sep 8 20:04:13 2022 ] Mean training loss: 0.2253.
215
+ [ Thu Sep 8 20:04:13 2022 ] Time consumption: [Data]02%, [Network]98%
216
+ [ Thu Sep 8 20:04:13 2022 ] Eval epoch: 52
217
+ [ Thu Sep 8 20:06:26 2022 ] Epoch 52 Curr Acc: (11423/16487)69.28%
218
+ [ Thu Sep 8 20:06:26 2022 ] Epoch 52 Best Acc 69.28%
219
+ [ Thu Sep 8 20:06:26 2022 ] Training epoch: 53
220
+ [ Thu Sep 8 20:06:26 2022 ] Learning rate: 0.015
221
+ [ Thu Sep 8 20:09:47 2022 ] Mean training loss: 0.1931.
222
+ [ Thu Sep 8 20:09:47 2022 ] Time consumption: [Data]02%, [Network]98%
223
+ [ Thu Sep 8 20:09:47 2022 ] Eval epoch: 53
224
+ [ Thu Sep 8 20:12:00 2022 ] Epoch 53 Curr Acc: (11412/16487)69.22%
225
+ [ Thu Sep 8 20:12:00 2022 ] Epoch 52 Best Acc 69.28%
226
+ [ Thu Sep 8 20:12:00 2022 ] Training epoch: 54
227
+ [ Thu Sep 8 20:12:00 2022 ] Learning rate: 0.015
228
+ [ Thu Sep 8 20:15:21 2022 ] Mean training loss: 0.1663.
229
+ [ Thu Sep 8 20:15:21 2022 ] Time consumption: [Data]02%, [Network]98%
230
+ [ Thu Sep 8 20:15:21 2022 ] Eval epoch: 54
231
+ [ Thu Sep 8 20:17:33 2022 ] Epoch 54 Curr Acc: (10686/16487)64.81%
232
+ [ Thu Sep 8 20:17:33 2022 ] Epoch 52 Best Acc 69.28%
233
+ [ Thu Sep 8 20:17:33 2022 ] Training epoch: 55
234
+ [ Thu Sep 8 20:17:33 2022 ] Learning rate: 0.015
235
+ [ Thu Sep 8 20:20:55 2022 ] Mean training loss: 0.1412.
236
+ [ Thu Sep 8 20:20:55 2022 ] Time consumption: [Data]02%, [Network]98%
237
+ [ Thu Sep 8 20:20:55 2022 ] Eval epoch: 55
238
+ [ Thu Sep 8 20:23:07 2022 ] Epoch 55 Curr Acc: (11452/16487)69.46%
239
+ [ Thu Sep 8 20:23:07 2022 ] Epoch 55 Best Acc 69.46%
240
+ [ Thu Sep 8 20:23:07 2022 ] Training epoch: 56
241
+ [ Thu Sep 8 20:23:07 2022 ] Learning rate: 0.015
242
+ [ Thu Sep 8 20:26:29 2022 ] Mean training loss: 0.1234.
243
+ [ Thu Sep 8 20:26:29 2022 ] Time consumption: [Data]02%, [Network]98%
244
+ [ Thu Sep 8 20:26:29 2022 ] Eval epoch: 56
245
+ [ Thu Sep 8 20:28:41 2022 ] Epoch 56 Curr Acc: (10908/16487)66.16%
246
+ [ Thu Sep 8 20:28:41 2022 ] Epoch 55 Best Acc 69.46%
247
+ [ Thu Sep 8 20:28:41 2022 ] Training epoch: 57
248
+ [ Thu Sep 8 20:28:41 2022 ] Learning rate: 0.015
249
+ [ Thu Sep 8 20:32:04 2022 ] Mean training loss: 0.1122.
250
+ [ Thu Sep 8 20:32:04 2022 ] Time consumption: [Data]02%, [Network]98%
251
+ [ Thu Sep 8 20:32:04 2022 ] Eval epoch: 57
252
+ [ Thu Sep 8 20:34:16 2022 ] Epoch 57 Curr Acc: (11402/16487)69.16%
253
+ [ Thu Sep 8 20:34:16 2022 ] Epoch 55 Best Acc 69.46%
254
+ [ Thu Sep 8 20:34:16 2022 ] Training epoch: 58
255
+ [ Thu Sep 8 20:34:16 2022 ] Learning rate: 0.015
256
+ [ Thu Sep 8 20:37:37 2022 ] Mean training loss: 0.0942.
257
+ [ Thu Sep 8 20:37:37 2022 ] Time consumption: [Data]02%, [Network]98%
258
+ [ Thu Sep 8 20:37:37 2022 ] Eval epoch: 58
259
+ [ Thu Sep 8 20:39:49 2022 ] Epoch 58 Curr Acc: (11409/16487)69.20%
260
+ [ Thu Sep 8 20:39:49 2022 ] Epoch 55 Best Acc 69.46%
261
+ [ Thu Sep 8 20:39:49 2022 ] Training epoch: 59
262
+ [ Thu Sep 8 20:39:49 2022 ] Learning rate: 0.015
263
+ [ Thu Sep 8 20:43:11 2022 ] Mean training loss: 0.0820.
264
+ [ Thu Sep 8 20:43:11 2022 ] Time consumption: [Data]02%, [Network]98%
265
+ [ Thu Sep 8 20:43:11 2022 ] Eval epoch: 59
266
+ [ Thu Sep 8 20:45:23 2022 ] Epoch 59 Curr Acc: (11000/16487)66.72%
267
+ [ Thu Sep 8 20:45:23 2022 ] Epoch 55 Best Acc 69.46%
268
+ [ Thu Sep 8 20:45:23 2022 ] Training epoch: 60
269
+ [ Thu Sep 8 20:45:23 2022 ] Learning rate: 0.015
270
+ [ Thu Sep 8 20:48:44 2022 ] Mean training loss: 0.0771.
271
+ [ Thu Sep 8 20:48:44 2022 ] Time consumption: [Data]02%, [Network]98%
272
+ [ Thu Sep 8 20:48:44 2022 ] Eval epoch: 60
273
+ [ Thu Sep 8 20:50:56 2022 ] Epoch 60 Curr Acc: (11084/16487)67.23%
274
+ [ Thu Sep 8 20:50:56 2022 ] Epoch 55 Best Acc 69.46%
275
+ [ Thu Sep 8 20:50:56 2022 ] Training epoch: 61
276
+ [ Thu Sep 8 20:50:56 2022 ] Learning rate: 0.015
277
+ [ Thu Sep 8 20:54:18 2022 ] Mean training loss: 0.0672.
278
+ [ Thu Sep 8 20:54:18 2022 ] Time consumption: [Data]02%, [Network]98%
279
+ [ Thu Sep 8 20:54:18 2022 ] Eval epoch: 61
280
+ [ Thu Sep 8 20:56:30 2022 ] Epoch 61 Curr Acc: (11192/16487)67.88%
281
+ [ Thu Sep 8 20:56:30 2022 ] Epoch 55 Best Acc 69.46%
282
+ [ Thu Sep 8 20:56:30 2022 ] Training epoch: 62
283
+ [ Thu Sep 8 20:56:30 2022 ] Learning rate: 0.015
284
+ [ Thu Sep 8 20:59:52 2022 ] Mean training loss: 0.0621.
285
+ [ Thu Sep 8 20:59:52 2022 ] Time consumption: [Data]02%, [Network]98%
286
+ [ Thu Sep 8 20:59:52 2022 ] Eval epoch: 62
287
+ [ Thu Sep 8 21:02:04 2022 ] Epoch 62 Curr Acc: (10289/16487)62.41%
288
+ [ Thu Sep 8 21:02:04 2022 ] Epoch 55 Best Acc 69.46%
289
+ [ Thu Sep 8 21:02:04 2022 ] Training epoch: 63
290
+ [ Thu Sep 8 21:02:04 2022 ] Learning rate: 0.015
291
+ [ Thu Sep 8 21:05:25 2022 ] Mean training loss: 0.0567.
292
+ [ Thu Sep 8 21:05:25 2022 ] Time consumption: [Data]02%, [Network]98%
293
+ [ Thu Sep 8 21:05:25 2022 ] Eval epoch: 63
294
+ [ Thu Sep 8 21:07:37 2022 ] Epoch 63 Curr Acc: (10353/16487)62.79%
295
+ [ Thu Sep 8 21:07:37 2022 ] Epoch 55 Best Acc 69.46%
296
+ [ Thu Sep 8 21:07:37 2022 ] Training epoch: 64
297
+ [ Thu Sep 8 21:07:37 2022 ] Learning rate: 0.015
298
+ [ Thu Sep 8 21:10:59 2022 ] Mean training loss: 0.0561.
299
+ [ Thu Sep 8 21:10:59 2022 ] Time consumption: [Data]02%, [Network]98%
300
+ [ Thu Sep 8 21:10:59 2022 ] Eval epoch: 64
301
+ [ Thu Sep 8 21:13:11 2022 ] Epoch 64 Curr Acc: (9955/16487)60.38%
302
+ [ Thu Sep 8 21:13:11 2022 ] Epoch 55 Best Acc 69.46%
303
+ [ Thu Sep 8 21:13:11 2022 ] Training epoch: 65
304
+ [ Thu Sep 8 21:13:11 2022 ] Learning rate: 0.015
305
+ [ Thu Sep 8 21:16:32 2022 ] Mean training loss: 0.0475.
306
+ [ Thu Sep 8 21:16:32 2022 ] Time consumption: [Data]02%, [Network]98%
307
+ [ Thu Sep 8 21:16:32 2022 ] Eval epoch: 65
308
+ [ Thu Sep 8 21:18:45 2022 ] Epoch 65 Curr Acc: (11008/16487)66.77%
309
+ [ Thu Sep 8 21:18:45 2022 ] Epoch 55 Best Acc 69.46%
310
+ [ Thu Sep 8 21:18:45 2022 ] Training epoch: 66
311
+ [ Thu Sep 8 21:18:45 2022 ] Learning rate: 0.015
312
+ [ Thu Sep 8 21:22:06 2022 ] Mean training loss: 0.0453.
313
+ [ Thu Sep 8 21:22:06 2022 ] Time consumption: [Data]02%, [Network]98%
314
+ [ Thu Sep 8 21:22:06 2022 ] Eval epoch: 66
315
+ [ Thu Sep 8 21:24:18 2022 ] Epoch 66 Curr Acc: (11337/16487)68.76%
316
+ [ Thu Sep 8 21:24:18 2022 ] Epoch 55 Best Acc 69.46%
317
+ [ Thu Sep 8 21:24:18 2022 ] Training epoch: 67
318
+ [ Thu Sep 8 21:24:18 2022 ] Learning rate: 0.015
319
+ [ Thu Sep 8 21:27:40 2022 ] Mean training loss: 0.0457.
320
+ [ Thu Sep 8 21:27:40 2022 ] Time consumption: [Data]02%, [Network]98%
321
+ [ Thu Sep 8 21:27:40 2022 ] Eval epoch: 67
322
+ [ Thu Sep 8 21:29:53 2022 ] Epoch 67 Curr Acc: (10638/16487)64.52%
323
+ [ Thu Sep 8 21:29:53 2022 ] Epoch 55 Best Acc 69.46%
324
+ [ Thu Sep 8 21:29:53 2022 ] Training epoch: 68
325
+ [ Thu Sep 8 21:29:53 2022 ] Learning rate: 0.015
326
+ [ Thu Sep 8 21:33:14 2022 ] Mean training loss: 0.0494.
327
+ [ Thu Sep 8 21:33:14 2022 ] Time consumption: [Data]02%, [Network]98%
328
+ [ Thu Sep 8 21:33:14 2022 ] Eval epoch: 68
329
+ [ Thu Sep 8 21:35:26 2022 ] Epoch 68 Curr Acc: (11151/16487)67.64%
330
+ [ Thu Sep 8 21:35:26 2022 ] Epoch 55 Best Acc 69.46%
331
+ [ Thu Sep 8 21:35:26 2022 ] Training epoch: 69
332
+ [ Thu Sep 8 21:35:26 2022 ] Learning rate: 0.015
333
+ [ Thu Sep 8 21:38:47 2022 ] Mean training loss: 0.0378.
334
+ [ Thu Sep 8 21:38:47 2022 ] Time consumption: [Data]02%, [Network]98%
335
+ [ Thu Sep 8 21:38:48 2022 ] Eval epoch: 69
336
+ [ Thu Sep 8 21:41:00 2022 ] Epoch 69 Curr Acc: (11160/16487)67.69%
337
+ [ Thu Sep 8 21:41:00 2022 ] Epoch 55 Best Acc 69.46%
338
+ [ Thu Sep 8 21:41:00 2022 ] Training epoch: 70
339
+ [ Thu Sep 8 21:41:00 2022 ] Learning rate: 0.015
340
+ [ Thu Sep 8 21:44:21 2022 ] Mean training loss: 0.0363.
341
+ [ Thu Sep 8 21:44:21 2022 ] Time consumption: [Data]02%, [Network]98%
342
+ [ Thu Sep 8 21:44:21 2022 ] Eval epoch: 70
343
+ [ Thu Sep 8 21:46:34 2022 ] Epoch 70 Curr Acc: (10514/16487)63.77%
344
+ [ Thu Sep 8 21:46:34 2022 ] Epoch 55 Best Acc 69.46%
345
+ [ Thu Sep 8 21:46:34 2022 ] Training epoch: 71
346
+ [ Thu Sep 8 21:46:34 2022 ] Learning rate: 0.0015000000000000002
347
+ [ Thu Sep 8 21:49:55 2022 ] Mean training loss: 0.0281.
348
+ [ Thu Sep 8 21:49:55 2022 ] Time consumption: [Data]02%, [Network]98%
349
+ [ Thu Sep 8 21:49:55 2022 ] Eval epoch: 71
350
+ [ Thu Sep 8 21:52:07 2022 ] Epoch 71 Curr Acc: (11203/16487)67.95%
351
+ [ Thu Sep 8 21:52:07 2022 ] Epoch 55 Best Acc 69.46%
352
+ [ Thu Sep 8 21:52:07 2022 ] Training epoch: 72
353
+ [ Thu Sep 8 21:52:07 2022 ] Learning rate: 0.0015000000000000002
354
+ [ Thu Sep 8 21:55:28 2022 ] Mean training loss: 0.0247.
355
+ [ Thu Sep 8 21:55:28 2022 ] Time consumption: [Data]02%, [Network]98%
356
+ [ Thu Sep 8 21:55:28 2022 ] Eval epoch: 72
357
+ [ Thu Sep 8 21:57:40 2022 ] Epoch 72 Curr Acc: (10075/16487)61.11%
358
+ [ Thu Sep 8 21:57:40 2022 ] Epoch 55 Best Acc 69.46%
359
+ [ Thu Sep 8 21:57:40 2022 ] Training epoch: 73
360
+ [ Thu Sep 8 21:57:40 2022 ] Learning rate: 0.0015000000000000002
361
+ [ Thu Sep 8 22:01:01 2022 ] Mean training loss: 0.0245.
362
+ [ Thu Sep 8 22:01:01 2022 ] Time consumption: [Data]02%, [Network]98%
363
+ [ Thu Sep 8 22:01:01 2022 ] Eval epoch: 73
364
+ [ Thu Sep 8 22:03:14 2022 ] Epoch 73 Curr Acc: (10729/16487)65.08%
365
+ [ Thu Sep 8 22:03:14 2022 ] Epoch 55 Best Acc 69.46%
366
+ [ Thu Sep 8 22:03:14 2022 ] Training epoch: 74
367
+ [ Thu Sep 8 22:03:14 2022 ] Learning rate: 0.0015000000000000002
368
+ [ Thu Sep 8 22:06:35 2022 ] Mean training loss: 0.0211.
369
+ [ Thu Sep 8 22:06:35 2022 ] Time consumption: [Data]02%, [Network]98%
370
+ [ Thu Sep 8 22:06:35 2022 ] Eval epoch: 74
371
+ [ Thu Sep 8 22:08:47 2022 ] Epoch 74 Curr Acc: (10541/16487)63.94%
372
+ [ Thu Sep 8 22:08:47 2022 ] Epoch 55 Best Acc 69.46%
373
+ [ Thu Sep 8 22:08:47 2022 ] Training epoch: 75
374
+ [ Thu Sep 8 22:08:47 2022 ] Learning rate: 0.0015000000000000002
375
+ [ Thu Sep 8 22:12:08 2022 ] Mean training loss: 0.0194.
376
+ [ Thu Sep 8 22:12:08 2022 ] Time consumption: [Data]02%, [Network]98%
377
+ [ Thu Sep 8 22:12:08 2022 ] Eval epoch: 75
378
+ [ Thu Sep 8 22:14:20 2022 ] Epoch 75 Curr Acc: (9983/16487)60.55%
379
+ [ Thu Sep 8 22:14:20 2022 ] Epoch 55 Best Acc 69.46%
380
+ [ Thu Sep 8 22:14:20 2022 ] Training epoch: 76
381
+ [ Thu Sep 8 22:14:20 2022 ] Learning rate: 0.0015000000000000002
382
+ [ Thu Sep 8 22:17:41 2022 ] Mean training loss: 0.0206.
383
+ [ Thu Sep 8 22:17:41 2022 ] Time consumption: [Data]02%, [Network]98%
384
+ [ Thu Sep 8 22:17:42 2022 ] Eval epoch: 76
385
+ [ Thu Sep 8 22:19:54 2022 ] Epoch 76 Curr Acc: (11450/16487)69.45%
386
+ [ Thu Sep 8 22:19:54 2022 ] Epoch 55 Best Acc 69.46%
387
+ [ Thu Sep 8 22:19:54 2022 ] Training epoch: 77
388
+ [ Thu Sep 8 22:19:54 2022 ] Learning rate: 0.0015000000000000002
389
+ [ Thu Sep 8 22:23:15 2022 ] Mean training loss: 0.0160.
390
+ [ Thu Sep 8 22:23:15 2022 ] Time consumption: [Data]02%, [Network]98%
391
+ [ Thu Sep 8 22:23:15 2022 ] Eval epoch: 77
392
+ [ Thu Sep 8 22:25:27 2022 ] Epoch 77 Curr Acc: (9984/16487)60.56%
393
+ [ Thu Sep 8 22:25:27 2022 ] Epoch 55 Best Acc 69.46%
394
+ [ Thu Sep 8 22:25:27 2022 ] Training epoch: 78
395
+ [ Thu Sep 8 22:25:27 2022 ] Learning rate: 0.0015000000000000002
396
+ [ Thu Sep 8 22:28:48 2022 ] Mean training loss: 0.0171.
397
+ [ Thu Sep 8 22:28:48 2022 ] Time consumption: [Data]02%, [Network]98%
398
+ [ Thu Sep 8 22:28:48 2022 ] Eval epoch: 78
399
+ [ Thu Sep 8 22:31:01 2022 ] Epoch 78 Curr Acc: (10708/16487)64.95%
400
+ [ Thu Sep 8 22:31:01 2022 ] Epoch 55 Best Acc 69.46%
401
+ [ Thu Sep 8 22:31:01 2022 ] Training epoch: 79
402
+ [ Thu Sep 8 22:31:01 2022 ] Learning rate: 0.0015000000000000002
403
+ [ Thu Sep 8 22:34:21 2022 ] Mean training loss: 0.0179.
404
+ [ Thu Sep 8 22:34:21 2022 ] Time consumption: [Data]02%, [Network]98%
405
+ [ Thu Sep 8 22:34:21 2022 ] Eval epoch: 79
406
+ [ Thu Sep 8 22:36:34 2022 ] Epoch 79 Curr Acc: (10084/16487)61.16%
407
+ [ Thu Sep 8 22:36:34 2022 ] Epoch 55 Best Acc 69.46%
408
+ [ Thu Sep 8 22:36:34 2022 ] Training epoch: 80
409
+ [ Thu Sep 8 22:36:34 2022 ] Learning rate: 0.0015000000000000002
410
+ [ Thu Sep 8 22:39:54 2022 ] Mean training loss: 0.0162.
411
+ [ Thu Sep 8 22:39:54 2022 ] Time consumption: [Data]02%, [Network]98%
412
+ [ Thu Sep 8 22:39:55 2022 ] Eval epoch: 80
413
+ [ Thu Sep 8 22:42:07 2022 ] Epoch 80 Curr Acc: (10999/16487)66.71%
414
+ [ Thu Sep 8 22:42:07 2022 ] Epoch 55 Best Acc 69.46%
415
+ [ Thu Sep 8 22:42:07 2022 ] Training epoch: 81
416
+ [ Thu Sep 8 22:42:07 2022 ] Learning rate: 0.0015000000000000002
417
+ [ Thu Sep 8 22:45:28 2022 ] Mean training loss: 0.0190.
418
+ [ Thu Sep 8 22:45:28 2022 ] Time consumption: [Data]02%, [Network]98%
419
+ [ Thu Sep 8 22:45:28 2022 ] Eval epoch: 81
420
+ [ Thu Sep 8 22:47:40 2022 ] Epoch 81 Curr Acc: (11188/16487)67.86%
421
+ [ Thu Sep 8 22:47:40 2022 ] Epoch 55 Best Acc 69.46%
422
+ [ Thu Sep 8 22:47:40 2022 ] Training epoch: 82
423
+ [ Thu Sep 8 22:47:40 2022 ] Learning rate: 0.0015000000000000002
424
+ [ Thu Sep 8 22:51:01 2022 ] Mean training loss: 0.0153.
425
+ [ Thu Sep 8 22:51:01 2022 ] Time consumption: [Data]02%, [Network]98%
426
+ [ Thu Sep 8 22:51:01 2022 ] Eval epoch: 82
427
+ [ Thu Sep 8 22:53:13 2022 ] Epoch 82 Curr Acc: (9668/16487)58.64%
428
+ [ Thu Sep 8 22:53:13 2022 ] Epoch 55 Best Acc 69.46%
429
+ [ Thu Sep 8 22:53:13 2022 ] Training epoch: 83
430
+ [ Thu Sep 8 22:53:13 2022 ] Learning rate: 0.0015000000000000002
431
+ [ Thu Sep 8 22:56:35 2022 ] Mean training loss: 0.0170.
432
+ [ Thu Sep 8 22:56:35 2022 ] Time consumption: [Data]02%, [Network]98%
433
+ [ Thu Sep 8 22:56:35 2022 ] Eval epoch: 83
434
+ [ Thu Sep 8 22:58:47 2022 ] Epoch 83 Curr Acc: (11192/16487)67.88%
435
+ [ Thu Sep 8 22:58:47 2022 ] Epoch 55 Best Acc 69.46%
436
+ [ Thu Sep 8 22:58:47 2022 ] Training epoch: 84
437
+ [ Thu Sep 8 22:58:47 2022 ] Learning rate: 0.0015000000000000002
438
+ [ Thu Sep 8 23:02:09 2022 ] Mean training loss: 0.0166.
439
+ [ Thu Sep 8 23:02:09 2022 ] Time consumption: [Data]02%, [Network]98%
440
+ [ Thu Sep 8 23:02:09 2022 ] Eval epoch: 84
441
+ [ Thu Sep 8 23:04:21 2022 ] Epoch 84 Curr Acc: (10297/16487)62.46%
442
+ [ Thu Sep 8 23:04:21 2022 ] Epoch 55 Best Acc 69.46%
443
+ [ Thu Sep 8 23:04:21 2022 ] Training epoch: 85
444
+ [ Thu Sep 8 23:04:21 2022 ] Learning rate: 0.0015000000000000002
445
+ [ Thu Sep 8 23:07:43 2022 ] Mean training loss: 0.0164.
446
+ [ Thu Sep 8 23:07:43 2022 ] Time consumption: [Data]02%, [Network]98%
447
+ [ Thu Sep 8 23:07:43 2022 ] Eval epoch: 85
448
+ [ Thu Sep 8 23:09:56 2022 ] Epoch 85 Curr Acc: (11152/16487)67.64%
449
+ [ Thu Sep 8 23:09:56 2022 ] Epoch 55 Best Acc 69.46%
450
+ [ Thu Sep 8 23:09:56 2022 ] Training epoch: 86
451
+ [ Thu Sep 8 23:09:56 2022 ] Learning rate: 0.0015000000000000002
452
+ [ Thu Sep 8 23:13:17 2022 ] Mean training loss: 0.0149.
453
+ [ Thu Sep 8 23:13:17 2022 ] Time consumption: [Data]02%, [Network]98%
454
+ [ Thu Sep 8 23:13:17 2022 ] Eval epoch: 86
455
+ [ Thu Sep 8 23:15:29 2022 ] Epoch 86 Curr Acc: (10591/16487)64.24%
456
+ [ Thu Sep 8 23:15:29 2022 ] Epoch 55 Best Acc 69.46%
457
+ [ Thu Sep 8 23:15:29 2022 ] Training epoch: 87
458
+ [ Thu Sep 8 23:15:29 2022 ] Learning rate: 0.0015000000000000002
459
+ [ Thu Sep 8 23:18:51 2022 ] Mean training loss: 0.0143.
460
+ [ Thu Sep 8 23:18:51 2022 ] Time consumption: [Data]02%, [Network]98%
461
+ [ Thu Sep 8 23:18:51 2022 ] Eval epoch: 87
462
+ [ Thu Sep 8 23:21:03 2022 ] Epoch 87 Curr Acc: (11437/16487)69.37%
463
+ [ Thu Sep 8 23:21:03 2022 ] Epoch 55 Best Acc 69.46%
464
+ [ Thu Sep 8 23:21:03 2022 ] Training epoch: 88
465
+ [ Thu Sep 8 23:21:03 2022 ] Learning rate: 0.0015000000000000002
466
+ [ Thu Sep 8 23:24:24 2022 ] Mean training loss: 0.0148.
467
+ [ Thu Sep 8 23:24:24 2022 ] Time consumption: [Data]02%, [Network]98%
468
+ [ Thu Sep 8 23:24:24 2022 ] Eval epoch: 88
469
+ [ Thu Sep 8 23:26:37 2022 ] Epoch 88 Curr Acc: (11322/16487)68.67%
470
+ [ Thu Sep 8 23:26:37 2022 ] Epoch 55 Best Acc 69.46%
471
+ [ Thu Sep 8 23:26:37 2022 ] Training epoch: 89
472
+ [ Thu Sep 8 23:26:37 2022 ] Learning rate: 0.0015000000000000002
473
+ [ Thu Sep 8 23:29:57 2022 ] Mean training loss: 0.0160.
474
+ [ Thu Sep 8 23:29:57 2022 ] Time consumption: [Data]02%, [Network]98%
475
+ [ Thu Sep 8 23:29:58 2022 ] Eval epoch: 89
476
+ [ Thu Sep 8 23:32:10 2022 ] Epoch 89 Curr Acc: (11235/16487)68.14%
477
+ [ Thu Sep 8 23:32:10 2022 ] Epoch 55 Best Acc 69.46%
478
+ [ Thu Sep 8 23:32:10 2022 ] Training epoch: 90
479
+ [ Thu Sep 8 23:32:10 2022 ] Learning rate: 0.0015000000000000002
480
+ [ Thu Sep 8 23:35:31 2022 ] Mean training loss: 0.0160.
481
+ [ Thu Sep 8 23:35:31 2022 ] Time consumption: [Data]02%, [Network]98%
482
+ [ Thu Sep 8 23:35:31 2022 ] Eval epoch: 90
483
+ [ Thu Sep 8 23:37:43 2022 ] Epoch 90 Curr Acc: (11088/16487)67.25%
484
+ [ Thu Sep 8 23:37:43 2022 ] Epoch 55 Best Acc 69.46%
485
+ [ Thu Sep 8 23:37:43 2022 ] Training epoch: 91
486
+ [ Thu Sep 8 23:37:43 2022 ] Learning rate: 0.00015000000000000004
487
+ [ Thu Sep 8 23:41:05 2022 ] Mean training loss: 0.0152.
488
+ [ Thu Sep 8 23:41:05 2022 ] Time consumption: [Data]02%, [Network]98%
489
+ [ Thu Sep 8 23:41:05 2022 ] Eval epoch: 91
490
+ [ Thu Sep 8 23:43:17 2022 ] Epoch 91 Curr Acc: (11576/16487)70.21%
491
+ [ Thu Sep 8 23:43:17 2022 ] Epoch 91 Best Acc 70.21%
492
+ [ Thu Sep 8 23:43:17 2022 ] Training epoch: 92
493
+ [ Thu Sep 8 23:43:17 2022 ] Learning rate: 0.00015000000000000004
494
+ [ Thu Sep 8 23:46:38 2022 ] Mean training loss: 0.0156.
495
+ [ Thu Sep 8 23:46:38 2022 ] Time consumption: [Data]02%, [Network]98%
496
+ [ Thu Sep 8 23:46:38 2022 ] Eval epoch: 92
497
+ [ Thu Sep 8 23:48:51 2022 ] Epoch 92 Curr Acc: (9849/16487)59.74%
498
+ [ Thu Sep 8 23:48:51 2022 ] Epoch 91 Best Acc 70.21%
499
+ [ Thu Sep 8 23:48:51 2022 ] Training epoch: 93
500
+ [ Thu Sep 8 23:48:51 2022 ] Learning rate: 0.00015000000000000004
501
+ [ Thu Sep 8 23:52:12 2022 ] Mean training loss: 0.0135.
502
+ [ Thu Sep 8 23:52:12 2022 ] Time consumption: [Data]02%, [Network]98%
503
+ [ Thu Sep 8 23:52:12 2022 ] Eval epoch: 93
504
+ [ Thu Sep 8 23:54:24 2022 ] Epoch 93 Curr Acc: (11271/16487)68.36%
505
+ [ Thu Sep 8 23:54:24 2022 ] Epoch 91 Best Acc 70.21%
506
+ [ Thu Sep 8 23:54:24 2022 ] Training epoch: 94
507
+ [ Thu Sep 8 23:54:24 2022 ] Learning rate: 0.00015000000000000004
508
+ [ Thu Sep 8 23:57:46 2022 ] Mean training loss: 0.0134.
509
+ [ Thu Sep 8 23:57:46 2022 ] Time consumption: [Data]02%, [Network]98%
510
+ [ Thu Sep 8 23:57:46 2022 ] Eval epoch: 94
511
+ [ Thu Sep 8 23:59:59 2022 ] Epoch 94 Curr Acc: (10530/16487)63.87%
512
+ [ Thu Sep 8 23:59:59 2022 ] Epoch 91 Best Acc 70.21%
513
+ [ Thu Sep 8 23:59:59 2022 ] Training epoch: 95
514
+ [ Thu Sep 8 23:59:59 2022 ] Learning rate: 0.00015000000000000004
515
+ [ Fri Sep 9 00:03:20 2022 ] Mean training loss: 0.0135.
516
+ [ Fri Sep 9 00:03:20 2022 ] Time consumption: [Data]02%, [Network]98%
517
+ [ Fri Sep 9 00:03:20 2022 ] Eval epoch: 95
518
+ [ Fri Sep 9 00:05:33 2022 ] Epoch 95 Curr Acc: (10177/16487)61.73%
519
+ [ Fri Sep 9 00:05:33 2022 ] Epoch 91 Best Acc 70.21%
520
+ [ Fri Sep 9 00:05:33 2022 ] Training epoch: 96
521
+ [ Fri Sep 9 00:05:33 2022 ] Learning rate: 0.00015000000000000004
522
+ [ Fri Sep 9 00:08:54 2022 ] Mean training loss: 0.0145.
523
+ [ Fri Sep 9 00:08:54 2022 ] Time consumption: [Data]02%, [Network]98%
524
+ [ Fri Sep 9 00:08:54 2022 ] Eval epoch: 96
525
+ [ Fri Sep 9 00:11:06 2022 ] Epoch 96 Curr Acc: (11439/16487)69.38%
526
+ [ Fri Sep 9 00:11:06 2022 ] Epoch 91 Best Acc 70.21%
527
+ [ Fri Sep 9 00:11:06 2022 ] Training epoch: 97
528
+ [ Fri Sep 9 00:11:06 2022 ] Learning rate: 0.00015000000000000004
529
+ [ Fri Sep 9 00:14:28 2022 ] Mean training loss: 0.0128.
530
+ [ Fri Sep 9 00:14:28 2022 ] Time consumption: [Data]02%, [Network]98%
531
+ [ Fri Sep 9 00:14:28 2022 ] Eval epoch: 97
532
+ [ Fri Sep 9 00:16:40 2022 ] Epoch 97 Curr Acc: (10205/16487)61.90%
533
+ [ Fri Sep 9 00:16:40 2022 ] Epoch 91 Best Acc 70.21%
534
+ [ Fri Sep 9 00:16:40 2022 ] Training epoch: 98
535
+ [ Fri Sep 9 00:16:40 2022 ] Learning rate: 0.00015000000000000004
536
+ [ Fri Sep 9 00:20:01 2022 ] Mean training loss: 0.0156.
537
+ [ Fri Sep 9 00:20:01 2022 ] Time consumption: [Data]02%, [Network]98%
538
+ [ Fri Sep 9 00:20:01 2022 ] Eval epoch: 98
539
+ [ Fri Sep 9 00:22:13 2022 ] Epoch 98 Curr Acc: (11479/16487)69.62%
540
+ [ Fri Sep 9 00:22:13 2022 ] Epoch 91 Best Acc 70.21%
541
+ [ Fri Sep 9 00:22:13 2022 ] Training epoch: 99
542
+ [ Fri Sep 9 00:22:13 2022 ] Learning rate: 0.00015000000000000004
543
+ [ Fri Sep 9 00:25:35 2022 ] Mean training loss: 0.0147.
544
+ [ Fri Sep 9 00:25:35 2022 ] Time consumption: [Data]02%, [Network]98%
545
+ [ Fri Sep 9 00:25:35 2022 ] Eval epoch: 99
546
+ [ Fri Sep 9 00:27:47 2022 ] Epoch 99 Curr Acc: (9911/16487)60.11%
547
+ [ Fri Sep 9 00:27:47 2022 ] Epoch 91 Best Acc 70.21%
548
+ [ Fri Sep 9 00:27:47 2022 ] Training epoch: 100
549
+ [ Fri Sep 9 00:27:47 2022 ] Learning rate: 0.00015000000000000004
550
+ [ Fri Sep 9 00:31:08 2022 ] Mean training loss: 0.0143.
551
+ [ Fri Sep 9 00:31:08 2022 ] Time consumption: [Data]02%, [Network]98%
552
+ [ Fri Sep 9 00:31:08 2022 ] Eval epoch: 100
553
+ [ Fri Sep 9 00:33:20 2022 ] Epoch 100 Curr Acc: (11292/16487)68.49%
554
+ [ Fri Sep 9 00:33:20 2022 ] Epoch 91 Best Acc 70.21%
555
+ [ Fri Sep 9 00:33:21 2022 ] Training epoch: 101
556
+ [ Fri Sep 9 00:33:21 2022 ] Learning rate: 0.00015000000000000004
557
+ [ Fri Sep 9 00:36:41 2022 ] Mean training loss: 0.0164.
558
+ [ Fri Sep 9 00:36:41 2022 ] Time consumption: [Data]02%, [Network]98%
559
+ [ Fri Sep 9 00:36:41 2022 ] Eval epoch: 101
560
+ [ Fri Sep 9 00:38:54 2022 ] Epoch 101 Curr Acc: (11442/16487)69.40%
561
+ [ Fri Sep 9 00:38:54 2022 ] Epoch 91 Best Acc 70.21%
562
+ [ Fri Sep 9 00:38:54 2022 ] Training epoch: 102
563
+ [ Fri Sep 9 00:38:54 2022 ] Learning rate: 0.00015000000000000004
564
+ [ Fri Sep 9 00:42:15 2022 ] Mean training loss: 0.0159.
565
+ [ Fri Sep 9 00:42:15 2022 ] Time consumption: [Data]02%, [Network]98%
566
+ [ Fri Sep 9 00:42:15 2022 ] Eval epoch: 102
567
+ [ Fri Sep 9 00:44:27 2022 ] Epoch 102 Curr Acc: (10232/16487)62.06%
568
+ [ Fri Sep 9 00:44:27 2022 ] Epoch 91 Best Acc 70.21%
569
+ [ Fri Sep 9 00:44:27 2022 ] Training epoch: 103
570
+ [ Fri Sep 9 00:44:27 2022 ] Learning rate: 0.00015000000000000004
571
+ [ Fri Sep 9 00:47:48 2022 ] Mean training loss: 0.0156.
572
+ [ Fri Sep 9 00:47:48 2022 ] Time consumption: [Data]02%, [Network]98%
573
+ [ Fri Sep 9 00:47:48 2022 ] Eval epoch: 103
574
+ [ Fri Sep 9 00:50:01 2022 ] Epoch 103 Curr Acc: (10424/16487)63.23%
575
+ [ Fri Sep 9 00:50:01 2022 ] Epoch 91 Best Acc 70.21%
576
+ [ Fri Sep 9 00:50:01 2022 ] Training epoch: 104
577
+ [ Fri Sep 9 00:50:01 2022 ] Learning rate: 0.00015000000000000004
578
+ [ Fri Sep 9 00:53:22 2022 ] Mean training loss: 0.0147.
579
+ [ Fri Sep 9 00:53:22 2022 ] Time consumption: [Data]02%, [Network]98%
580
+ [ Fri Sep 9 00:53:22 2022 ] Eval epoch: 104
581
+ [ Fri Sep 9 00:55:34 2022 ] Epoch 104 Curr Acc: (10890/16487)66.05%
582
+ [ Fri Sep 9 00:55:34 2022 ] Epoch 91 Best Acc 70.21%
583
+ [ Fri Sep 9 00:55:34 2022 ] Training epoch: 105
584
+ [ Fri Sep 9 00:55:34 2022 ] Learning rate: 0.00015000000000000004
585
+ [ Fri Sep 9 00:58:55 2022 ] Mean training loss: 0.0152.
586
+ [ Fri Sep 9 00:58:55 2022 ] Time consumption: [Data]02%, [Network]98%
587
+ [ Fri Sep 9 00:58:55 2022 ] Eval epoch: 105
588
+ [ Fri Sep 9 01:01:07 2022 ] Epoch 105 Curr Acc: (10402/16487)63.09%
589
+ [ Fri Sep 9 01:01:07 2022 ] Epoch 91 Best Acc 70.21%
590
+ [ Fri Sep 9 01:01:07 2022 ] Training epoch: 106
591
+ [ Fri Sep 9 01:01:07 2022 ] Learning rate: 0.00015000000000000004
592
+ [ Fri Sep 9 01:04:29 2022 ] Mean training loss: 0.0138.
593
+ [ Fri Sep 9 01:04:29 2022 ] Time consumption: [Data]02%, [Network]98%
594
+ [ Fri Sep 9 01:04:29 2022 ] Eval epoch: 106
595
+ [ Fri Sep 9 01:06:42 2022 ] Epoch 106 Curr Acc: (11271/16487)68.36%
596
+ [ Fri Sep 9 01:06:42 2022 ] Epoch 91 Best Acc 70.21%
597
+ [ Fri Sep 9 01:06:42 2022 ] Training epoch: 107
598
+ [ Fri Sep 9 01:06:42 2022 ] Learning rate: 0.00015000000000000004
599
+ [ Fri Sep 9 01:10:03 2022 ] Mean training loss: 0.0148.
600
+ [ Fri Sep 9 01:10:03 2022 ] Time consumption: [Data]02%, [Network]98%
601
+ [ Fri Sep 9 01:10:03 2022 ] Eval epoch: 107
602
+ [ Fri Sep 9 01:12:16 2022 ] Epoch 107 Curr Acc: (9807/16487)59.48%
603
+ [ Fri Sep 9 01:12:16 2022 ] Epoch 91 Best Acc 70.21%
604
+ [ Fri Sep 9 01:12:16 2022 ] Training epoch: 108
605
+ [ Fri Sep 9 01:12:16 2022 ] Learning rate: 0.00015000000000000004
606
+ [ Fri Sep 9 01:15:37 2022 ] Mean training loss: 0.0147.
607
+ [ Fri Sep 9 01:15:37 2022 ] Time consumption: [Data]02%, [Network]98%
608
+ [ Fri Sep 9 01:15:37 2022 ] Eval epoch: 108
609
+ [ Fri Sep 9 01:17:49 2022 ] Epoch 108 Curr Acc: (10299/16487)62.47%
610
+ [ Fri Sep 9 01:17:49 2022 ] Epoch 91 Best Acc 70.21%
611
+ [ Fri Sep 9 01:17:49 2022 ] Training epoch: 109
612
+ [ Fri Sep 9 01:17:49 2022 ] Learning rate: 0.00015000000000000004
613
+ [ Fri Sep 9 01:21:11 2022 ] Mean training loss: 0.0153.
614
+ [ Fri Sep 9 01:21:11 2022 ] Time consumption: [Data]02%, [Network]98%
615
+ [ Fri Sep 9 01:21:11 2022 ] Eval epoch: 109
616
+ [ Fri Sep 9 01:23:23 2022 ] Epoch 109 Curr Acc: (9934/16487)60.25%
617
+ [ Fri Sep 9 01:23:23 2022 ] Epoch 91 Best Acc 70.21%
618
+ [ Fri Sep 9 01:23:23 2022 ] Training epoch: 110
619
+ [ Fri Sep 9 01:23:23 2022 ] Learning rate: 0.00015000000000000004
620
+ [ Fri Sep 9 01:26:44 2022 ] Mean training loss: 0.0146.
621
+ [ Fri Sep 9 01:26:44 2022 ] Time consumption: [Data]02%, [Network]98%
622
+ [ Fri Sep 9 01:26:44 2022 ] Eval epoch: 110
623
+ [ Fri Sep 9 01:28:56 2022 ] Epoch 110 Curr Acc: (10218/16487)61.98%
624
+ [ Fri Sep 9 01:28:56 2022 ] Epoch 91 Best Acc 70.21%
625
+ [ Fri Sep 9 01:28:56 2022 ] epoch: 91, best accuracy: 0.7021289500818827
626
+ [ Fri Sep 9 01:28:56 2022 ] Experiment: ./work_dir/ntu/xsub_b
627
+ [ Fri Sep 9 01:28:57 2022 ] # generator parameters: 2.896055 M.
628
+ [ Fri Sep 9 01:28:57 2022 ] Load weights from ./runs/ntu/xsub_b/runs-90-89726.pt.
629
+ [ Fri Sep 9 01:28:57 2022 ] Eval epoch: 1
630
+ [ Fri Sep 9 01:31:09 2022 ] Epoch 1 Curr Acc: (11576/16487)70.21%
631
+ [ Fri Sep 9 01:31:09 2022 ] Epoch 91 Best Acc 70.21%
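Note: the evaluation accuracies in the uploaded log.txt files can be recovered programmatically. The snippet below is only a minimal sketch, assuming nothing beyond the line format shown above ("Epoch N Curr Acc: (correct/total)XX.XX%"); the path points at one of the logs in this upload and can be swapped for any other.

import re

# Minimal sketch: rebuild the per-epoch evaluation curve from a log.txt
# written in the format shown above.
pattern = re.compile(r'Epoch (\d+) Curr Acc: \((\d+)/(\d+)\)')

def eval_curve(log_path):
    curve = {}
    with open(log_path) as f:
        for line in f:
            m = pattern.search(line)
            if m:
                epoch, correct, total = (int(g) for g in m.groups())
                # keep the first occurrence: the final re-evaluation of the best
                # weights is also logged as "Epoch 1" and would otherwise overwrite it
                curve.setdefault(epoch, correct / total)
    return curve

acc = eval_curve('ckpt/Others/MST-GCN/ntu60_xsub/xsub_bm/log.txt')
best_epoch = max(acc, key=acc.get)
print('best epoch: {}, best acc: {:.2%}'.format(best_epoch, acc[best_epoch]))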
ckpt/Others/MST-GCN/ntu60_xsub/xsub_bm/AEMST_GCN.py ADDED
@@ -0,0 +1,168 @@
1
+ import torch
2
+ import torch.nn as nn
3
+ import torch.nn.functional as F
4
+
5
+ import numpy as np
6
+ import math
7
+
8
+ import sys
9
+ sys.path.append('../')
10
+ from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \
11
+ MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \
12
+ MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer
13
+ from model.activations import Activations
14
+ from model.utils import import_class, conv_branch_init, conv_init, bn_init
15
+ from model.attentions import Attention_Layer
16
+
17
+ # import model.attentions
18
+
19
+ __block_type__ = {
20
+ 'basic': (Basic_GCN_layer, Basic_TCN_layer),
21
+ 'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer),
22
+ 'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer),
23
+ 'ms': (MS_GCN_layer, MS_TCN_layer),
24
+ 'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer),
25
+ }
26
+
27
+
28
+ class Model(nn.Module):
29
+ def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten,
30
+ **kwargs):
31
+ super(Model, self).__init__()
32
+ kwargs['act'] = Activations(kwargs['act'])
33
+ atten = None if atten == 'None' else atten
34
+ if graph is None:
35
+ raise ValueError()
36
+ else:
37
+ Graph = import_class(graph)
38
+ self.graph = Graph(**graph_args)
39
+ A = self.graph.A
40
+
41
+ self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point)
42
+
43
+ self.layers = nn.ModuleList()
44
+
45
+ for i, block in enumerate(block_args):
46
+ if i == 0:
47
+ self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2],
48
+ kernel_size=kernel_size, stride=block[3], A=A, block_type='basic',
49
+ atten=None, **kwargs))
50
+ else:
51
+ self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2],
52
+ kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type,
53
+ atten=atten, **kwargs))
54
+
55
+ self.gap = nn.AdaptiveAvgPool2d(1)
56
+ self.fc = nn.Linear(block_args[-1][1], num_class)
57
+
58
+ for m in self.modules():
59
+ if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer):
60
+ for mm in m.modules():
61
+ if isinstance(mm, nn.Conv2d):
62
+ conv_branch_init(mm, self.graph.A.shape[0])
63
+ if isinstance(mm, nn.BatchNorm2d):
64
+ bn_init(mm, 1)
65
+ elif isinstance(m, nn.Conv2d):
66
+ conv_init(m)
67
+ elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d):
68
+ bn_init(m, 1)
69
+ elif isinstance(m, nn.Linear):
70
+ nn.init.normal_(m.weight, 0, math.sqrt(2. / num_class))
71
+
72
+ def forward(self, x):
73
+ N, C, T, V, M = x.size()
74
+
75
+ x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T) # N C T V M --> N M V C T
76
+ x = self.data_bn(x)
77
+ x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
78
+
79
+ for i, layer in enumerate(self.layers):
80
+ x = layer(x)
81
+
82
+ features = x
83
+
84
+ x = self.gap(x).view(N, M, -1).mean(dim=1)
85
+ x = self.fc(x)
86
+
87
+ return features, x
88
+
89
+
90
+ class MST_GCN_block(nn.Module):
91
+ def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs):
92
+ super(MST_GCN_block, self).__init__()
93
+ self.atten = atten
94
+ self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A,
95
+ residual=residual, **kwargs)
96
+ self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride,
97
+ residual=residual, **kwargs)
98
+ if atten is not None:
99
+ self.att = Attention_Layer(out_channels, atten, **kwargs)
100
+
101
+ def forward(self, x):
102
+ return self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x))
103
+
104
+
105
+ if __name__ == '__main__':
106
+ import sys
107
+ import time
108
+
109
+ parts = [
110
+ np.array([5, 6, 7, 8, 22, 23]) - 1, # left_arm
111
+ np.array([9, 10, 11, 12, 24, 25]) - 1, # right_arm
112
+ np.array([13, 14, 15, 16]) - 1, # left_leg
113
+ np.array([17, 18, 19, 20]) - 1, # right_leg
114
+ np.array([1, 2, 3, 4, 21]) - 1 # torso
115
+ ]
116
+
117
+ warmup_iter = 3
118
+ test_iter = 10
119
+ sys.path.append('/home/chenzhan/mywork/MST-GCN/')
120
+ from thop import profile
121
+ basic_channels = 112
122
+ cfgs = {
123
+ 'num_class': 2,
124
+ 'num_point': 25,
125
+ 'num_person': 1,
126
+ 'block_args': [[2, basic_channels, False, 1],
127
+ [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1],
128
+ [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1],
129
+ [basic_channels*2, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]],
130
+ 'graph': 'graph.ntu_rgb_d.Graph',
131
+ 'graph_args': {'labeling_mode': 'spatial'},
132
+ 'kernel_size': 9,
133
+ 'block_type': 'ms',
134
+ 'reduct_ratio': 2,
135
+ 'expand_ratio': 0,
136
+ 't_scale': 4,
137
+ 'layer_type': 'sep',
138
+ 'act': 'relu',
139
+ 's_scale': 4,
140
+ 'atten': 'stcja',
141
+ 'bias': True,
142
+ 'parts': parts
143
+ }
144
+
145
+ model = Model(**cfgs)
146
+
147
+ N, C, T, V, M = 4, 2, 16, 25, 1
148
+ inputs = torch.rand(N, C, T, V, M)
149
+
150
+ for i in range(warmup_iter + test_iter):
151
+ if i == warmup_iter:
152
+ start_time = time.time()
153
+ outputs = model(inputs)
154
+ end_time = time.time()
155
+
156
+ total_time = end_time - start_time
157
+ print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format(
158
+ total_time, test_iter, total_time/test_iter/N))
159
+
160
+ print(outputs[1].size())  # forward() returns (features, logits); print the logits shape
161
+
162
+ hereflops, params = profile(model, inputs=(inputs,), verbose=False)
163
+ print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N))
164
+ print('# Params is {} M'.format(sum(param.numel() for param in model.parameters()) / 10 ** 6))
165
+
166
+
167
+
168
+
ckpt/Others/MST-GCN/ntu60_xsub/xsub_bm/config.yaml ADDED
@@ -0,0 +1,107 @@
1
+ base_lr: 0.15
2
+ batch_size: 8
3
+ config: config/ntu/xsub_bm.yaml
4
+ device:
5
+ - 0
6
+ eval_interval: 5
7
+ feeder: feeders.feeder.Feeder
8
+ ignore_weights: []
9
+ local_rank: 0
10
+ log_interval: 100
11
+ model: model.AEMST_GCN.Model
12
+ model_args:
13
+ act: relu
14
+ atten: None
15
+ bias: true
16
+ block_args:
17
+ - - 3
18
+ - 112
19
+ - false
20
+ - 1
21
+ - - 112
22
+ - 112
23
+ - true
24
+ - 1
25
+ - - 112
26
+ - 112
27
+ - true
28
+ - 1
29
+ - - 112
30
+ - 112
31
+ - true
32
+ - 1
33
+ - - 112
34
+ - 224
35
+ - true
36
+ - 2
37
+ - - 224
38
+ - 224
39
+ - true
40
+ - 1
41
+ - - 224
42
+ - 224
43
+ - true
44
+ - 1
45
+ - - 224
46
+ - 448
47
+ - true
48
+ - 2
49
+ - - 448
50
+ - 448
51
+ - true
52
+ - 1
53
+ - - 448
54
+ - 448
55
+ - true
56
+ - 1
57
+ block_type: ms
58
+ expand_ratio: 0
59
+ graph: graph.ntu_rgb_d.Graph
60
+ graph_args:
61
+ labeling_mode: spatial
62
+ kernel_size: 9
63
+ layer_type: basic
64
+ num_class: 60
65
+ num_person: 2
66
+ num_point: 25
67
+ reduct_ratio: 2
68
+ s_scale: 4
69
+ t_scale: 4
70
+ model_path: ''
71
+ model_saved_name: ./runs/ntu/xsub_bm/runs
72
+ nesterov: true
73
+ num_epoch: 110
74
+ num_worker: 32
75
+ only_train_epoch: 0
76
+ only_train_part: false
77
+ optimizer: SGD
78
+ phase: train
79
+ print_log: true
80
+ save_interval: 1
81
+ save_score: true
82
+ seed: 1
83
+ show_topk:
84
+ - 1
85
+ - 5
86
+ start_epoch: 0
87
+ step:
88
+ - 50
89
+ - 70
90
+ - 90
91
+ test_batch_size: 64
92
+ test_feeder_args:
93
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_bone_motion.npy
94
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl
95
+ train_feeder_args:
96
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_bone_motion.npy
97
+ debug: false
98
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl
99
+ normalization: false
100
+ random_choose: false
101
+ random_move: false
102
+ random_shift: false
103
+ window_size: -1
104
+ warm_up_epoch: 10
105
+ weight_decay: 0.0001
106
+ weights: null
107
+ work_dir: ./work_dir/ntu/xsub_bm
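For reference, the model_args block in this config maps one-to-one onto the constructor of model.AEMST_GCN.Model shown earlier. The snippet below is only a sketch: it assumes PyYAML is installed, that the original MST-GCN repository root (providing the model and graph packages) is on the Python path, and that the config is read from its uploaded location.

import yaml
import torch

# Minimal sketch: instantiate the network from the uploaded config.yaml.
with open('ckpt/Others/MST-GCN/ntu60_xsub/xsub_bm/config.yaml') as f:
    cfg = yaml.safe_load(f)

from model.AEMST_GCN import Model  # the file shown above

net = Model(**cfg['model_args'])   # num_class=60, num_point=25, num_person=2, ...
x = torch.rand(1, 3, 64, 25, 2)    # dummy clip with layout (N, C, T, V, M)
features, logits = net(x)          # forward() returns (features, logits)
print(logits.shape)                # torch.Size([1, 60])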
ckpt/Others/MST-GCN/ntu60_xsub/xsub_bm/epoch1_test_score.pkl ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:921660534cf8af04cc285195b7bc526ec1ef4809c15360c66fde68d2b45f53e7
3
+ size 4979902
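The .pkl score files in this upload are stored through Git LFS, so the three lines above are only a pointer: the oid is the SHA-256 of the actual file and size is its byte count. Below is a minimal sketch for checking a downloaded copy against that pointer; the path is the one used in this upload.

import hashlib
import os

path = 'ckpt/Others/MST-GCN/ntu60_xsub/xsub_bm/epoch1_test_score.pkl'
expected_oid = '921660534cf8af04cc285195b7bc526ec1ef4809c15360c66fde68d2b45f53e7'
expected_size = 4979902

with open(path, 'rb') as f:
    digest = hashlib.sha256(f.read()).hexdigest()

print('size matches:', os.path.getsize(path) == expected_size)
print('sha256 matches:', digest == expected_oid)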
ckpt/Others/MST-GCN/ntu60_xsub/xsub_bm/log.txt ADDED
@@ -0,0 +1,631 @@
1
+ [ Thu Sep 8 17:07:44 2022 ] # generator parameters: 2.896055 M.
2
+ [ Thu Sep 8 17:07:44 2022 ] Parameters:
3
+ {'work_dir': './work_dir/ntu/xsub_bm', 'model_saved_name': './runs/ntu/xsub_bm/runs', 'config': 'config/ntu/xsub_bm.yaml', 'phase': 'train', 'save_score': True, 'seed': 1, 'log_interval': 100, 'save_interval': 1, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_bone_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_bone_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl'}, 'model': 'model.AEMST_GCN.Model', 'model_args': {'num_class': 60, 'num_point': 25, 'num_person': 2, 'block_args': [[3, 112, False, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 224, True, 2], [224, 224, True, 1], [224, 224, True, 1], [224, 448, True, 2], [448, 448, True, 1], [448, 448, True, 1]], 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}, 'kernel_size': 9, 'block_type': 'ms', 'reduct_ratio': 2, 'expand_ratio': 0, 's_scale': 4, 't_scale': 4, 'layer_type': 'basic', 'act': 'relu', 'atten': 'None', 'bias': True}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.15, 'step': [50, 70, 90], 'device': [0], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 8, 'test_batch_size': 64, 'start_epoch': 0, 'model_path': '', 'num_epoch': 110, 'weight_decay': 0.0001, 'only_train_part': False, 'only_train_epoch': 0, 'warm_up_epoch': 10, 'local_rank': 0}
4
+
5
+ [ Thu Sep 8 17:07:44 2022 ] Training epoch: 1
6
+ [ Thu Sep 8 17:07:44 2022 ] Learning rate: 0.015
7
+ [ Thu Sep 8 17:11:09 2022 ] Mean training loss: 3.2652.
8
+ [ Thu Sep 8 17:11:09 2022 ] Time consumption: [Data]02%, [Network]98%
9
+ [ Thu Sep 8 17:11:09 2022 ] Training epoch: 2
10
+ [ Thu Sep 8 17:11:09 2022 ] Learning rate: 0.03
11
+ [ Thu Sep 8 17:14:33 2022 ] Mean training loss: 2.4220.
12
+ [ Thu Sep 8 17:14:33 2022 ] Time consumption: [Data]01%, [Network]98%
13
+ [ Thu Sep 8 17:14:33 2022 ] Training epoch: 3
14
+ [ Thu Sep 8 17:14:33 2022 ] Learning rate: 0.045
15
+ [ Thu Sep 8 17:17:56 2022 ] Mean training loss: 2.0056.
16
+ [ Thu Sep 8 17:17:56 2022 ] Time consumption: [Data]01%, [Network]99%
17
+ [ Thu Sep 8 17:17:56 2022 ] Training epoch: 4
18
+ [ Thu Sep 8 17:17:56 2022 ] Learning rate: 0.06
19
+ [ Thu Sep 8 17:21:19 2022 ] Mean training loss: 1.6940.
20
+ [ Thu Sep 8 17:21:19 2022 ] Time consumption: [Data]01%, [Network]99%
21
+ [ Thu Sep 8 17:21:19 2022 ] Training epoch: 5
22
+ [ Thu Sep 8 17:21:19 2022 ] Learning rate: 0.075
23
+ [ Thu Sep 8 17:24:42 2022 ] Mean training loss: 1.5027.
24
+ [ Thu Sep 8 17:24:42 2022 ] Time consumption: [Data]01%, [Network]99%
25
+ [ Thu Sep 8 17:24:42 2022 ] Training epoch: 6
26
+ [ Thu Sep 8 17:24:42 2022 ] Learning rate: 0.09
27
+ [ Thu Sep 8 17:28:05 2022 ] Mean training loss: 1.3904.
28
+ [ Thu Sep 8 17:28:05 2022 ] Time consumption: [Data]01%, [Network]99%
29
+ [ Thu Sep 8 17:28:05 2022 ] Training epoch: 7
30
+ [ Thu Sep 8 17:28:05 2022 ] Learning rate: 0.10500000000000001
31
+ [ Thu Sep 8 17:31:28 2022 ] Mean training loss: 1.3177.
32
+ [ Thu Sep 8 17:31:28 2022 ] Time consumption: [Data]01%, [Network]99%
33
+ [ Thu Sep 8 17:31:28 2022 ] Training epoch: 8
34
+ [ Thu Sep 8 17:31:28 2022 ] Learning rate: 0.12
35
+ [ Thu Sep 8 17:34:51 2022 ] Mean training loss: 1.2143.
36
+ [ Thu Sep 8 17:34:51 2022 ] Time consumption: [Data]01%, [Network]99%
37
+ [ Thu Sep 8 17:34:51 2022 ] Training epoch: 9
38
+ [ Thu Sep 8 17:34:51 2022 ] Learning rate: 0.13499999999999998
39
+ [ Thu Sep 8 17:38:13 2022 ] Mean training loss: 1.1919.
40
+ [ Thu Sep 8 17:38:13 2022 ] Time consumption: [Data]01%, [Network]99%
41
+ [ Thu Sep 8 17:38:13 2022 ] Training epoch: 10
42
+ [ Thu Sep 8 17:38:13 2022 ] Learning rate: 0.15
43
+ [ Thu Sep 8 17:41:35 2022 ] Mean training loss: 1.1271.
44
+ [ Thu Sep 8 17:41:35 2022 ] Time consumption: [Data]01%, [Network]99%
45
+ [ Thu Sep 8 17:41:35 2022 ] Training epoch: 11
46
+ [ Thu Sep 8 17:41:35 2022 ] Learning rate: 0.15
47
+ [ Thu Sep 8 17:44:58 2022 ] Mean training loss: 1.0812.
48
+ [ Thu Sep 8 17:44:58 2022 ] Time consumption: [Data]01%, [Network]99%
49
+ [ Thu Sep 8 17:44:58 2022 ] Training epoch: 12
50
+ [ Thu Sep 8 17:44:58 2022 ] Learning rate: 0.15
51
+ [ Thu Sep 8 17:48:21 2022 ] Mean training loss: 1.0247.
52
+ [ Thu Sep 8 17:48:21 2022 ] Time consumption: [Data]01%, [Network]98%
53
+ [ Thu Sep 8 17:48:21 2022 ] Training epoch: 13
54
+ [ Thu Sep 8 17:48:21 2022 ] Learning rate: 0.15
55
+ [ Thu Sep 8 17:51:44 2022 ] Mean training loss: 0.9885.
56
+ [ Thu Sep 8 17:51:44 2022 ] Time consumption: [Data]01%, [Network]98%
57
+ [ Thu Sep 8 17:51:44 2022 ] Training epoch: 14
58
+ [ Thu Sep 8 17:51:44 2022 ] Learning rate: 0.15
59
+ [ Thu Sep 8 17:55:07 2022 ] Mean training loss: 0.9666.
60
+ [ Thu Sep 8 17:55:07 2022 ] Time consumption: [Data]01%, [Network]98%
61
+ [ Thu Sep 8 17:55:07 2022 ] Training epoch: 15
62
+ [ Thu Sep 8 17:55:07 2022 ] Learning rate: 0.15
63
+ [ Thu Sep 8 17:58:29 2022 ] Mean training loss: 0.9116.
64
+ [ Thu Sep 8 17:58:29 2022 ] Time consumption: [Data]01%, [Network]99%
65
+ [ Thu Sep 8 17:58:29 2022 ] Training epoch: 16
66
+ [ Thu Sep 8 17:58:29 2022 ] Learning rate: 0.15
67
+ [ Thu Sep 8 18:01:52 2022 ] Mean training loss: 0.9041.
68
+ [ Thu Sep 8 18:01:52 2022 ] Time consumption: [Data]01%, [Network]99%
69
+ [ Thu Sep 8 18:01:52 2022 ] Training epoch: 17
70
+ [ Thu Sep 8 18:01:52 2022 ] Learning rate: 0.15
71
+ [ Thu Sep 8 18:05:15 2022 ] Mean training loss: 0.8950.
72
+ [ Thu Sep 8 18:05:15 2022 ] Time consumption: [Data]01%, [Network]99%
73
+ [ Thu Sep 8 18:05:15 2022 ] Training epoch: 18
74
+ [ Thu Sep 8 18:05:15 2022 ] Learning rate: 0.15
75
+ [ Thu Sep 8 18:08:37 2022 ] Mean training loss: 0.8623.
76
+ [ Thu Sep 8 18:08:37 2022 ] Time consumption: [Data]01%, [Network]99%
77
+ [ Thu Sep 8 18:08:37 2022 ] Training epoch: 19
78
+ [ Thu Sep 8 18:08:37 2022 ] Learning rate: 0.15
79
+ [ Thu Sep 8 18:12:01 2022 ] Mean training loss: 0.8450.
80
+ [ Thu Sep 8 18:12:01 2022 ] Time consumption: [Data]01%, [Network]99%
81
+ [ Thu Sep 8 18:12:01 2022 ] Training epoch: 20
82
+ [ Thu Sep 8 18:12:01 2022 ] Learning rate: 0.15
83
+ [ Thu Sep 8 18:15:24 2022 ] Mean training loss: 0.8082.
84
+ [ Thu Sep 8 18:15:24 2022 ] Time consumption: [Data]01%, [Network]99%
85
+ [ Thu Sep 8 18:15:24 2022 ] Training epoch: 21
86
+ [ Thu Sep 8 18:15:24 2022 ] Learning rate: 0.15
87
+ [ Thu Sep 8 18:18:47 2022 ] Mean training loss: 0.7992.
88
+ [ Thu Sep 8 18:18:47 2022 ] Time consumption: [Data]01%, [Network]99%
89
+ [ Thu Sep 8 18:18:47 2022 ] Training epoch: 22
90
+ [ Thu Sep 8 18:18:47 2022 ] Learning rate: 0.15
91
+ [ Thu Sep 8 18:22:09 2022 ] Mean training loss: 0.7911.
92
+ [ Thu Sep 8 18:22:09 2022 ] Time consumption: [Data]01%, [Network]99%
93
+ [ Thu Sep 8 18:22:09 2022 ] Training epoch: 23
94
+ [ Thu Sep 8 18:22:09 2022 ] Learning rate: 0.15
95
+ [ Thu Sep 8 18:25:32 2022 ] Mean training loss: 0.7590.
96
+ [ Thu Sep 8 18:25:32 2022 ] Time consumption: [Data]01%, [Network]99%
97
+ [ Thu Sep 8 18:25:32 2022 ] Training epoch: 24
98
+ [ Thu Sep 8 18:25:32 2022 ] Learning rate: 0.15
99
+ [ Thu Sep 8 18:28:54 2022 ] Mean training loss: 0.7569.
100
+ [ Thu Sep 8 18:28:54 2022 ] Time consumption: [Data]01%, [Network]99%
101
+ [ Thu Sep 8 18:28:54 2022 ] Training epoch: 25
102
+ [ Thu Sep 8 18:28:54 2022 ] Learning rate: 0.15
103
+ [ Thu Sep 8 18:32:16 2022 ] Mean training loss: 0.7495.
104
+ [ Thu Sep 8 18:32:16 2022 ] Time consumption: [Data]01%, [Network]99%
105
+ [ Thu Sep 8 18:32:16 2022 ] Training epoch: 26
106
+ [ Thu Sep 8 18:32:16 2022 ] Learning rate: 0.15
107
+ [ Thu Sep 8 18:35:39 2022 ] Mean training loss: 0.7439.
108
+ [ Thu Sep 8 18:35:39 2022 ] Time consumption: [Data]01%, [Network]99%
109
+ [ Thu Sep 8 18:35:39 2022 ] Training epoch: 27
110
+ [ Thu Sep 8 18:35:39 2022 ] Learning rate: 0.15
111
+ [ Thu Sep 8 18:39:01 2022 ] Mean training loss: 0.7216.
112
+ [ Thu Sep 8 18:39:01 2022 ] Time consumption: [Data]01%, [Network]99%
113
+ [ Thu Sep 8 18:39:01 2022 ] Training epoch: 28
114
+ [ Thu Sep 8 18:39:01 2022 ] Learning rate: 0.15
115
+ [ Thu Sep 8 18:42:24 2022 ] Mean training loss: 0.7199.
116
+ [ Thu Sep 8 18:42:24 2022 ] Time consumption: [Data]01%, [Network]99%
117
+ [ Thu Sep 8 18:42:24 2022 ] Training epoch: 29
118
+ [ Thu Sep 8 18:42:24 2022 ] Learning rate: 0.15
119
+ [ Thu Sep 8 18:45:48 2022 ] Mean training loss: 0.6933.
120
+ [ Thu Sep 8 18:45:48 2022 ] Time consumption: [Data]01%, [Network]99%
121
+ [ Thu Sep 8 18:45:48 2022 ] Training epoch: 30
122
+ [ Thu Sep 8 18:45:48 2022 ] Learning rate: 0.15
123
+ [ Thu Sep 8 18:49:11 2022 ] Mean training loss: 0.6866.
124
+ [ Thu Sep 8 18:49:11 2022 ] Time consumption: [Data]01%, [Network]99%
125
+ [ Thu Sep 8 18:49:11 2022 ] Training epoch: 31
126
+ [ Thu Sep 8 18:49:11 2022 ] Learning rate: 0.15
127
+ [ Thu Sep 8 18:52:34 2022 ] Mean training loss: 0.6778.
128
+ [ Thu Sep 8 18:52:34 2022 ] Time consumption: [Data]01%, [Network]99%
129
+ [ Thu Sep 8 18:52:34 2022 ] Training epoch: 32
130
+ [ Thu Sep 8 18:52:34 2022 ] Learning rate: 0.15
131
+ [ Thu Sep 8 18:55:57 2022 ] Mean training loss: 0.6950.
132
+ [ Thu Sep 8 18:55:57 2022 ] Time consumption: [Data]01%, [Network]99%
133
+ [ Thu Sep 8 18:55:57 2022 ] Training epoch: 33
134
+ [ Thu Sep 8 18:55:57 2022 ] Learning rate: 0.15
135
+ [ Thu Sep 8 18:59:20 2022 ] Mean training loss: 0.6740.
136
+ [ Thu Sep 8 18:59:20 2022 ] Time consumption: [Data]01%, [Network]99%
137
+ [ Thu Sep 8 18:59:20 2022 ] Training epoch: 34
138
+ [ Thu Sep 8 18:59:20 2022 ] Learning rate: 0.15
139
+ [ Thu Sep 8 19:02:44 2022 ] Mean training loss: 0.6846.
140
+ [ Thu Sep 8 19:02:44 2022 ] Time consumption: [Data]01%, [Network]98%
141
+ [ Thu Sep 8 19:02:44 2022 ] Training epoch: 35
142
+ [ Thu Sep 8 19:02:44 2022 ] Learning rate: 0.15
143
+ [ Thu Sep 8 19:06:08 2022 ] Mean training loss: 0.6776.
144
+ [ Thu Sep 8 19:06:08 2022 ] Time consumption: [Data]01%, [Network]99%
145
+ [ Thu Sep 8 19:06:08 2022 ] Training epoch: 36
146
+ [ Thu Sep 8 19:06:08 2022 ] Learning rate: 0.15
147
+ [ Thu Sep 8 19:09:31 2022 ] Mean training loss: 0.6309.
148
+ [ Thu Sep 8 19:09:31 2022 ] Time consumption: [Data]01%, [Network]99%
149
+ [ Thu Sep 8 19:09:31 2022 ] Training epoch: 37
150
+ [ Thu Sep 8 19:09:31 2022 ] Learning rate: 0.15
151
+ [ Thu Sep 8 19:12:55 2022 ] Mean training loss: 0.6563.
152
+ [ Thu Sep 8 19:12:55 2022 ] Time consumption: [Data]01%, [Network]98%
153
+ [ Thu Sep 8 19:12:55 2022 ] Training epoch: 38
154
+ [ Thu Sep 8 19:12:55 2022 ] Learning rate: 0.15
155
+ [ Thu Sep 8 19:16:17 2022 ] Mean training loss: 0.6415.
156
+ [ Thu Sep 8 19:16:17 2022 ] Time consumption: [Data]01%, [Network]99%
157
+ [ Thu Sep 8 19:16:17 2022 ] Training epoch: 39
158
+ [ Thu Sep 8 19:16:17 2022 ] Learning rate: 0.15
159
+ [ Thu Sep 8 19:19:40 2022 ] Mean training loss: 0.6491.
160
+ [ Thu Sep 8 19:19:40 2022 ] Time consumption: [Data]01%, [Network]99%
161
+ [ Thu Sep 8 19:19:40 2022 ] Training epoch: 40
162
+ [ Thu Sep 8 19:19:40 2022 ] Learning rate: 0.15
163
+ [ Thu Sep 8 19:23:02 2022 ] Mean training loss: 0.6362.
164
+ [ Thu Sep 8 19:23:02 2022 ] Time consumption: [Data]01%, [Network]99%
165
+ [ Thu Sep 8 19:23:02 2022 ] Training epoch: 41
166
+ [ Thu Sep 8 19:23:02 2022 ] Learning rate: 0.15
167
+ [ Thu Sep 8 19:26:25 2022 ] Mean training loss: 0.6469.
168
+ [ Thu Sep 8 19:26:25 2022 ] Time consumption: [Data]01%, [Network]99%
169
+ [ Thu Sep 8 19:26:25 2022 ] Training epoch: 42
170
+ [ Thu Sep 8 19:26:25 2022 ] Learning rate: 0.15
171
+ [ Thu Sep 8 19:29:48 2022 ] Mean training loss: 0.6334.
172
+ [ Thu Sep 8 19:29:48 2022 ] Time consumption: [Data]01%, [Network]99%
173
+ [ Thu Sep 8 19:29:48 2022 ] Training epoch: 43
174
+ [ Thu Sep 8 19:29:48 2022 ] Learning rate: 0.15
175
+ [ Thu Sep 8 19:33:11 2022 ] Mean training loss: 0.6247.
176
+ [ Thu Sep 8 19:33:11 2022 ] Time consumption: [Data]01%, [Network]99%
177
+ [ Thu Sep 8 19:33:11 2022 ] Training epoch: 44
178
+ [ Thu Sep 8 19:33:11 2022 ] Learning rate: 0.15
179
+ [ Thu Sep 8 19:36:35 2022 ] Mean training loss: 0.6268.
180
+ [ Thu Sep 8 19:36:35 2022 ] Time consumption: [Data]01%, [Network]99%
181
+ [ Thu Sep 8 19:36:35 2022 ] Training epoch: 45
182
+ [ Thu Sep 8 19:36:35 2022 ] Learning rate: 0.15
183
+ [ Thu Sep 8 19:39:58 2022 ] Mean training loss: 0.6221.
184
+ [ Thu Sep 8 19:39:58 2022 ] Time consumption: [Data]01%, [Network]99%
185
+ [ Thu Sep 8 19:39:58 2022 ] Training epoch: 46
186
+ [ Thu Sep 8 19:39:58 2022 ] Learning rate: 0.15
187
+ [ Thu Sep 8 19:43:21 2022 ] Mean training loss: 0.5956.
188
+ [ Thu Sep 8 19:43:21 2022 ] Time consumption: [Data]01%, [Network]99%
189
+ [ Thu Sep 8 19:43:21 2022 ] Training epoch: 47
190
+ [ Thu Sep 8 19:43:21 2022 ] Learning rate: 0.15
191
+ [ Thu Sep 8 19:46:44 2022 ] Mean training loss: 0.6218.
192
+ [ Thu Sep 8 19:46:44 2022 ] Time consumption: [Data]01%, [Network]99%
193
+ [ Thu Sep 8 19:46:44 2022 ] Training epoch: 48
194
+ [ Thu Sep 8 19:46:44 2022 ] Learning rate: 0.15
195
+ [ Thu Sep 8 19:50:06 2022 ] Mean training loss: 0.6134.
196
+ [ Thu Sep 8 19:50:06 2022 ] Time consumption: [Data]01%, [Network]99%
197
+ [ Thu Sep 8 19:50:06 2022 ] Training epoch: 49
198
+ [ Thu Sep 8 19:50:06 2022 ] Learning rate: 0.15
199
+ [ Thu Sep 8 19:53:29 2022 ] Mean training loss: 0.6152.
200
+ [ Thu Sep 8 19:53:29 2022 ] Time consumption: [Data]01%, [Network]99%
201
+ [ Thu Sep 8 19:53:29 2022 ] Training epoch: 50
202
+ [ Thu Sep 8 19:53:29 2022 ] Learning rate: 0.15
203
+ [ Thu Sep 8 19:56:52 2022 ] Mean training loss: 0.6170.
204
+ [ Thu Sep 8 19:56:52 2022 ] Time consumption: [Data]01%, [Network]99%
205
+ [ Thu Sep 8 19:56:52 2022 ] Training epoch: 51
206
+ [ Thu Sep 8 19:56:52 2022 ] Learning rate: 0.015
207
+ [ Thu Sep 8 20:00:15 2022 ] Mean training loss: 0.2832.
208
+ [ Thu Sep 8 20:00:15 2022 ] Time consumption: [Data]01%, [Network]99%
209
+ [ Thu Sep 8 20:00:15 2022 ] Eval epoch: 51
210
+ [ Thu Sep 8 20:02:29 2022 ] Epoch 51 Curr Acc: (10756/16487)65.24%
211
+ [ Thu Sep 8 20:02:29 2022 ] Epoch 51 Best Acc 65.24%
212
+ [ Thu Sep 8 20:02:29 2022 ] Training epoch: 52
213
+ [ Thu Sep 8 20:02:29 2022 ] Learning rate: 0.015
214
+ [ Thu Sep 8 20:05:52 2022 ] Mean training loss: 0.1790.
215
+ [ Thu Sep 8 20:05:52 2022 ] Time consumption: [Data]01%, [Network]99%
216
+ [ Thu Sep 8 20:05:52 2022 ] Eval epoch: 52
217
+ [ Thu Sep 8 20:08:05 2022 ] Epoch 52 Curr Acc: (10904/16487)66.14%
218
+ [ Thu Sep 8 20:08:05 2022 ] Epoch 52 Best Acc 66.14%
219
+ [ Thu Sep 8 20:08:05 2022 ] Training epoch: 53
220
+ [ Thu Sep 8 20:08:05 2022 ] Learning rate: 0.015
221
+ [ Thu Sep 8 20:11:29 2022 ] Mean training loss: 0.1376.
222
+ [ Thu Sep 8 20:11:29 2022 ] Time consumption: [Data]01%, [Network]99%
223
+ [ Thu Sep 8 20:11:29 2022 ] Eval epoch: 53
224
+ [ Thu Sep 8 20:13:42 2022 ] Epoch 53 Curr Acc: (10856/16487)65.85%
225
+ [ Thu Sep 8 20:13:42 2022 ] Epoch 52 Best Acc 66.14%
226
+ [ Thu Sep 8 20:13:42 2022 ] Training epoch: 54
227
+ [ Thu Sep 8 20:13:42 2022 ] Learning rate: 0.015
228
+ [ Thu Sep 8 20:17:06 2022 ] Mean training loss: 0.1071.
229
+ [ Thu Sep 8 20:17:06 2022 ] Time consumption: [Data]01%, [Network]99%
230
+ [ Thu Sep 8 20:17:06 2022 ] Eval epoch: 54
231
+ [ Thu Sep 8 20:19:19 2022 ] Epoch 54 Curr Acc: (10995/16487)66.69%
232
+ [ Thu Sep 8 20:19:19 2022 ] Epoch 54 Best Acc 66.69%
233
+ [ Thu Sep 8 20:19:19 2022 ] Training epoch: 55
234
+ [ Thu Sep 8 20:19:19 2022 ] Learning rate: 0.015
235
+ [ Thu Sep 8 20:22:42 2022 ] Mean training loss: 0.0883.
236
+ [ Thu Sep 8 20:22:42 2022 ] Time consumption: [Data]01%, [Network]99%
237
+ [ Thu Sep 8 20:22:42 2022 ] Eval epoch: 55
238
+ [ Thu Sep 8 20:24:56 2022 ] Epoch 55 Curr Acc: (11004/16487)66.74%
239
+ [ Thu Sep 8 20:24:56 2022 ] Epoch 55 Best Acc 66.74%
240
+ [ Thu Sep 8 20:24:56 2022 ] Training epoch: 56
241
+ [ Thu Sep 8 20:24:56 2022 ] Learning rate: 0.015
242
+ [ Thu Sep 8 20:28:18 2022 ] Mean training loss: 0.0733.
243
+ [ Thu Sep 8 20:28:18 2022 ] Time consumption: [Data]01%, [Network]99%
244
+ [ Thu Sep 8 20:28:18 2022 ] Eval epoch: 56
245
+ [ Thu Sep 8 20:30:32 2022 ] Epoch 56 Curr Acc: (10605/16487)64.32%
246
+ [ Thu Sep 8 20:30:32 2022 ] Epoch 55 Best Acc 66.74%
247
+ [ Thu Sep 8 20:30:32 2022 ] Training epoch: 57
248
+ [ Thu Sep 8 20:30:32 2022 ] Learning rate: 0.015
249
+ [ Thu Sep 8 20:33:56 2022 ] Mean training loss: 0.0621.
250
+ [ Thu Sep 8 20:33:56 2022 ] Time consumption: [Data]01%, [Network]99%
251
+ [ Thu Sep 8 20:33:56 2022 ] Eval epoch: 57
252
+ [ Thu Sep 8 20:36:09 2022 ] Epoch 57 Curr Acc: (10745/16487)65.17%
253
+ [ Thu Sep 8 20:36:09 2022 ] Epoch 55 Best Acc 66.74%
254
+ [ Thu Sep 8 20:36:09 2022 ] Training epoch: 58
255
+ [ Thu Sep 8 20:36:09 2022 ] Learning rate: 0.015
256
+ [ Thu Sep 8 20:39:32 2022 ] Mean training loss: 0.0508.
257
+ [ Thu Sep 8 20:39:32 2022 ] Time consumption: [Data]01%, [Network]99%
258
+ [ Thu Sep 8 20:39:32 2022 ] Eval epoch: 58
259
+ [ Thu Sep 8 20:41:45 2022 ] Epoch 58 Curr Acc: (11035/16487)66.93%
260
+ [ Thu Sep 8 20:41:45 2022 ] Epoch 58 Best Acc 66.93%
261
+ [ Thu Sep 8 20:41:45 2022 ] Training epoch: 59
262
+ [ Thu Sep 8 20:41:45 2022 ] Learning rate: 0.015
263
+ [ Thu Sep 8 20:45:08 2022 ] Mean training loss: 0.0404.
264
+ [ Thu Sep 8 20:45:08 2022 ] Time consumption: [Data]01%, [Network]99%
265
+ [ Thu Sep 8 20:45:08 2022 ] Eval epoch: 59
266
+ [ Thu Sep 8 20:47:21 2022 ] Epoch 59 Curr Acc: (10968/16487)66.53%
267
+ [ Thu Sep 8 20:47:21 2022 ] Epoch 58 Best Acc 66.93%
268
+ [ Thu Sep 8 20:47:21 2022 ] Training epoch: 60
269
+ [ Thu Sep 8 20:47:21 2022 ] Learning rate: 0.015
270
+ [ Thu Sep 8 20:50:44 2022 ] Mean training loss: 0.0337.
271
+ [ Thu Sep 8 20:50:44 2022 ] Time consumption: [Data]01%, [Network]99%
272
+ [ Thu Sep 8 20:50:44 2022 ] Eval epoch: 60
273
+ [ Thu Sep 8 20:52:57 2022 ] Epoch 60 Curr Acc: (10982/16487)66.61%
274
+ [ Thu Sep 8 20:52:57 2022 ] Epoch 58 Best Acc 66.93%
275
+ [ Thu Sep 8 20:52:57 2022 ] Training epoch: 61
276
+ [ Thu Sep 8 20:52:57 2022 ] Learning rate: 0.015
277
+ [ Thu Sep 8 20:56:21 2022 ] Mean training loss: 0.0337.
278
+ [ Thu Sep 8 20:56:21 2022 ] Time consumption: [Data]01%, [Network]99%
279
+ [ Thu Sep 8 20:56:21 2022 ] Eval epoch: 61
280
+ [ Thu Sep 8 20:58:34 2022 ] Epoch 61 Curr Acc: (10856/16487)65.85%
281
+ [ Thu Sep 8 20:58:34 2022 ] Epoch 58 Best Acc 66.93%
282
+ [ Thu Sep 8 20:58:34 2022 ] Training epoch: 62
283
+ [ Thu Sep 8 20:58:34 2022 ] Learning rate: 0.015
284
+ [ Thu Sep 8 21:01:57 2022 ] Mean training loss: 0.0259.
285
+ [ Thu Sep 8 21:01:57 2022 ] Time consumption: [Data]01%, [Network]99%
286
+ [ Thu Sep 8 21:01:57 2022 ] Eval epoch: 62
287
+ [ Thu Sep 8 21:04:10 2022 ] Epoch 62 Curr Acc: (10751/16487)65.21%
288
+ [ Thu Sep 8 21:04:10 2022 ] Epoch 58 Best Acc 66.93%
289
+ [ Thu Sep 8 21:04:10 2022 ] Training epoch: 63
290
+ [ Thu Sep 8 21:04:10 2022 ] Learning rate: 0.015
291
+ [ Thu Sep 8 21:07:33 2022 ] Mean training loss: 0.0283.
292
+ [ Thu Sep 8 21:07:33 2022 ] Time consumption: [Data]01%, [Network]99%
293
+ [ Thu Sep 8 21:07:33 2022 ] Eval epoch: 63
294
+ [ Thu Sep 8 21:09:46 2022 ] Epoch 63 Curr Acc: (10894/16487)66.08%
295
+ [ Thu Sep 8 21:09:46 2022 ] Epoch 58 Best Acc 66.93%
296
+ [ Thu Sep 8 21:09:46 2022 ] Training epoch: 64
297
+ [ Thu Sep 8 21:09:46 2022 ] Learning rate: 0.015
298
+ [ Thu Sep 8 21:13:09 2022 ] Mean training loss: 0.0221.
299
+ [ Thu Sep 8 21:13:09 2022 ] Time consumption: [Data]01%, [Network]99%
300
+ [ Thu Sep 8 21:13:09 2022 ] Eval epoch: 64
301
+ [ Thu Sep 8 21:15:22 2022 ] Epoch 64 Curr Acc: (10604/16487)64.32%
302
+ [ Thu Sep 8 21:15:22 2022 ] Epoch 58 Best Acc 66.93%
303
+ [ Thu Sep 8 21:15:22 2022 ] Training epoch: 65
304
+ [ Thu Sep 8 21:15:22 2022 ] Learning rate: 0.015
305
+ [ Thu Sep 8 21:18:45 2022 ] Mean training loss: 0.0232.
306
+ [ Thu Sep 8 21:18:45 2022 ] Time consumption: [Data]01%, [Network]99%
307
+ [ Thu Sep 8 21:18:45 2022 ] Eval epoch: 65
308
+ [ Thu Sep 8 21:20:59 2022 ] Epoch 65 Curr Acc: (11056/16487)67.06%
309
+ [ Thu Sep 8 21:20:59 2022 ] Epoch 65 Best Acc 67.06%
310
+ [ Thu Sep 8 21:20:59 2022 ] Training epoch: 66
311
+ [ Thu Sep 8 21:20:59 2022 ] Learning rate: 0.015
312
+ [ Thu Sep 8 21:24:21 2022 ] Mean training loss: 0.0244.
313
+ [ Thu Sep 8 21:24:21 2022 ] Time consumption: [Data]01%, [Network]99%
314
+ [ Thu Sep 8 21:24:21 2022 ] Eval epoch: 66
315
+ [ Thu Sep 8 21:26:35 2022 ] Epoch 66 Curr Acc: (11123/16487)67.47%
316
+ [ Thu Sep 8 21:26:35 2022 ] Epoch 66 Best Acc 67.47%
317
+ [ Thu Sep 8 21:26:35 2022 ] Training epoch: 67
318
+ [ Thu Sep 8 21:26:35 2022 ] Learning rate: 0.015
319
+ [ Thu Sep 8 21:29:57 2022 ] Mean training loss: 0.0222.
320
+ [ Thu Sep 8 21:29:57 2022 ] Time consumption: [Data]01%, [Network]99%
321
+ [ Thu Sep 8 21:29:57 2022 ] Eval epoch: 67
322
+ [ Thu Sep 8 21:32:11 2022 ] Epoch 67 Curr Acc: (10701/16487)64.91%
323
+ [ Thu Sep 8 21:32:11 2022 ] Epoch 66 Best Acc 67.47%
324
+ [ Thu Sep 8 21:32:11 2022 ] Training epoch: 68
325
+ [ Thu Sep 8 21:32:11 2022 ] Learning rate: 0.015
326
+ [ Thu Sep 8 21:35:34 2022 ] Mean training loss: 0.0180.
327
+ [ Thu Sep 8 21:35:34 2022 ] Time consumption: [Data]01%, [Network]99%
328
+ [ Thu Sep 8 21:35:34 2022 ] Eval epoch: 68
329
+ [ Thu Sep 8 21:37:47 2022 ] Epoch 68 Curr Acc: (11027/16487)66.88%
330
+ [ Thu Sep 8 21:37:47 2022 ] Epoch 66 Best Acc 67.47%
331
+ [ Thu Sep 8 21:37:47 2022 ] Training epoch: 69
332
+ [ Thu Sep 8 21:37:47 2022 ] Learning rate: 0.015
333
+ [ Thu Sep 8 21:41:09 2022 ] Mean training loss: 0.0232.
334
+ [ Thu Sep 8 21:41:09 2022 ] Time consumption: [Data]01%, [Network]99%
335
+ [ Thu Sep 8 21:41:10 2022 ] Eval epoch: 69
336
+ [ Thu Sep 8 21:43:23 2022 ] Epoch 69 Curr Acc: (9883/16487)59.94%
337
+ [ Thu Sep 8 21:43:23 2022 ] Epoch 66 Best Acc 67.47%
338
+ [ Thu Sep 8 21:43:23 2022 ] Training epoch: 70
339
+ [ Thu Sep 8 21:43:23 2022 ] Learning rate: 0.015
340
+ [ Thu Sep 8 21:46:46 2022 ] Mean training loss: 0.0215.
341
+ [ Thu Sep 8 21:46:46 2022 ] Time consumption: [Data]01%, [Network]99%
342
+ [ Thu Sep 8 21:46:46 2022 ] Eval epoch: 70
343
+ [ Thu Sep 8 21:48:59 2022 ] Epoch 70 Curr Acc: (10901/16487)66.12%
344
+ [ Thu Sep 8 21:48:59 2022 ] Epoch 66 Best Acc 67.47%
345
+ [ Thu Sep 8 21:48:59 2022 ] Training epoch: 71
346
+ [ Thu Sep 8 21:48:59 2022 ] Learning rate: 0.0015000000000000002
347
+ [ Thu Sep 8 21:52:21 2022 ] Mean training loss: 0.0157.
348
+ [ Thu Sep 8 21:52:21 2022 ] Time consumption: [Data]01%, [Network]99%
349
+ [ Thu Sep 8 21:52:21 2022 ] Eval epoch: 71
350
+ [ Thu Sep 8 21:54:35 2022 ] Epoch 71 Curr Acc: (10940/16487)66.36%
351
+ [ Thu Sep 8 21:54:35 2022 ] Epoch 66 Best Acc 67.47%
352
+ [ Thu Sep 8 21:54:35 2022 ] Training epoch: 72
353
+ [ Thu Sep 8 21:54:35 2022 ] Learning rate: 0.0015000000000000002
354
+ [ Thu Sep 8 21:57:57 2022 ] Mean training loss: 0.0144.
355
+ [ Thu Sep 8 21:57:57 2022 ] Time consumption: [Data]01%, [Network]99%
356
+ [ Thu Sep 8 21:57:57 2022 ] Eval epoch: 72
357
+ [ Thu Sep 8 22:00:10 2022 ] Epoch 72 Curr Acc: (10979/16487)66.59%
358
+ [ Thu Sep 8 22:00:10 2022 ] Epoch 66 Best Acc 67.47%
359
+ [ Thu Sep 8 22:00:10 2022 ] Training epoch: 73
360
+ [ Thu Sep 8 22:00:10 2022 ] Learning rate: 0.0015000000000000002
361
+ [ Thu Sep 8 22:03:32 2022 ] Mean training loss: 0.0136.
362
+ [ Thu Sep 8 22:03:32 2022 ] Time consumption: [Data]01%, [Network]99%
363
+ [ Thu Sep 8 22:03:32 2022 ] Eval epoch: 73
364
+ [ Thu Sep 8 22:05:46 2022 ] Epoch 73 Curr Acc: (11004/16487)66.74%
365
+ [ Thu Sep 8 22:05:46 2022 ] Epoch 66 Best Acc 67.47%
366
+ [ Thu Sep 8 22:05:46 2022 ] Training epoch: 74
367
+ [ Thu Sep 8 22:05:46 2022 ] Learning rate: 0.0015000000000000002
368
+ [ Thu Sep 8 22:09:08 2022 ] Mean training loss: 0.0132.
369
+ [ Thu Sep 8 22:09:08 2022 ] Time consumption: [Data]01%, [Network]99%
370
+ [ Thu Sep 8 22:09:08 2022 ] Eval epoch: 74
371
+ [ Thu Sep 8 22:11:22 2022 ] Epoch 74 Curr Acc: (10837/16487)65.73%
372
+ [ Thu Sep 8 22:11:22 2022 ] Epoch 66 Best Acc 67.47%
373
+ [ Thu Sep 8 22:11:22 2022 ] Training epoch: 75
374
+ [ Thu Sep 8 22:11:22 2022 ] Learning rate: 0.0015000000000000002
375
+ [ Thu Sep 8 22:14:44 2022 ] Mean training loss: 0.0111.
376
+ [ Thu Sep 8 22:14:44 2022 ] Time consumption: [Data]01%, [Network]98%
377
+ [ Thu Sep 8 22:14:44 2022 ] Eval epoch: 75
378
+ [ Thu Sep 8 22:16:57 2022 ] Epoch 75 Curr Acc: (11048/16487)67.01%
379
+ [ Thu Sep 8 22:16:57 2022 ] Epoch 66 Best Acc 67.47%
380
+ [ Thu Sep 8 22:16:57 2022 ] Training epoch: 76
381
+ [ Thu Sep 8 22:16:57 2022 ] Learning rate: 0.0015000000000000002
382
+ [ Thu Sep 8 22:20:20 2022 ] Mean training loss: 0.0130.
383
+ [ Thu Sep 8 22:20:20 2022 ] Time consumption: [Data]01%, [Network]99%
384
+ [ Thu Sep 8 22:20:20 2022 ] Eval epoch: 76
385
+ [ Thu Sep 8 22:22:34 2022 ] Epoch 76 Curr Acc: (10994/16487)66.68%
386
+ [ Thu Sep 8 22:22:34 2022 ] Epoch 66 Best Acc 67.47%
387
+ [ Thu Sep 8 22:22:34 2022 ] Training epoch: 77
388
+ [ Thu Sep 8 22:22:34 2022 ] Learning rate: 0.0015000000000000002
389
+ [ Thu Sep 8 22:25:57 2022 ] Mean training loss: 0.0118.
390
+ [ Thu Sep 8 22:25:57 2022 ] Time consumption: [Data]01%, [Network]99%
391
+ [ Thu Sep 8 22:25:57 2022 ] Eval epoch: 77
392
+ [ Thu Sep 8 22:28:10 2022 ] Epoch 77 Curr Acc: (10816/16487)65.60%
393
+ [ Thu Sep 8 22:28:10 2022 ] Epoch 66 Best Acc 67.47%
394
+ [ Thu Sep 8 22:28:10 2022 ] Training epoch: 78
395
+ [ Thu Sep 8 22:28:10 2022 ] Learning rate: 0.0015000000000000002
396
+ [ Thu Sep 8 22:31:32 2022 ] Mean training loss: 0.0122.
397
+ [ Thu Sep 8 22:31:32 2022 ] Time consumption: [Data]01%, [Network]99%
398
+ [ Thu Sep 8 22:31:33 2022 ] Eval epoch: 78
399
+ [ Thu Sep 8 22:33:46 2022 ] Epoch 78 Curr Acc: (11105/16487)67.36%
400
+ [ Thu Sep 8 22:33:46 2022 ] Epoch 66 Best Acc 67.47%
401
+ [ Thu Sep 8 22:33:46 2022 ] Training epoch: 79
402
+ [ Thu Sep 8 22:33:46 2022 ] Learning rate: 0.0015000000000000002
403
+ [ Thu Sep 8 22:37:08 2022 ] Mean training loss: 0.0126.
404
+ [ Thu Sep 8 22:37:08 2022 ] Time consumption: [Data]01%, [Network]99%
405
+ [ Thu Sep 8 22:37:08 2022 ] Eval epoch: 79
406
+ [ Thu Sep 8 22:39:21 2022 ] Epoch 79 Curr Acc: (11131/16487)67.51%
407
+ [ Thu Sep 8 22:39:21 2022 ] Epoch 79 Best Acc 67.51%
408
+ [ Thu Sep 8 22:39:21 2022 ] Training epoch: 80
409
+ [ Thu Sep 8 22:39:21 2022 ] Learning rate: 0.0015000000000000002
410
+ [ Thu Sep 8 22:42:44 2022 ] Mean training loss: 0.0109.
411
+ [ Thu Sep 8 22:42:44 2022 ] Time consumption: [Data]01%, [Network]99%
412
+ [ Thu Sep 8 22:42:44 2022 ] Eval epoch: 80
413
+ [ Thu Sep 8 22:44:57 2022 ] Epoch 80 Curr Acc: (10881/16487)66.00%
414
+ [ Thu Sep 8 22:44:57 2022 ] Epoch 79 Best Acc 67.51%
415
+ [ Thu Sep 8 22:44:57 2022 ] Training epoch: 81
416
+ [ Thu Sep 8 22:44:57 2022 ] Learning rate: 0.0015000000000000002
417
+ [ Thu Sep 8 22:48:20 2022 ] Mean training loss: 0.0097.
418
+ [ Thu Sep 8 22:48:20 2022 ] Time consumption: [Data]01%, [Network]99%
419
+ [ Thu Sep 8 22:48:20 2022 ] Eval epoch: 81
420
+ [ Thu Sep 8 22:50:33 2022 ] Epoch 81 Curr Acc: (10938/16487)66.34%
421
+ [ Thu Sep 8 22:50:33 2022 ] Epoch 79 Best Acc 67.51%
422
+ [ Thu Sep 8 22:50:33 2022 ] Training epoch: 82
423
+ [ Thu Sep 8 22:50:33 2022 ] Learning rate: 0.0015000000000000002
424
+ [ Thu Sep 8 22:53:56 2022 ] Mean training loss: 0.0108.
425
+ [ Thu Sep 8 22:53:56 2022 ] Time consumption: [Data]01%, [Network]99%
426
+ [ Thu Sep 8 22:53:56 2022 ] Eval epoch: 82
427
+ [ Thu Sep 8 22:56:09 2022 ] Epoch 82 Curr Acc: (10571/16487)64.12%
428
+ [ Thu Sep 8 22:56:09 2022 ] Epoch 79 Best Acc 67.51%
429
+ [ Thu Sep 8 22:56:09 2022 ] Training epoch: 83
430
+ [ Thu Sep 8 22:56:09 2022 ] Learning rate: 0.0015000000000000002
431
+ [ Thu Sep 8 22:59:32 2022 ] Mean training loss: 0.0117.
432
+ [ Thu Sep 8 22:59:32 2022 ] Time consumption: [Data]01%, [Network]99%
433
+ [ Thu Sep 8 22:59:32 2022 ] Eval epoch: 83
434
+ [ Thu Sep 8 23:01:45 2022 ] Epoch 83 Curr Acc: (10818/16487)65.62%
435
+ [ Thu Sep 8 23:01:45 2022 ] Epoch 79 Best Acc 67.51%
436
+ [ Thu Sep 8 23:01:45 2022 ] Training epoch: 84
437
+ [ Thu Sep 8 23:01:45 2022 ] Learning rate: 0.0015000000000000002
438
+ [ Thu Sep 8 23:05:07 2022 ] Mean training loss: 0.0100.
439
+ [ Thu Sep 8 23:05:07 2022 ] Time consumption: [Data]01%, [Network]99%
440
+ [ Thu Sep 8 23:05:07 2022 ] Eval epoch: 84
441
+ [ Thu Sep 8 23:07:20 2022 ] Epoch 84 Curr Acc: (11069/16487)67.14%
442
+ [ Thu Sep 8 23:07:20 2022 ] Epoch 79 Best Acc 67.51%
443
+ [ Thu Sep 8 23:07:20 2022 ] Training epoch: 85
444
+ [ Thu Sep 8 23:07:20 2022 ] Learning rate: 0.0015000000000000002
445
+ [ Thu Sep 8 23:10:42 2022 ] Mean training loss: 0.0100.
446
+ [ Thu Sep 8 23:10:42 2022 ] Time consumption: [Data]01%, [Network]99%
447
+ [ Thu Sep 8 23:10:42 2022 ] Eval epoch: 85
448
+ [ Thu Sep 8 23:12:55 2022 ] Epoch 85 Curr Acc: (10897/16487)66.09%
449
+ [ Thu Sep 8 23:12:55 2022 ] Epoch 79 Best Acc 67.51%
450
+ [ Thu Sep 8 23:12:55 2022 ] Training epoch: 86
451
+ [ Thu Sep 8 23:12:55 2022 ] Learning rate: 0.0015000000000000002
452
+ [ Thu Sep 8 23:16:18 2022 ] Mean training loss: 0.0105.
453
+ [ Thu Sep 8 23:16:18 2022 ] Time consumption: [Data]01%, [Network]99%
454
+ [ Thu Sep 8 23:16:18 2022 ] Eval epoch: 86
455
+ [ Thu Sep 8 23:18:31 2022 ] Epoch 86 Curr Acc: (10906/16487)66.15%
456
+ [ Thu Sep 8 23:18:31 2022 ] Epoch 79 Best Acc 67.51%
457
+ [ Thu Sep 8 23:18:31 2022 ] Training epoch: 87
458
+ [ Thu Sep 8 23:18:31 2022 ] Learning rate: 0.0015000000000000002
459
+ [ Thu Sep 8 23:21:54 2022 ] Mean training loss: 0.0098.
460
+ [ Thu Sep 8 23:21:54 2022 ] Time consumption: [Data]01%, [Network]99%
461
+ [ Thu Sep 8 23:21:54 2022 ] Eval epoch: 87
462
+ [ Thu Sep 8 23:24:07 2022 ] Epoch 87 Curr Acc: (11064/16487)67.11%
463
+ [ Thu Sep 8 23:24:07 2022 ] Epoch 79 Best Acc 67.51%
464
+ [ Thu Sep 8 23:24:07 2022 ] Training epoch: 88
465
+ [ Thu Sep 8 23:24:07 2022 ] Learning rate: 0.0015000000000000002
466
+ [ Thu Sep 8 23:27:30 2022 ] Mean training loss: 0.0088.
467
+ [ Thu Sep 8 23:27:30 2022 ] Time consumption: [Data]01%, [Network]99%
468
+ [ Thu Sep 8 23:27:30 2022 ] Eval epoch: 88
469
+ [ Thu Sep 8 23:29:43 2022 ] Epoch 88 Curr Acc: (10900/16487)66.11%
470
+ [ Thu Sep 8 23:29:43 2022 ] Epoch 79 Best Acc 67.51%
471
+ [ Thu Sep 8 23:29:43 2022 ] Training epoch: 89
472
+ [ Thu Sep 8 23:29:43 2022 ] Learning rate: 0.0015000000000000002
473
+ [ Thu Sep 8 23:33:06 2022 ] Mean training loss: 0.0093.
474
+ [ Thu Sep 8 23:33:06 2022 ] Time consumption: [Data]01%, [Network]99%
475
+ [ Thu Sep 8 23:33:06 2022 ] Eval epoch: 89
476
+ [ Thu Sep 8 23:35:20 2022 ] Epoch 89 Curr Acc: (11142/16487)67.58%
477
+ [ Thu Sep 8 23:35:20 2022 ] Epoch 89 Best Acc 67.58%
478
+ [ Thu Sep 8 23:35:20 2022 ] Training epoch: 90
479
+ [ Thu Sep 8 23:35:20 2022 ] Learning rate: 0.0015000000000000002
480
+ [ Thu Sep 8 23:38:42 2022 ] Mean training loss: 0.0088.
481
+ [ Thu Sep 8 23:38:42 2022 ] Time consumption: [Data]01%, [Network]99%
482
+ [ Thu Sep 8 23:38:43 2022 ] Eval epoch: 90
483
+ [ Thu Sep 8 23:40:56 2022 ] Epoch 90 Curr Acc: (10824/16487)65.65%
484
+ [ Thu Sep 8 23:40:56 2022 ] Epoch 89 Best Acc 67.58%
485
+ [ Thu Sep 8 23:40:56 2022 ] Training epoch: 91
486
+ [ Thu Sep 8 23:40:56 2022 ] Learning rate: 0.00015000000000000004
487
+ [ Thu Sep 8 23:44:18 2022 ] Mean training loss: 0.0098.
488
+ [ Thu Sep 8 23:44:18 2022 ] Time consumption: [Data]01%, [Network]99%
489
+ [ Thu Sep 8 23:44:18 2022 ] Eval epoch: 91
490
+ [ Thu Sep 8 23:46:32 2022 ] Epoch 91 Curr Acc: (10916/16487)66.21%
491
+ [ Thu Sep 8 23:46:32 2022 ] Epoch 89 Best Acc 67.58%
492
+ [ Thu Sep 8 23:46:32 2022 ] Training epoch: 92
493
+ [ Thu Sep 8 23:46:32 2022 ] Learning rate: 0.00015000000000000004
494
+ [ Thu Sep 8 23:49:55 2022 ] Mean training loss: 0.0099.
495
+ [ Thu Sep 8 23:49:55 2022 ] Time consumption: [Data]01%, [Network]99%
496
+ [ Thu Sep 8 23:49:55 2022 ] Eval epoch: 92
497
+ [ Thu Sep 8 23:52:08 2022 ] Epoch 92 Curr Acc: (11208/16487)67.98%
498
+ [ Thu Sep 8 23:52:08 2022 ] Epoch 92 Best Acc 67.98%
499
+ [ Thu Sep 8 23:52:08 2022 ] Training epoch: 93
500
+ [ Thu Sep 8 23:52:08 2022 ] Learning rate: 0.00015000000000000004
501
+ [ Thu Sep 8 23:55:31 2022 ] Mean training loss: 0.0100.
502
+ [ Thu Sep 8 23:55:31 2022 ] Time consumption: [Data]01%, [Network]99%
503
+ [ Thu Sep 8 23:55:31 2022 ] Eval epoch: 93
504
+ [ Thu Sep 8 23:57:44 2022 ] Epoch 93 Curr Acc: (10882/16487)66.00%
505
+ [ Thu Sep 8 23:57:44 2022 ] Epoch 92 Best Acc 67.98%
506
+ [ Thu Sep 8 23:57:44 2022 ] Training epoch: 94
507
+ [ Thu Sep 8 23:57:44 2022 ] Learning rate: 0.00015000000000000004
508
+ [ Fri Sep 9 00:01:07 2022 ] Mean training loss: 0.0093.
509
+ [ Fri Sep 9 00:01:07 2022 ] Time consumption: [Data]01%, [Network]99%
510
+ [ Fri Sep 9 00:01:07 2022 ] Eval epoch: 94
511
+ [ Fri Sep 9 00:03:21 2022 ] Epoch 94 Curr Acc: (11084/16487)67.23%
512
+ [ Fri Sep 9 00:03:21 2022 ] Epoch 92 Best Acc 67.98%
513
+ [ Fri Sep 9 00:03:21 2022 ] Training epoch: 95
514
+ [ Fri Sep 9 00:03:21 2022 ] Learning rate: 0.00015000000000000004
515
+ [ Fri Sep 9 00:06:44 2022 ] Mean training loss: 0.0097.
516
+ [ Fri Sep 9 00:06:44 2022 ] Time consumption: [Data]01%, [Network]98%
517
+ [ Fri Sep 9 00:06:44 2022 ] Eval epoch: 95
518
+ [ Fri Sep 9 00:08:57 2022 ] Epoch 95 Curr Acc: (11150/16487)67.63%
519
+ [ Fri Sep 9 00:08:57 2022 ] Epoch 92 Best Acc 67.98%
520
+ [ Fri Sep 9 00:08:57 2022 ] Training epoch: 96
521
+ [ Fri Sep 9 00:08:57 2022 ] Learning rate: 0.00015000000000000004
522
+ [ Fri Sep 9 00:12:20 2022 ] Mean training loss: 0.0093.
523
+ [ Fri Sep 9 00:12:20 2022 ] Time consumption: [Data]01%, [Network]99%
524
+ [ Fri Sep 9 00:12:20 2022 ] Eval epoch: 96
525
+ [ Fri Sep 9 00:14:33 2022 ] Epoch 96 Curr Acc: (11081/16487)67.21%
526
+ [ Fri Sep 9 00:14:33 2022 ] Epoch 92 Best Acc 67.98%
527
+ [ Fri Sep 9 00:14:33 2022 ] Training epoch: 97
528
+ [ Fri Sep 9 00:14:33 2022 ] Learning rate: 0.00015000000000000004
529
+ [ Fri Sep 9 00:17:56 2022 ] Mean training loss: 0.0097.
530
+ [ Fri Sep 9 00:17:56 2022 ] Time consumption: [Data]01%, [Network]99%
531
+ [ Fri Sep 9 00:17:56 2022 ] Eval epoch: 97
532
+ [ Fri Sep 9 00:20:09 2022 ] Epoch 97 Curr Acc: (10968/16487)66.53%
533
+ [ Fri Sep 9 00:20:09 2022 ] Epoch 92 Best Acc 67.98%
534
+ [ Fri Sep 9 00:20:09 2022 ] Training epoch: 98
535
+ [ Fri Sep 9 00:20:09 2022 ] Learning rate: 0.00015000000000000004
536
+ [ Fri Sep 9 00:23:32 2022 ] Mean training loss: 0.0097.
537
+ [ Fri Sep 9 00:23:32 2022 ] Time consumption: [Data]01%, [Network]99%
538
+ [ Fri Sep 9 00:23:32 2022 ] Eval epoch: 98
539
+ [ Fri Sep 9 00:25:46 2022 ] Epoch 98 Curr Acc: (11041/16487)66.97%
540
+ [ Fri Sep 9 00:25:46 2022 ] Epoch 92 Best Acc 67.98%
541
+ [ Fri Sep 9 00:25:46 2022 ] Training epoch: 99
542
+ [ Fri Sep 9 00:25:46 2022 ] Learning rate: 0.00015000000000000004
543
+ [ Fri Sep 9 00:29:09 2022 ] Mean training loss: 0.0093.
544
+ [ Fri Sep 9 00:29:09 2022 ] Time consumption: [Data]01%, [Network]99%
545
+ [ Fri Sep 9 00:29:09 2022 ] Eval epoch: 99
546
+ [ Fri Sep 9 00:31:22 2022 ] Epoch 99 Curr Acc: (11002/16487)66.73%
547
+ [ Fri Sep 9 00:31:22 2022 ] Epoch 92 Best Acc 67.98%
548
+ [ Fri Sep 9 00:31:22 2022 ] Training epoch: 100
549
+ [ Fri Sep 9 00:31:22 2022 ] Learning rate: 0.00015000000000000004
550
+ [ Fri Sep 9 00:34:45 2022 ] Mean training loss: 0.0097.
551
+ [ Fri Sep 9 00:34:45 2022 ] Time consumption: [Data]01%, [Network]99%
552
+ [ Fri Sep 9 00:34:45 2022 ] Eval epoch: 100
553
+ [ Fri Sep 9 00:36:59 2022 ] Epoch 100 Curr Acc: (11154/16487)67.65%
554
+ [ Fri Sep 9 00:36:59 2022 ] Epoch 92 Best Acc 67.98%
555
+ [ Fri Sep 9 00:36:59 2022 ] Training epoch: 101
556
+ [ Fri Sep 9 00:36:59 2022 ] Learning rate: 0.00015000000000000004
557
+ [ Fri Sep 9 00:40:22 2022 ] Mean training loss: 0.0089.
558
+ [ Fri Sep 9 00:40:22 2022 ] Time consumption: [Data]01%, [Network]99%
559
+ [ Fri Sep 9 00:40:22 2022 ] Eval epoch: 101
560
+ [ Fri Sep 9 00:42:35 2022 ] Epoch 101 Curr Acc: (10980/16487)66.60%
561
+ [ Fri Sep 9 00:42:35 2022 ] Epoch 92 Best Acc 67.98%
562
+ [ Fri Sep 9 00:42:35 2022 ] Training epoch: 102
563
+ [ Fri Sep 9 00:42:35 2022 ] Learning rate: 0.00015000000000000004
564
+ [ Fri Sep 9 00:45:59 2022 ] Mean training loss: 0.0097.
565
+ [ Fri Sep 9 00:45:59 2022 ] Time consumption: [Data]01%, [Network]99%
566
+ [ Fri Sep 9 00:45:59 2022 ] Eval epoch: 102
567
+ [ Fri Sep 9 00:48:12 2022 ] Epoch 102 Curr Acc: (10838/16487)65.74%
568
+ [ Fri Sep 9 00:48:12 2022 ] Epoch 92 Best Acc 67.98%
569
+ [ Fri Sep 9 00:48:12 2022 ] Training epoch: 103
570
+ [ Fri Sep 9 00:48:12 2022 ] Learning rate: 0.00015000000000000004
571
+ [ Fri Sep 9 00:51:35 2022 ] Mean training loss: 0.0100.
572
+ [ Fri Sep 9 00:51:35 2022 ] Time consumption: [Data]01%, [Network]99%
573
+ [ Fri Sep 9 00:51:35 2022 ] Eval epoch: 103
574
+ [ Fri Sep 9 00:53:48 2022 ] Epoch 103 Curr Acc: (10786/16487)65.42%
575
+ [ Fri Sep 9 00:53:48 2022 ] Epoch 92 Best Acc 67.98%
576
+ [ Fri Sep 9 00:53:48 2022 ] Training epoch: 104
577
+ [ Fri Sep 9 00:53:48 2022 ] Learning rate: 0.00015000000000000004
578
+ [ Fri Sep 9 00:57:11 2022 ] Mean training loss: 0.0086.
579
+ [ Fri Sep 9 00:57:11 2022 ] Time consumption: [Data]01%, [Network]99%
580
+ [ Fri Sep 9 00:57:11 2022 ] Eval epoch: 104
581
+ [ Fri Sep 9 00:59:24 2022 ] Epoch 104 Curr Acc: (10835/16487)65.72%
582
+ [ Fri Sep 9 00:59:24 2022 ] Epoch 92 Best Acc 67.98%
583
+ [ Fri Sep 9 00:59:24 2022 ] Training epoch: 105
584
+ [ Fri Sep 9 00:59:24 2022 ] Learning rate: 0.00015000000000000004
585
+ [ Fri Sep 9 01:02:47 2022 ] Mean training loss: 0.0086.
586
+ [ Fri Sep 9 01:02:47 2022 ] Time consumption: [Data]01%, [Network]98%
587
+ [ Fri Sep 9 01:02:47 2022 ] Eval epoch: 105
588
+ [ Fri Sep 9 01:05:00 2022 ] Epoch 105 Curr Acc: (10931/16487)66.30%
589
+ [ Fri Sep 9 01:05:00 2022 ] Epoch 92 Best Acc 67.98%
590
+ [ Fri Sep 9 01:05:00 2022 ] Training epoch: 106
591
+ [ Fri Sep 9 01:05:00 2022 ] Learning rate: 0.00015000000000000004
592
+ [ Fri Sep 9 01:08:23 2022 ] Mean training loss: 0.0101.
593
+ [ Fri Sep 9 01:08:23 2022 ] Time consumption: [Data]01%, [Network]99%
594
+ [ Fri Sep 9 01:08:23 2022 ] Eval epoch: 106
595
+ [ Fri Sep 9 01:10:36 2022 ] Epoch 106 Curr Acc: (10672/16487)64.73%
596
+ [ Fri Sep 9 01:10:36 2022 ] Epoch 92 Best Acc 67.98%
597
+ [ Fri Sep 9 01:10:36 2022 ] Training epoch: 107
598
+ [ Fri Sep 9 01:10:36 2022 ] Learning rate: 0.00015000000000000004
599
+ [ Fri Sep 9 01:13:59 2022 ] Mean training loss: 0.0087.
600
+ [ Fri Sep 9 01:13:59 2022 ] Time consumption: [Data]01%, [Network]99%
601
+ [ Fri Sep 9 01:13:59 2022 ] Eval epoch: 107
602
+ [ Fri Sep 9 01:16:12 2022 ] Epoch 107 Curr Acc: (10891/16487)66.06%
603
+ [ Fri Sep 9 01:16:12 2022 ] Epoch 92 Best Acc 67.98%
604
+ [ Fri Sep 9 01:16:12 2022 ] Training epoch: 108
605
+ [ Fri Sep 9 01:16:12 2022 ] Learning rate: 0.00015000000000000004
606
+ [ Fri Sep 9 01:19:35 2022 ] Mean training loss: 0.0089.
607
+ [ Fri Sep 9 01:19:35 2022 ] Time consumption: [Data]01%, [Network]99%
608
+ [ Fri Sep 9 01:19:35 2022 ] Eval epoch: 108
609
+ [ Fri Sep 9 01:21:48 2022 ] Epoch 108 Curr Acc: (10792/16487)65.46%
610
+ [ Fri Sep 9 01:21:48 2022 ] Epoch 92 Best Acc 67.98%
611
+ [ Fri Sep 9 01:21:48 2022 ] Training epoch: 109
612
+ [ Fri Sep 9 01:21:48 2022 ] Learning rate: 0.00015000000000000004
613
+ [ Fri Sep 9 01:25:10 2022 ] Mean training loss: 0.0100.
614
+ [ Fri Sep 9 01:25:10 2022 ] Time consumption: [Data]01%, [Network]99%
615
+ [ Fri Sep 9 01:25:11 2022 ] Eval epoch: 109
616
+ [ Fri Sep 9 01:27:24 2022 ] Epoch 109 Curr Acc: (10863/16487)65.89%
617
+ [ Fri Sep 9 01:27:24 2022 ] Epoch 92 Best Acc 67.98%
618
+ [ Fri Sep 9 01:27:24 2022 ] Training epoch: 110
619
+ [ Fri Sep 9 01:27:24 2022 ] Learning rate: 0.00015000000000000004
620
+ [ Fri Sep 9 01:30:46 2022 ] Mean training loss: 0.0086.
621
+ [ Fri Sep 9 01:30:46 2022 ] Time consumption: [Data]01%, [Network]99%
622
+ [ Fri Sep 9 01:30:46 2022 ] Eval epoch: 110
623
+ [ Fri Sep 9 01:32:59 2022 ] Epoch 110 Curr Acc: (10705/16487)64.93%
624
+ [ Fri Sep 9 01:32:59 2022 ] Epoch 92 Best Acc 67.98%
625
+ [ Fri Sep 9 01:32:59 2022 ] epoch: 92, best accuracy: 0.679808333838782
626
+ [ Fri Sep 9 01:32:59 2022 ] Experiment: ./work_dir/ntu/xsub_bm
627
+ [ Fri Sep 9 01:33:00 2022 ] # generator parameters: 2.896055 M.
628
+ [ Fri Sep 9 01:33:00 2022 ] Load weights from ./runs/ntu/xsub_bm/runs-91-90712.pt.
629
+ [ Fri Sep 9 01:33:00 2022 ] Eval epoch: 1
630
+ [ Fri Sep 9 01:35:12 2022 ] Epoch 1 Curr Acc: (11208/16487)67.98%
631
+ [ Fri Sep 9 01:35:12 2022 ] Epoch 92 Best Acc 67.98%
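Each log.txt in this commit reports one "Curr Acc" line per evaluated epoch, followed by the running "Best Acc", and closes with the best epoch plus a re-evaluation of the saved best checkpoint. A minimal Python sketch, assuming the bracketed log format shown above and a placeholder path, for recovering the best epoch from such a log:

import re

# Matches lines such as "[ Fri Sep 9 01:32:59 2022 ] Epoch 92 Curr Acc: (11208/16487)67.98%".
ACC_RE = re.compile(r"Epoch (\d+) Curr Acc: \((\d+)/(\d+)\)")

def best_epoch(log_path="log.txt"):  # placeholder path; point it at any log.txt in this commit
    best_ep, best_acc = None, 0.0
    with open(log_path) as f:
        for line in f:
            m = ACC_RE.search(line)
            if m:
                epoch, correct, total = (int(g) for g in m.groups())
                acc = correct / total
                if acc > best_acc:
                    best_ep, best_acc = epoch, acc
    return best_ep, best_acc

epoch, acc = best_epoch()
print("best epoch {}: {:.2%}".format(epoch, acc))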
ckpt/Others/MST-GCN/ntu60_xsub/xsub_j/AEMST_GCN.py ADDED
@@ -0,0 +1,168 @@
1
+ import torch
2
+ import torch.nn as nn
3
+ import torch.nn.functional as F
4
+
5
+ import numpy as np
6
+ import math
7
+
8
+ import sys
9
+ sys.path.append('../')
10
+ from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \
11
+ MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \
12
+ MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer
13
+ from model.activations import Activations
14
+ from model.utils import import_class, conv_branch_init, conv_init, bn_init
15
+ from model.attentions import Attention_Layer
16
+
17
+ # import model.attentions
18
+
19
+ __block_type__ = {
20
+ 'basic': (Basic_GCN_layer, Basic_TCN_layer),
21
+ 'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer),
22
+ 'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer),
23
+ 'ms': (MS_GCN_layer, MS_TCN_layer),
24
+ 'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer),
25
+ }
26
+
27
+
28
+ class Model(nn.Module):
29
+ def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten,
30
+ **kwargs):
31
+ super(Model, self).__init__()
32
+ kwargs['act'] = Activations(kwargs['act'])
33
+ atten = None if atten == 'None' else atten
34
+ if graph is None:
35
+ raise ValueError()
36
+ else:
37
+ Graph = import_class(graph)
38
+ self.graph = Graph(**graph_args)
39
+ A = self.graph.A
40
+
41
+ self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point)
42
+
43
+ self.layers = nn.ModuleList()
44
+
45
+ for i, block in enumerate(block_args):
46
+ if i == 0:
47
+ self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2],
48
+ kernel_size=kernel_size, stride=block[3], A=A, block_type='basic',
49
+ atten=None, **kwargs))
50
+ else:
51
+ self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2],
52
+ kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type,
53
+ atten=atten, **kwargs))
54
+
55
+ self.gap = nn.AdaptiveAvgPool2d(1)
56
+ self.fc = nn.Linear(block_args[-1][1], num_class)
57
+
58
+ for m in self.modules():
59
+ if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer):
60
+ for mm in m.modules():
61
+ if isinstance(mm, nn.Conv2d):
62
+ conv_branch_init(mm, self.graph.A.shape[0])
63
+ if isinstance(mm, nn.BatchNorm2d):
64
+ bn_init(mm, 1)
65
+ elif isinstance(m, nn.Conv2d):
66
+ conv_init(m)
67
+ elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d):
68
+ bn_init(m, 1)
69
+ elif isinstance(m, nn.Linear):
70
+ nn.init.normal_(m.weight, 0, math.sqrt(2. / num_class))
71
+
72
+ def forward(self, x):
73
+ N, C, T, V, M = x.size()
74
+
75
+ x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T) # N C T V M --> N M V C T
76
+ x = self.data_bn(x)
77
+ x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
78
+
79
+ for i, layer in enumerate(self.layers):
80
+ x = layer(x)
81
+
82
+ features = x
83
+
84
+ x = self.gap(x).view(N, M, -1).mean(dim=1)
85
+ x = self.fc(x)
86
+
87
+ return features, x
88
+
89
+
90
+ class MST_GCN_block(nn.Module):
91
+ def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs):
92
+ super(MST_GCN_block, self).__init__()
93
+ self.atten = atten
94
+ self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A,
95
+ residual=residual, **kwargs)
96
+ self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride,
97
+ residual=residual, **kwargs)
98
+ if atten is not None:
99
+ self.att = Attention_Layer(out_channels, atten, **kwargs)
100
+
101
+ def forward(self, x):
102
+ return self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x))
103
+
104
+
105
+ if __name__ == '__main__':
106
+ import sys
107
+ import time
108
+
109
+ parts = [
110
+ np.array([5, 6, 7, 8, 22, 23]) - 1, # left_arm
111
+ np.array([9, 10, 11, 12, 24, 25]) - 1, # right_arm
112
+ np.array([13, 14, 15, 16]) - 1, # left_leg
113
+ np.array([17, 18, 19, 20]) - 1, # right_leg
114
+ np.array([1, 2, 3, 4, 21]) - 1 # torso
115
+ ]
116
+
117
+ warmup_iter = 3
118
+ test_iter = 10
119
+ sys.path.append('/home/chenzhan/mywork/MST-GCN/')
120
+ from thop import profile
121
+ basic_channels = 112
122
+ cfgs = {
123
+ 'num_class': 2,
124
+ 'num_point': 25,
125
+ 'num_person': 1,
126
+ 'block_args': [[2, basic_channels, False, 1],
127
+ [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1],
128
+ [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1],
129
+ [basic_channels*2, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]],
130
+ 'graph': 'graph.ntu_rgb_d.Graph',
131
+ 'graph_args': {'labeling_mode': 'spatial'},
132
+ 'kernel_size': 9,
133
+ 'block_type': 'ms',
134
+ 'reduct_ratio': 2,
135
+ 'expand_ratio': 0,
136
+ 't_scale': 4,
137
+ 'layer_type': 'sep',
138
+ 'act': 'relu',
139
+ 's_scale': 4,
140
+ 'atten': 'stcja',
141
+ 'bias': True,
142
+ 'parts': parts
143
+ }
144
+
145
+ model = Model(**cfgs)
146
+
147
+ N, C, T, V, M = 4, 2, 16, 25, 1
148
+ inputs = torch.rand(N, C, T, V, M)
149
+
150
+ for i in range(warmup_iter + test_iter):
151
+ if i == warmup_iter:
152
+ start_time = time.time()
153
+ features, outputs = model(inputs)  # forward() returns (features, logits); unpack so outputs.size() below works
154
+ end_time = time.time()
155
+
156
+ total_time = end_time - start_time
157
+ print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format(
158
+ total_time, test_iter, total_time/test_iter/N))
159
+
160
+ print(outputs.size())
161
+
162
+ hereflops, params = profile(model, inputs=(inputs,), verbose=False)
163
+ print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N))
164
+ print('# Params is {} M'.format(sum(param.numel() for param in model.parameters()) / 10 ** 6))
165
+
166
+
167
+
168
+
ckpt/Others/MST-GCN/ntu60_xsub/xsub_j/config.yaml ADDED
@@ -0,0 +1,107 @@
1
+ base_lr: 0.15
2
+ batch_size: 8
3
+ config: config/ntu/xsub_j.yaml
4
+ device:
5
+ - 0
6
+ eval_interval: 5
7
+ feeder: feeders.feeder.Feeder
8
+ ignore_weights: []
9
+ local_rank: 0
10
+ log_interval: 100
11
+ model: model.AEMST_GCN.Model
12
+ model_args:
13
+ act: relu
14
+ atten: None
15
+ bias: true
16
+ block_args:
17
+ - - 3
18
+ - 112
19
+ - false
20
+ - 1
21
+ - - 112
22
+ - 112
23
+ - true
24
+ - 1
25
+ - - 112
26
+ - 112
27
+ - true
28
+ - 1
29
+ - - 112
30
+ - 112
31
+ - true
32
+ - 1
33
+ - - 112
34
+ - 224
35
+ - true
36
+ - 2
37
+ - - 224
38
+ - 224
39
+ - true
40
+ - 1
41
+ - - 224
42
+ - 224
43
+ - true
44
+ - 1
45
+ - - 224
46
+ - 448
47
+ - true
48
+ - 2
49
+ - - 448
50
+ - 448
51
+ - true
52
+ - 1
53
+ - - 448
54
+ - 448
55
+ - true
56
+ - 1
57
+ block_type: ms
58
+ expand_ratio: 0
59
+ graph: graph.ntu_rgb_d.Graph
60
+ graph_args:
61
+ labeling_mode: spatial
62
+ kernel_size: 9
63
+ layer_type: basic
64
+ num_class: 60
65
+ num_person: 2
66
+ num_point: 25
67
+ reduct_ratio: 2
68
+ s_scale: 4
69
+ t_scale: 4
70
+ model_path: ''
71
+ model_saved_name: ./runs/ntu/xsub_j/runs
72
+ nesterov: true
73
+ num_epoch: 110
74
+ num_worker: 32
75
+ only_train_epoch: 0
76
+ only_train_part: false
77
+ optimizer: SGD
78
+ phase: train
79
+ print_log: true
80
+ save_interval: 1
81
+ save_score: true
82
+ seed: 1
83
+ show_topk:
84
+ - 1
85
+ - 5
86
+ start_epoch: 0
87
+ step:
88
+ - 50
89
+ - 70
90
+ - 90
91
+ test_batch_size: 64
92
+ test_feeder_args:
93
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_joint.npy
94
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl
95
+ train_feeder_args:
96
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_joint.npy
97
+ debug: false
98
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl
99
+ normalization: false
100
+ random_choose: false
101
+ random_move: false
102
+ random_shift: false
103
+ window_size: -1
104
+ warm_up_epoch: 10
105
+ weight_decay: 0.0001
106
+ weights: null
107
+ work_dir: ./work_dir/ntu/xsub_j
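The model_args block in this config.yaml maps directly onto the constructor of Model in the AEMST_GCN.py above (named arguments plus **kwargs). A minimal sketch, assuming PyYAML is installed and the repository's model/ and graph/ packages are importable, of building the network straight from the config:

import yaml

with open("config.yaml") as f:           # placeholder path to the config shown above
    cfg = yaml.safe_load(f)

def import_class(name):                  # same convention as model.utils.import_class
    mod, cls = name.rsplit(".", 1)
    return getattr(__import__(mod, fromlist=[cls]), cls)

Model = import_class(cfg["model"])       # -> model.AEMST_GCN.Model
net = Model(**cfg["model_args"])         # num_class=60, num_point=25, num_person=2, ...
print(sum(p.numel() for p in net.parameters()) / 1e6, "M parameters")  # ~2.9 M per the logs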
ckpt/Others/MST-GCN/ntu60_xsub/xsub_j/epoch1_test_score.pkl ADDED
@@ -0,0 +1,3 @@
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:67269cd275552d91bf25ddf60f2b6ff09b6b67c78439c39e1f342ee49ca819ec
3
+ size 4979902
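epoch1_test_score.pkl is the per-sample score file written when save_score is true, and each stream directory in this commit (for example xsub_j and xsub_jm) carries one. Its exact layout is not shown here; the sketch below assumes the convention common in related skeleton-recognition codebases, a dict mapping sample name to a per-class score vector, and fuses the joint and joint-motion streams by simple score addition. Paths are illustrative:

import pickle
import numpy as np

def load_scores(path):
    # Assumed layout: {sample_name: array of 60 class scores}; verify against the feeder's labels.
    with open(path, "rb") as f:
        return pickle.load(f)

joint = load_scores("ckpt/Others/MST-GCN/ntu60_xsub/xsub_j/epoch1_test_score.pkl")
joint_motion = load_scores("ckpt/Others/MST-GCN/ntu60_xsub/xsub_jm/epoch1_test_score.pkl")

fused = {name: np.asarray(joint[name]) + np.asarray(joint_motion[name]) for name in joint}
preds = {name: int(np.argmax(score)) for name, score in fused.items()}
print(len(preds), "samples fused")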
ckpt/Others/MST-GCN/ntu60_xsub/xsub_j/log.txt ADDED
@@ -0,0 +1,631 @@
1
+ [ Thu Sep 8 17:07:45 2022 ] # generator parameters: 2.896055 M.
2
+ [ Thu Sep 8 17:07:45 2022 ] Parameters:
3
+ {'work_dir': './work_dir/ntu/xsub_j', 'model_saved_name': './runs/ntu/xsub_j/runs', 'config': 'config/ntu/xsub_j.yaml', 'phase': 'train', 'save_score': True, 'seed': 1, 'log_interval': 100, 'save_interval': 1, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_joint.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl'}, 'model': 'model.AEMST_GCN.Model', 'model_args': {'num_class': 60, 'num_point': 25, 'num_person': 2, 'block_args': [[3, 112, False, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 224, True, 2], [224, 224, True, 1], [224, 224, True, 1], [224, 448, True, 2], [448, 448, True, 1], [448, 448, True, 1]], 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}, 'kernel_size': 9, 'block_type': 'ms', 'reduct_ratio': 2, 'expand_ratio': 0, 's_scale': 4, 't_scale': 4, 'layer_type': 'basic', 'act': 'relu', 'atten': 'None', 'bias': True}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.15, 'step': [50, 70, 90], 'device': [0], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 8, 'test_batch_size': 64, 'start_epoch': 0, 'model_path': '', 'num_epoch': 110, 'weight_decay': 0.0001, 'only_train_part': False, 'only_train_epoch': 0, 'warm_up_epoch': 10, 'local_rank': 0}
4
+
5
+ [ Thu Sep 8 17:07:45 2022 ] Training epoch: 1
6
+ [ Thu Sep 8 17:07:45 2022 ] Learning rate: 0.015
7
+ [ Thu Sep 8 17:11:08 2022 ] Mean training loss: 3.0195.
8
+ [ Thu Sep 8 17:11:08 2022 ] Time consumption: [Data]02%, [Network]98%
9
+ [ Thu Sep 8 17:11:08 2022 ] Training epoch: 2
10
+ [ Thu Sep 8 17:11:08 2022 ] Learning rate: 0.03
11
+ [ Thu Sep 8 17:14:31 2022 ] Mean training loss: 2.1926.
12
+ [ Thu Sep 8 17:14:31 2022 ] Time consumption: [Data]02%, [Network]98%
13
+ [ Thu Sep 8 17:14:31 2022 ] Training epoch: 3
14
+ [ Thu Sep 8 17:14:31 2022 ] Learning rate: 0.045
15
+ [ Thu Sep 8 17:17:52 2022 ] Mean training loss: 1.8503.
16
+ [ Thu Sep 8 17:17:52 2022 ] Time consumption: [Data]02%, [Network]98%
17
+ [ Thu Sep 8 17:17:52 2022 ] Training epoch: 4
18
+ [ Thu Sep 8 17:17:52 2022 ] Learning rate: 0.06
19
+ [ Thu Sep 8 17:21:14 2022 ] Mean training loss: 1.6219.
20
+ [ Thu Sep 8 17:21:14 2022 ] Time consumption: [Data]01%, [Network]98%
21
+ [ Thu Sep 8 17:21:14 2022 ] Training epoch: 5
22
+ [ Thu Sep 8 17:21:14 2022 ] Learning rate: 0.075
23
+ [ Thu Sep 8 17:24:35 2022 ] Mean training loss: 1.4539.
24
+ [ Thu Sep 8 17:24:35 2022 ] Time consumption: [Data]01%, [Network]98%
25
+ [ Thu Sep 8 17:24:35 2022 ] Training epoch: 6
26
+ [ Thu Sep 8 17:24:35 2022 ] Learning rate: 0.09
27
+ [ Thu Sep 8 17:27:58 2022 ] Mean training loss: 1.3453.
28
+ [ Thu Sep 8 17:27:58 2022 ] Time consumption: [Data]01%, [Network]98%
29
+ [ Thu Sep 8 17:27:58 2022 ] Training epoch: 7
30
+ [ Thu Sep 8 17:27:58 2022 ] Learning rate: 0.10500000000000001
31
+ [ Thu Sep 8 17:31:19 2022 ] Mean training loss: 1.2608.
32
+ [ Thu Sep 8 17:31:19 2022 ] Time consumption: [Data]01%, [Network]98%
33
+ [ Thu Sep 8 17:31:19 2022 ] Training epoch: 8
34
+ [ Thu Sep 8 17:31:19 2022 ] Learning rate: 0.12
35
+ [ Thu Sep 8 17:34:41 2022 ] Mean training loss: 1.2075.
36
+ [ Thu Sep 8 17:34:41 2022 ] Time consumption: [Data]02%, [Network]98%
37
+ [ Thu Sep 8 17:34:41 2022 ] Training epoch: 9
38
+ [ Thu Sep 8 17:34:41 2022 ] Learning rate: 0.13499999999999998
39
+ [ Thu Sep 8 17:38:03 2022 ] Mean training loss: 1.1737.
40
+ [ Thu Sep 8 17:38:03 2022 ] Time consumption: [Data]01%, [Network]98%
41
+ [ Thu Sep 8 17:38:03 2022 ] Training epoch: 10
42
+ [ Thu Sep 8 17:38:03 2022 ] Learning rate: 0.15
43
+ [ Thu Sep 8 17:41:25 2022 ] Mean training loss: 1.1203.
44
+ [ Thu Sep 8 17:41:25 2022 ] Time consumption: [Data]01%, [Network]98%
45
+ [ Thu Sep 8 17:41:25 2022 ] Training epoch: 11
46
+ [ Thu Sep 8 17:41:25 2022 ] Learning rate: 0.15
47
+ [ Thu Sep 8 17:44:48 2022 ] Mean training loss: 1.0656.
48
+ [ Thu Sep 8 17:44:48 2022 ] Time consumption: [Data]02%, [Network]98%
49
+ [ Thu Sep 8 17:44:48 2022 ] Training epoch: 12
50
+ [ Thu Sep 8 17:44:48 2022 ] Learning rate: 0.15
51
+ [ Thu Sep 8 17:48:09 2022 ] Mean training loss: 1.0205.
52
+ [ Thu Sep 8 17:48:09 2022 ] Time consumption: [Data]01%, [Network]98%
53
+ [ Thu Sep 8 17:48:09 2022 ] Training epoch: 13
54
+ [ Thu Sep 8 17:48:09 2022 ] Learning rate: 0.15
55
+ [ Thu Sep 8 17:51:31 2022 ] Mean training loss: 0.9889.
56
+ [ Thu Sep 8 17:51:31 2022 ] Time consumption: [Data]01%, [Network]98%
57
+ [ Thu Sep 8 17:51:31 2022 ] Training epoch: 14
58
+ [ Thu Sep 8 17:51:31 2022 ] Learning rate: 0.15
59
+ [ Thu Sep 8 17:54:53 2022 ] Mean training loss: 0.9602.
60
+ [ Thu Sep 8 17:54:53 2022 ] Time consumption: [Data]02%, [Network]98%
61
+ [ Thu Sep 8 17:54:53 2022 ] Training epoch: 15
62
+ [ Thu Sep 8 17:54:53 2022 ] Learning rate: 0.15
63
+ [ Thu Sep 8 17:58:15 2022 ] Mean training loss: 0.9204.
64
+ [ Thu Sep 8 17:58:15 2022 ] Time consumption: [Data]01%, [Network]98%
65
+ [ Thu Sep 8 17:58:15 2022 ] Training epoch: 16
66
+ [ Thu Sep 8 17:58:15 2022 ] Learning rate: 0.15
67
+ [ Thu Sep 8 18:01:37 2022 ] Mean training loss: 0.8923.
68
+ [ Thu Sep 8 18:01:37 2022 ] Time consumption: [Data]02%, [Network]98%
69
+ [ Thu Sep 8 18:01:37 2022 ] Training epoch: 17
70
+ [ Thu Sep 8 18:01:37 2022 ] Learning rate: 0.15
71
+ [ Thu Sep 8 18:04:59 2022 ] Mean training loss: 0.8704.
72
+ [ Thu Sep 8 18:04:59 2022 ] Time consumption: [Data]02%, [Network]98%
73
+ [ Thu Sep 8 18:04:59 2022 ] Training epoch: 18
74
+ [ Thu Sep 8 18:04:59 2022 ] Learning rate: 0.15
75
+ [ Thu Sep 8 18:08:21 2022 ] Mean training loss: 0.8575.
76
+ [ Thu Sep 8 18:08:21 2022 ] Time consumption: [Data]01%, [Network]98%
77
+ [ Thu Sep 8 18:08:21 2022 ] Training epoch: 19
78
+ [ Thu Sep 8 18:08:21 2022 ] Learning rate: 0.15
79
+ [ Thu Sep 8 18:11:42 2022 ] Mean training loss: 0.8183.
80
+ [ Thu Sep 8 18:11:42 2022 ] Time consumption: [Data]01%, [Network]98%
81
+ [ Thu Sep 8 18:11:42 2022 ] Training epoch: 20
82
+ [ Thu Sep 8 18:11:42 2022 ] Learning rate: 0.15
83
+ [ Thu Sep 8 18:15:04 2022 ] Mean training loss: 0.8045.
84
+ [ Thu Sep 8 18:15:04 2022 ] Time consumption: [Data]01%, [Network]98%
85
+ [ Thu Sep 8 18:15:04 2022 ] Training epoch: 21
86
+ [ Thu Sep 8 18:15:04 2022 ] Learning rate: 0.15
87
+ [ Thu Sep 8 18:18:26 2022 ] Mean training loss: 0.8075.
88
+ [ Thu Sep 8 18:18:26 2022 ] Time consumption: [Data]02%, [Network]98%
89
+ [ Thu Sep 8 18:18:26 2022 ] Training epoch: 22
90
+ [ Thu Sep 8 18:18:26 2022 ] Learning rate: 0.15
91
+ [ Thu Sep 8 18:21:49 2022 ] Mean training loss: 0.7965.
92
+ [ Thu Sep 8 18:21:49 2022 ] Time consumption: [Data]02%, [Network]98%
93
+ [ Thu Sep 8 18:21:49 2022 ] Training epoch: 23
94
+ [ Thu Sep 8 18:21:49 2022 ] Learning rate: 0.15
95
+ [ Thu Sep 8 18:25:11 2022 ] Mean training loss: 0.7809.
96
+ [ Thu Sep 8 18:25:11 2022 ] Time consumption: [Data]02%, [Network]98%
97
+ [ Thu Sep 8 18:25:11 2022 ] Training epoch: 24
98
+ [ Thu Sep 8 18:25:11 2022 ] Learning rate: 0.15
99
+ [ Thu Sep 8 18:28:32 2022 ] Mean training loss: 0.7482.
100
+ [ Thu Sep 8 18:28:32 2022 ] Time consumption: [Data]01%, [Network]98%
101
+ [ Thu Sep 8 18:28:32 2022 ] Training epoch: 25
102
+ [ Thu Sep 8 18:28:32 2022 ] Learning rate: 0.15
103
+ [ Thu Sep 8 18:31:54 2022 ] Mean training loss: 0.7497.
104
+ [ Thu Sep 8 18:31:54 2022 ] Time consumption: [Data]01%, [Network]98%
105
+ [ Thu Sep 8 18:31:54 2022 ] Training epoch: 26
106
+ [ Thu Sep 8 18:31:54 2022 ] Learning rate: 0.15
107
+ [ Thu Sep 8 18:35:16 2022 ] Mean training loss: 0.7401.
108
+ [ Thu Sep 8 18:35:16 2022 ] Time consumption: [Data]02%, [Network]98%
109
+ [ Thu Sep 8 18:35:16 2022 ] Training epoch: 27
110
+ [ Thu Sep 8 18:35:16 2022 ] Learning rate: 0.15
111
+ [ Thu Sep 8 18:38:38 2022 ] Mean training loss: 0.7310.
112
+ [ Thu Sep 8 18:38:38 2022 ] Time consumption: [Data]02%, [Network]98%
113
+ [ Thu Sep 8 18:38:38 2022 ] Training epoch: 28
114
+ [ Thu Sep 8 18:38:38 2022 ] Learning rate: 0.15
115
+ [ Thu Sep 8 18:42:00 2022 ] Mean training loss: 0.7214.
116
+ [ Thu Sep 8 18:42:00 2022 ] Time consumption: [Data]01%, [Network]98%
117
+ [ Thu Sep 8 18:42:00 2022 ] Training epoch: 29
118
+ [ Thu Sep 8 18:42:00 2022 ] Learning rate: 0.15
119
+ [ Thu Sep 8 18:45:22 2022 ] Mean training loss: 0.6995.
120
+ [ Thu Sep 8 18:45:22 2022 ] Time consumption: [Data]02%, [Network]98%
121
+ [ Thu Sep 8 18:45:22 2022 ] Training epoch: 30
122
+ [ Thu Sep 8 18:45:22 2022 ] Learning rate: 0.15
123
+ [ Thu Sep 8 18:48:44 2022 ] Mean training loss: 0.6931.
124
+ [ Thu Sep 8 18:48:44 2022 ] Time consumption: [Data]02%, [Network]98%
125
+ [ Thu Sep 8 18:48:44 2022 ] Training epoch: 31
126
+ [ Thu Sep 8 18:48:44 2022 ] Learning rate: 0.15
127
+ [ Thu Sep 8 18:52:06 2022 ] Mean training loss: 0.6972.
128
+ [ Thu Sep 8 18:52:06 2022 ] Time consumption: [Data]01%, [Network]98%
129
+ [ Thu Sep 8 18:52:06 2022 ] Training epoch: 32
130
+ [ Thu Sep 8 18:52:06 2022 ] Learning rate: 0.15
131
+ [ Thu Sep 8 18:55:28 2022 ] Mean training loss: 0.6870.
132
+ [ Thu Sep 8 18:55:28 2022 ] Time consumption: [Data]02%, [Network]98%
133
+ [ Thu Sep 8 18:55:28 2022 ] Training epoch: 33
134
+ [ Thu Sep 8 18:55:28 2022 ] Learning rate: 0.15
135
+ [ Thu Sep 8 18:58:49 2022 ] Mean training loss: 0.6716.
136
+ [ Thu Sep 8 18:58:49 2022 ] Time consumption: [Data]02%, [Network]98%
137
+ [ Thu Sep 8 18:58:49 2022 ] Training epoch: 34
138
+ [ Thu Sep 8 18:58:49 2022 ] Learning rate: 0.15
139
+ [ Thu Sep 8 19:02:11 2022 ] Mean training loss: 0.6840.
140
+ [ Thu Sep 8 19:02:11 2022 ] Time consumption: [Data]02%, [Network]98%
141
+ [ Thu Sep 8 19:02:11 2022 ] Training epoch: 35
142
+ [ Thu Sep 8 19:02:11 2022 ] Learning rate: 0.15
143
+ [ Thu Sep 8 19:05:33 2022 ] Mean training loss: 0.6804.
144
+ [ Thu Sep 8 19:05:33 2022 ] Time consumption: [Data]02%, [Network]98%
145
+ [ Thu Sep 8 19:05:33 2022 ] Training epoch: 36
146
+ [ Thu Sep 8 19:05:33 2022 ] Learning rate: 0.15
147
+ [ Thu Sep 8 19:08:55 2022 ] Mean training loss: 0.6658.
148
+ [ Thu Sep 8 19:08:55 2022 ] Time consumption: [Data]01%, [Network]98%
149
+ [ Thu Sep 8 19:08:55 2022 ] Training epoch: 37
150
+ [ Thu Sep 8 19:08:55 2022 ] Learning rate: 0.15
151
+ [ Thu Sep 8 19:12:17 2022 ] Mean training loss: 0.6654.
152
+ [ Thu Sep 8 19:12:17 2022 ] Time consumption: [Data]01%, [Network]98%
153
+ [ Thu Sep 8 19:12:17 2022 ] Training epoch: 38
154
+ [ Thu Sep 8 19:12:17 2022 ] Learning rate: 0.15
155
+ [ Thu Sep 8 19:15:38 2022 ] Mean training loss: 0.6568.
156
+ [ Thu Sep 8 19:15:38 2022 ] Time consumption: [Data]02%, [Network]98%
157
+ [ Thu Sep 8 19:15:38 2022 ] Training epoch: 39
158
+ [ Thu Sep 8 19:15:38 2022 ] Learning rate: 0.15
159
+ [ Thu Sep 8 19:19:00 2022 ] Mean training loss: 0.6439.
160
+ [ Thu Sep 8 19:19:00 2022 ] Time consumption: [Data]02%, [Network]98%
161
+ [ Thu Sep 8 19:19:00 2022 ] Training epoch: 40
162
+ [ Thu Sep 8 19:19:00 2022 ] Learning rate: 0.15
163
+ [ Thu Sep 8 19:22:21 2022 ] Mean training loss: 0.6398.
164
+ [ Thu Sep 8 19:22:21 2022 ] Time consumption: [Data]01%, [Network]98%
165
+ [ Thu Sep 8 19:22:21 2022 ] Training epoch: 41
166
+ [ Thu Sep 8 19:22:21 2022 ] Learning rate: 0.15
167
+ [ Thu Sep 8 19:25:42 2022 ] Mean training loss: 0.6464.
168
+ [ Thu Sep 8 19:25:42 2022 ] Time consumption: [Data]02%, [Network]98%
169
+ [ Thu Sep 8 19:25:42 2022 ] Training epoch: 42
170
+ [ Thu Sep 8 19:25:42 2022 ] Learning rate: 0.15
171
+ [ Thu Sep 8 19:29:02 2022 ] Mean training loss: 0.6393.
172
+ [ Thu Sep 8 19:29:02 2022 ] Time consumption: [Data]01%, [Network]98%
173
+ [ Thu Sep 8 19:29:02 2022 ] Training epoch: 43
174
+ [ Thu Sep 8 19:29:02 2022 ] Learning rate: 0.15
175
+ [ Thu Sep 8 19:32:24 2022 ] Mean training loss: 0.6244.
176
+ [ Thu Sep 8 19:32:24 2022 ] Time consumption: [Data]02%, [Network]98%
177
+ [ Thu Sep 8 19:32:24 2022 ] Training epoch: 44
178
+ [ Thu Sep 8 19:32:24 2022 ] Learning rate: 0.15
179
+ [ Thu Sep 8 19:35:44 2022 ] Mean training loss: 0.6277.
180
+ [ Thu Sep 8 19:35:44 2022 ] Time consumption: [Data]01%, [Network]98%
181
+ [ Thu Sep 8 19:35:44 2022 ] Training epoch: 45
182
+ [ Thu Sep 8 19:35:44 2022 ] Learning rate: 0.15
183
+ [ Thu Sep 8 19:39:05 2022 ] Mean training loss: 0.6040.
184
+ [ Thu Sep 8 19:39:05 2022 ] Time consumption: [Data]01%, [Network]98%
185
+ [ Thu Sep 8 19:39:05 2022 ] Training epoch: 46
186
+ [ Thu Sep 8 19:39:05 2022 ] Learning rate: 0.15
187
+ [ Thu Sep 8 19:42:27 2022 ] Mean training loss: 0.6229.
188
+ [ Thu Sep 8 19:42:27 2022 ] Time consumption: [Data]02%, [Network]98%
189
+ [ Thu Sep 8 19:42:27 2022 ] Training epoch: 47
190
+ [ Thu Sep 8 19:42:27 2022 ] Learning rate: 0.15
191
+ [ Thu Sep 8 19:45:49 2022 ] Mean training loss: 0.6233.
192
+ [ Thu Sep 8 19:45:49 2022 ] Time consumption: [Data]01%, [Network]98%
193
+ [ Thu Sep 8 19:45:49 2022 ] Training epoch: 48
194
+ [ Thu Sep 8 19:45:49 2022 ] Learning rate: 0.15
195
+ [ Thu Sep 8 19:49:11 2022 ] Mean training loss: 0.6187.
196
+ [ Thu Sep 8 19:49:11 2022 ] Time consumption: [Data]01%, [Network]98%
197
+ [ Thu Sep 8 19:49:11 2022 ] Training epoch: 49
198
+ [ Thu Sep 8 19:49:11 2022 ] Learning rate: 0.15
199
+ [ Thu Sep 8 19:52:33 2022 ] Mean training loss: 0.6209.
200
+ [ Thu Sep 8 19:52:33 2022 ] Time consumption: [Data]01%, [Network]98%
201
+ [ Thu Sep 8 19:52:33 2022 ] Training epoch: 50
202
+ [ Thu Sep 8 19:52:33 2022 ] Learning rate: 0.15
203
+ [ Thu Sep 8 19:55:55 2022 ] Mean training loss: 0.6215.
204
+ [ Thu Sep 8 19:55:55 2022 ] Time consumption: [Data]01%, [Network]98%
205
+ [ Thu Sep 8 19:55:55 2022 ] Training epoch: 51
206
+ [ Thu Sep 8 19:55:55 2022 ] Learning rate: 0.015
207
+ [ Thu Sep 8 19:59:17 2022 ] Mean training loss: 0.3007.
208
+ [ Thu Sep 8 19:59:17 2022 ] Time consumption: [Data]01%, [Network]98%
209
+ [ Thu Sep 8 19:59:17 2022 ] Eval epoch: 51
210
+ [ Thu Sep 8 20:01:29 2022 ] Epoch 51 Curr Acc: (10902/16487)66.12%
211
+ [ Thu Sep 8 20:01:29 2022 ] Epoch 51 Best Acc 66.12%
212
+ [ Thu Sep 8 20:01:29 2022 ] Training epoch: 52
213
+ [ Thu Sep 8 20:01:29 2022 ] Learning rate: 0.015
214
+ [ Thu Sep 8 20:04:51 2022 ] Mean training loss: 0.2042.
215
+ [ Thu Sep 8 20:04:51 2022 ] Time consumption: [Data]02%, [Network]98%
216
+ [ Thu Sep 8 20:04:51 2022 ] Eval epoch: 52
217
+ [ Thu Sep 8 20:07:04 2022 ] Epoch 52 Curr Acc: (11412/16487)69.22%
218
+ [ Thu Sep 8 20:07:04 2022 ] Epoch 52 Best Acc 69.22%
219
+ [ Thu Sep 8 20:07:04 2022 ] Training epoch: 53
220
+ [ Thu Sep 8 20:07:04 2022 ] Learning rate: 0.015
221
+ [ Thu Sep 8 20:10:26 2022 ] Mean training loss: 0.1695.
222
+ [ Thu Sep 8 20:10:26 2022 ] Time consumption: [Data]02%, [Network]98%
223
+ [ Thu Sep 8 20:10:26 2022 ] Eval epoch: 53
224
+ [ Thu Sep 8 20:12:38 2022 ] Epoch 53 Curr Acc: (11367/16487)68.95%
225
+ [ Thu Sep 8 20:12:38 2022 ] Epoch 52 Best Acc 69.22%
226
+ [ Thu Sep 8 20:12:38 2022 ] Training epoch: 54
227
+ [ Thu Sep 8 20:12:38 2022 ] Learning rate: 0.015
228
+ [ Thu Sep 8 20:16:00 2022 ] Mean training loss: 0.1382.
229
+ [ Thu Sep 8 20:16:00 2022 ] Time consumption: [Data]02%, [Network]98%
230
+ [ Thu Sep 8 20:16:00 2022 ] Eval epoch: 54
231
+ [ Thu Sep 8 20:18:13 2022 ] Epoch 54 Curr Acc: (11563/16487)70.13%
232
+ [ Thu Sep 8 20:18:13 2022 ] Epoch 54 Best Acc 70.13%
233
+ [ Thu Sep 8 20:18:13 2022 ] Training epoch: 55
234
+ [ Thu Sep 8 20:18:13 2022 ] Learning rate: 0.015
235
+ [ Thu Sep 8 20:21:35 2022 ] Mean training loss: 0.1144.
236
+ [ Thu Sep 8 20:21:35 2022 ] Time consumption: [Data]02%, [Network]98%
237
+ [ Thu Sep 8 20:21:35 2022 ] Eval epoch: 55
238
+ [ Thu Sep 8 20:23:47 2022 ] Epoch 55 Curr Acc: (11322/16487)68.67%
239
+ [ Thu Sep 8 20:23:47 2022 ] Epoch 54 Best Acc 70.13%
240
+ [ Thu Sep 8 20:23:47 2022 ] Training epoch: 56
241
+ [ Thu Sep 8 20:23:47 2022 ] Learning rate: 0.015
242
+ [ Thu Sep 8 20:27:09 2022 ] Mean training loss: 0.1028.
243
+ [ Thu Sep 8 20:27:09 2022 ] Time consumption: [Data]02%, [Network]98%
244
+ [ Thu Sep 8 20:27:09 2022 ] Eval epoch: 56
245
+ [ Thu Sep 8 20:29:22 2022 ] Epoch 56 Curr Acc: (11107/16487)67.37%
246
+ [ Thu Sep 8 20:29:22 2022 ] Epoch 54 Best Acc 70.13%
247
+ [ Thu Sep 8 20:29:22 2022 ] Training epoch: 57
248
+ [ Thu Sep 8 20:29:22 2022 ] Learning rate: 0.015
249
+ [ Thu Sep 8 20:32:44 2022 ] Mean training loss: 0.0895.
250
+ [ Thu Sep 8 20:32:44 2022 ] Time consumption: [Data]02%, [Network]98%
251
+ [ Thu Sep 8 20:32:44 2022 ] Eval epoch: 57
252
+ [ Thu Sep 8 20:34:56 2022 ] Epoch 57 Curr Acc: (11374/16487)68.99%
253
+ [ Thu Sep 8 20:34:56 2022 ] Epoch 54 Best Acc 70.13%
254
+ [ Thu Sep 8 20:34:56 2022 ] Training epoch: 58
255
+ [ Thu Sep 8 20:34:56 2022 ] Learning rate: 0.015
256
+ [ Thu Sep 8 20:38:18 2022 ] Mean training loss: 0.0792.
257
+ [ Thu Sep 8 20:38:18 2022 ] Time consumption: [Data]02%, [Network]98%
258
+ [ Thu Sep 8 20:38:18 2022 ] Eval epoch: 58
259
+ [ Thu Sep 8 20:40:31 2022 ] Epoch 58 Curr Acc: (11445/16487)69.42%
260
+ [ Thu Sep 8 20:40:31 2022 ] Epoch 54 Best Acc 70.13%
261
+ [ Thu Sep 8 20:40:31 2022 ] Training epoch: 59
262
+ [ Thu Sep 8 20:40:31 2022 ] Learning rate: 0.015
263
+ [ Thu Sep 8 20:43:52 2022 ] Mean training loss: 0.0660.
264
+ [ Thu Sep 8 20:43:52 2022 ] Time consumption: [Data]02%, [Network]98%
265
+ [ Thu Sep 8 20:43:52 2022 ] Eval epoch: 59
266
+ [ Thu Sep 8 20:46:04 2022 ] Epoch 59 Curr Acc: (11110/16487)67.39%
267
+ [ Thu Sep 8 20:46:04 2022 ] Epoch 54 Best Acc 70.13%
268
+ [ Thu Sep 8 20:46:04 2022 ] Training epoch: 60
269
+ [ Thu Sep 8 20:46:04 2022 ] Learning rate: 0.015
270
+ [ Thu Sep 8 20:49:26 2022 ] Mean training loss: 0.0554.
271
+ [ Thu Sep 8 20:49:26 2022 ] Time consumption: [Data]02%, [Network]98%
272
+ [ Thu Sep 8 20:49:26 2022 ] Eval epoch: 60
273
+ [ Thu Sep 8 20:51:38 2022 ] Epoch 60 Curr Acc: (10888/16487)66.04%
274
+ [ Thu Sep 8 20:51:38 2022 ] Epoch 54 Best Acc 70.13%
275
+ [ Thu Sep 8 20:51:38 2022 ] Training epoch: 61
276
+ [ Thu Sep 8 20:51:38 2022 ] Learning rate: 0.015
277
+ [ Thu Sep 8 20:55:00 2022 ] Mean training loss: 0.0540.
278
+ [ Thu Sep 8 20:55:00 2022 ] Time consumption: [Data]02%, [Network]98%
279
+ [ Thu Sep 8 20:55:00 2022 ] Eval epoch: 61
280
+ [ Thu Sep 8 20:57:13 2022 ] Epoch 61 Curr Acc: (11119/16487)67.44%
281
+ [ Thu Sep 8 20:57:13 2022 ] Epoch 54 Best Acc 70.13%
282
+ [ Thu Sep 8 20:57:13 2022 ] Training epoch: 62
283
+ [ Thu Sep 8 20:57:13 2022 ] Learning rate: 0.015
284
+ [ Thu Sep 8 21:00:34 2022 ] Mean training loss: 0.0477.
285
+ [ Thu Sep 8 21:00:34 2022 ] Time consumption: [Data]02%, [Network]98%
286
+ [ Thu Sep 8 21:00:34 2022 ] Eval epoch: 62
287
+ [ Thu Sep 8 21:02:47 2022 ] Epoch 62 Curr Acc: (11069/16487)67.14%
288
+ [ Thu Sep 8 21:02:47 2022 ] Epoch 54 Best Acc 70.13%
289
+ [ Thu Sep 8 21:02:47 2022 ] Training epoch: 63
290
+ [ Thu Sep 8 21:02:47 2022 ] Learning rate: 0.015
291
+ [ Thu Sep 8 21:06:09 2022 ] Mean training loss: 0.0419.
292
+ [ Thu Sep 8 21:06:09 2022 ] Time consumption: [Data]02%, [Network]98%
293
+ [ Thu Sep 8 21:06:09 2022 ] Eval epoch: 63
294
+ [ Thu Sep 8 21:08:21 2022 ] Epoch 63 Curr Acc: (10941/16487)66.36%
295
+ [ Thu Sep 8 21:08:21 2022 ] Epoch 54 Best Acc 70.13%
296
+ [ Thu Sep 8 21:08:21 2022 ] Training epoch: 64
297
+ [ Thu Sep 8 21:08:21 2022 ] Learning rate: 0.015
298
+ [ Thu Sep 8 21:11:43 2022 ] Mean training loss: 0.0414.
299
+ [ Thu Sep 8 21:11:43 2022 ] Time consumption: [Data]02%, [Network]98%
300
+ [ Thu Sep 8 21:11:43 2022 ] Eval epoch: 64
301
+ [ Thu Sep 8 21:13:56 2022 ] Epoch 64 Curr Acc: (11043/16487)66.98%
302
+ [ Thu Sep 8 21:13:56 2022 ] Epoch 54 Best Acc 70.13%
303
+ [ Thu Sep 8 21:13:56 2022 ] Training epoch: 65
304
+ [ Thu Sep 8 21:13:56 2022 ] Learning rate: 0.015
305
+ [ Thu Sep 8 21:17:17 2022 ] Mean training loss: 0.0403.
306
+ [ Thu Sep 8 21:17:17 2022 ] Time consumption: [Data]01%, [Network]98%
307
+ [ Thu Sep 8 21:17:17 2022 ] Eval epoch: 65
308
+ [ Thu Sep 8 21:19:30 2022 ] Epoch 65 Curr Acc: (11271/16487)68.36%
309
+ [ Thu Sep 8 21:19:30 2022 ] Epoch 54 Best Acc 70.13%
310
+ [ Thu Sep 8 21:19:30 2022 ] Training epoch: 66
311
+ [ Thu Sep 8 21:19:30 2022 ] Learning rate: 0.015
312
+ [ Thu Sep 8 21:22:52 2022 ] Mean training loss: 0.0337.
313
+ [ Thu Sep 8 21:22:52 2022 ] Time consumption: [Data]02%, [Network]98%
314
+ [ Thu Sep 8 21:22:52 2022 ] Eval epoch: 66
315
+ [ Thu Sep 8 21:25:04 2022 ] Epoch 66 Curr Acc: (11390/16487)69.08%
316
+ [ Thu Sep 8 21:25:04 2022 ] Epoch 54 Best Acc 70.13%
317
+ [ Thu Sep 8 21:25:04 2022 ] Training epoch: 67
318
+ [ Thu Sep 8 21:25:04 2022 ] Learning rate: 0.015
319
+ [ Thu Sep 8 21:28:26 2022 ] Mean training loss: 0.0312.
320
+ [ Thu Sep 8 21:28:26 2022 ] Time consumption: [Data]01%, [Network]98%
321
+ [ Thu Sep 8 21:28:26 2022 ] Eval epoch: 67
322
+ [ Thu Sep 8 21:30:38 2022 ] Epoch 67 Curr Acc: (11022/16487)66.85%
323
+ [ Thu Sep 8 21:30:38 2022 ] Epoch 54 Best Acc 70.13%
324
+ [ Thu Sep 8 21:30:38 2022 ] Training epoch: 68
325
+ [ Thu Sep 8 21:30:38 2022 ] Learning rate: 0.015
326
+ [ Thu Sep 8 21:34:00 2022 ] Mean training loss: 0.0294.
327
+ [ Thu Sep 8 21:34:00 2022 ] Time consumption: [Data]01%, [Network]98%
328
+ [ Thu Sep 8 21:34:00 2022 ] Eval epoch: 68
329
+ [ Thu Sep 8 21:36:13 2022 ] Epoch 68 Curr Acc: (11086/16487)67.24%
330
+ [ Thu Sep 8 21:36:13 2022 ] Epoch 54 Best Acc 70.13%
331
+ [ Thu Sep 8 21:36:13 2022 ] Training epoch: 69
332
+ [ Thu Sep 8 21:36:13 2022 ] Learning rate: 0.015
333
+ [ Thu Sep 8 21:39:34 2022 ] Mean training loss: 0.0278.
334
+ [ Thu Sep 8 21:39:34 2022 ] Time consumption: [Data]02%, [Network]98%
335
+ [ Thu Sep 8 21:39:34 2022 ] Eval epoch: 69
336
+ [ Thu Sep 8 21:41:47 2022 ] Epoch 69 Curr Acc: (11356/16487)68.88%
337
+ [ Thu Sep 8 21:41:47 2022 ] Epoch 54 Best Acc 70.13%
338
+ [ Thu Sep 8 21:41:47 2022 ] Training epoch: 70
339
+ [ Thu Sep 8 21:41:47 2022 ] Learning rate: 0.015
340
+ [ Thu Sep 8 21:45:09 2022 ] Mean training loss: 0.0266.
341
+ [ Thu Sep 8 21:45:09 2022 ] Time consumption: [Data]01%, [Network]98%
342
+ [ Thu Sep 8 21:45:09 2022 ] Eval epoch: 70
343
+ [ Thu Sep 8 21:47:21 2022 ] Epoch 70 Curr Acc: (11207/16487)67.97%
344
+ [ Thu Sep 8 21:47:21 2022 ] Epoch 54 Best Acc 70.13%
345
+ [ Thu Sep 8 21:47:21 2022 ] Training epoch: 71
346
+ [ Thu Sep 8 21:47:21 2022 ] Learning rate: 0.0015000000000000002
347
+ [ Thu Sep 8 21:50:43 2022 ] Mean training loss: 0.0207.
348
+ [ Thu Sep 8 21:50:43 2022 ] Time consumption: [Data]02%, [Network]98%
349
+ [ Thu Sep 8 21:50:43 2022 ] Eval epoch: 71
350
+ [ Thu Sep 8 21:52:55 2022 ] Epoch 71 Curr Acc: (11328/16487)68.71%
351
+ [ Thu Sep 8 21:52:55 2022 ] Epoch 54 Best Acc 70.13%
352
+ [ Thu Sep 8 21:52:55 2022 ] Training epoch: 72
353
+ [ Thu Sep 8 21:52:55 2022 ] Learning rate: 0.0015000000000000002
354
+ [ Thu Sep 8 21:56:18 2022 ] Mean training loss: 0.0185.
355
+ [ Thu Sep 8 21:56:18 2022 ] Time consumption: [Data]01%, [Network]98%
356
+ [ Thu Sep 8 21:56:18 2022 ] Eval epoch: 72
357
+ [ Thu Sep 8 21:58:30 2022 ] Epoch 72 Curr Acc: (11160/16487)67.69%
358
+ [ Thu Sep 8 21:58:30 2022 ] Epoch 54 Best Acc 70.13%
359
+ [ Thu Sep 8 21:58:30 2022 ] Training epoch: 73
360
+ [ Thu Sep 8 21:58:30 2022 ] Learning rate: 0.0015000000000000002
361
+ [ Thu Sep 8 22:01:51 2022 ] Mean training loss: 0.0202.
362
+ [ Thu Sep 8 22:01:51 2022 ] Time consumption: [Data]02%, [Network]98%
363
+ [ Thu Sep 8 22:01:51 2022 ] Eval epoch: 73
364
+ [ Thu Sep 8 22:04:04 2022 ] Epoch 73 Curr Acc: (11321/16487)68.67%
365
+ [ Thu Sep 8 22:04:04 2022 ] Epoch 54 Best Acc 70.13%
366
+ [ Thu Sep 8 22:04:04 2022 ] Training epoch: 74
367
+ [ Thu Sep 8 22:04:04 2022 ] Learning rate: 0.0015000000000000002
368
+ [ Thu Sep 8 22:07:25 2022 ] Mean training loss: 0.0174.
369
+ [ Thu Sep 8 22:07:25 2022 ] Time consumption: [Data]02%, [Network]98%
370
+ [ Thu Sep 8 22:07:26 2022 ] Eval epoch: 74
371
+ [ Thu Sep 8 22:09:38 2022 ] Epoch 74 Curr Acc: (11098/16487)67.31%
372
+ [ Thu Sep 8 22:09:38 2022 ] Epoch 54 Best Acc 70.13%
373
+ [ Thu Sep 8 22:09:38 2022 ] Training epoch: 75
374
+ [ Thu Sep 8 22:09:38 2022 ] Learning rate: 0.0015000000000000002
375
+ [ Thu Sep 8 22:13:00 2022 ] Mean training loss: 0.0149.
376
+ [ Thu Sep 8 22:13:00 2022 ] Time consumption: [Data]02%, [Network]98%
377
+ [ Thu Sep 8 22:13:00 2022 ] Eval epoch: 75
378
+ [ Thu Sep 8 22:15:12 2022 ] Epoch 75 Curr Acc: (11172/16487)67.76%
379
+ [ Thu Sep 8 22:15:12 2022 ] Epoch 54 Best Acc 70.13%
380
+ [ Thu Sep 8 22:15:13 2022 ] Training epoch: 76
381
+ [ Thu Sep 8 22:15:13 2022 ] Learning rate: 0.0015000000000000002
382
+ [ Thu Sep 8 22:18:34 2022 ] Mean training loss: 0.0162.
383
+ [ Thu Sep 8 22:18:34 2022 ] Time consumption: [Data]02%, [Network]98%
384
+ [ Thu Sep 8 22:18:34 2022 ] Eval epoch: 76
385
+ [ Thu Sep 8 22:20:47 2022 ] Epoch 76 Curr Acc: (11432/16487)69.34%
386
+ [ Thu Sep 8 22:20:47 2022 ] Epoch 54 Best Acc 70.13%
387
+ [ Thu Sep 8 22:20:47 2022 ] Training epoch: 77
388
+ [ Thu Sep 8 22:20:47 2022 ] Learning rate: 0.0015000000000000002
389
+ [ Thu Sep 8 22:24:08 2022 ] Mean training loss: 0.0139.
390
+ [ Thu Sep 8 22:24:08 2022 ] Time consumption: [Data]02%, [Network]98%
391
+ [ Thu Sep 8 22:24:09 2022 ] Eval epoch: 77
392
+ [ Thu Sep 8 22:26:21 2022 ] Epoch 77 Curr Acc: (11165/16487)67.72%
393
+ [ Thu Sep 8 22:26:21 2022 ] Epoch 54 Best Acc 70.13%
394
+ [ Thu Sep 8 22:26:21 2022 ] Training epoch: 78
395
+ [ Thu Sep 8 22:26:21 2022 ] Learning rate: 0.0015000000000000002
396
+ [ Thu Sep 8 22:29:43 2022 ] Mean training loss: 0.0148.
397
+ [ Thu Sep 8 22:29:43 2022 ] Time consumption: [Data]02%, [Network]98%
398
+ [ Thu Sep 8 22:29:43 2022 ] Eval epoch: 78
399
+ [ Thu Sep 8 22:31:55 2022 ] Epoch 78 Curr Acc: (11269/16487)68.35%
400
+ [ Thu Sep 8 22:31:55 2022 ] Epoch 54 Best Acc 70.13%
401
+ [ Thu Sep 8 22:31:55 2022 ] Training epoch: 79
402
+ [ Thu Sep 8 22:31:55 2022 ] Learning rate: 0.0015000000000000002
403
+ [ Thu Sep 8 22:35:17 2022 ] Mean training loss: 0.0151.
404
+ [ Thu Sep 8 22:35:17 2022 ] Time consumption: [Data]02%, [Network]98%
405
+ [ Thu Sep 8 22:35:17 2022 ] Eval epoch: 79
406
+ [ Thu Sep 8 22:37:30 2022 ] Epoch 79 Curr Acc: (11329/16487)68.71%
407
+ [ Thu Sep 8 22:37:30 2022 ] Epoch 54 Best Acc 70.13%
408
+ [ Thu Sep 8 22:37:30 2022 ] Training epoch: 80
409
+ [ Thu Sep 8 22:37:30 2022 ] Learning rate: 0.0015000000000000002
410
+ [ Thu Sep 8 22:40:52 2022 ] Mean training loss: 0.0136.
411
+ [ Thu Sep 8 22:40:52 2022 ] Time consumption: [Data]01%, [Network]98%
412
+ [ Thu Sep 8 22:40:52 2022 ] Eval epoch: 80
413
+ [ Thu Sep 8 22:43:04 2022 ] Epoch 80 Curr Acc: (11280/16487)68.42%
414
+ [ Thu Sep 8 22:43:04 2022 ] Epoch 54 Best Acc 70.13%
415
+ [ Thu Sep 8 22:43:04 2022 ] Training epoch: 81
416
+ [ Thu Sep 8 22:43:04 2022 ] Learning rate: 0.0015000000000000002
417
+ [ Thu Sep 8 22:46:26 2022 ] Mean training loss: 0.0149.
418
+ [ Thu Sep 8 22:46:26 2022 ] Time consumption: [Data]02%, [Network]98%
419
+ [ Thu Sep 8 22:46:26 2022 ] Eval epoch: 81
420
+ [ Thu Sep 8 22:48:38 2022 ] Epoch 81 Curr Acc: (11301/16487)68.54%
421
+ [ Thu Sep 8 22:48:38 2022 ] Epoch 54 Best Acc 70.13%
422
+ [ Thu Sep 8 22:48:38 2022 ] Training epoch: 82
423
+ [ Thu Sep 8 22:48:38 2022 ] Learning rate: 0.0015000000000000002
424
+ [ Thu Sep 8 22:52:00 2022 ] Mean training loss: 0.0144.
425
+ [ Thu Sep 8 22:52:00 2022 ] Time consumption: [Data]01%, [Network]98%
426
+ [ Thu Sep 8 22:52:00 2022 ] Eval epoch: 82
427
+ [ Thu Sep 8 22:54:12 2022 ] Epoch 82 Curr Acc: (10996/16487)66.69%
428
+ [ Thu Sep 8 22:54:12 2022 ] Epoch 54 Best Acc 70.13%
429
+ [ Thu Sep 8 22:54:12 2022 ] Training epoch: 83
430
+ [ Thu Sep 8 22:54:12 2022 ] Learning rate: 0.0015000000000000002
431
+ [ Thu Sep 8 22:57:34 2022 ] Mean training loss: 0.0143.
432
+ [ Thu Sep 8 22:57:34 2022 ] Time consumption: [Data]02%, [Network]98%
433
+ [ Thu Sep 8 22:57:34 2022 ] Eval epoch: 83
434
+ [ Thu Sep 8 22:59:47 2022 ] Epoch 83 Curr Acc: (11310/16487)68.60%
435
+ [ Thu Sep 8 22:59:47 2022 ] Epoch 54 Best Acc 70.13%
436
+ [ Thu Sep 8 22:59:47 2022 ] Training epoch: 84
437
+ [ Thu Sep 8 22:59:47 2022 ] Learning rate: 0.0015000000000000002
438
+ [ Thu Sep 8 23:03:09 2022 ] Mean training loss: 0.0149.
439
+ [ Thu Sep 8 23:03:09 2022 ] Time consumption: [Data]02%, [Network]98%
440
+ [ Thu Sep 8 23:03:09 2022 ] Eval epoch: 84
441
+ [ Thu Sep 8 23:05:21 2022 ] Epoch 84 Curr Acc: (11367/16487)68.95%
442
+ [ Thu Sep 8 23:05:21 2022 ] Epoch 54 Best Acc 70.13%
443
+ [ Thu Sep 8 23:05:21 2022 ] Training epoch: 85
444
+ [ Thu Sep 8 23:05:21 2022 ] Learning rate: 0.0015000000000000002
445
+ [ Thu Sep 8 23:08:43 2022 ] Mean training loss: 0.0117.
446
+ [ Thu Sep 8 23:08:43 2022 ] Time consumption: [Data]02%, [Network]98%
447
+ [ Thu Sep 8 23:08:43 2022 ] Eval epoch: 85
448
+ [ Thu Sep 8 23:10:56 2022 ] Epoch 85 Curr Acc: (11345/16487)68.81%
449
+ [ Thu Sep 8 23:10:56 2022 ] Epoch 54 Best Acc 70.13%
450
+ [ Thu Sep 8 23:10:56 2022 ] Training epoch: 86
451
+ [ Thu Sep 8 23:10:56 2022 ] Learning rate: 0.0015000000000000002
452
+ [ Thu Sep 8 23:14:17 2022 ] Mean training loss: 0.0130.
453
+ [ Thu Sep 8 23:14:17 2022 ] Time consumption: [Data]02%, [Network]98%
454
+ [ Thu Sep 8 23:14:17 2022 ] Eval epoch: 86
455
+ [ Thu Sep 8 23:16:30 2022 ] Epoch 86 Curr Acc: (11269/16487)68.35%
456
+ [ Thu Sep 8 23:16:30 2022 ] Epoch 54 Best Acc 70.13%
457
+ [ Thu Sep 8 23:16:30 2022 ] Training epoch: 87
458
+ [ Thu Sep 8 23:16:30 2022 ] Learning rate: 0.0015000000000000002
459
+ [ Thu Sep 8 23:19:51 2022 ] Mean training loss: 0.0118.
460
+ [ Thu Sep 8 23:19:51 2022 ] Time consumption: [Data]01%, [Network]98%
461
+ [ Thu Sep 8 23:19:51 2022 ] Eval epoch: 87
462
+ [ Thu Sep 8 23:22:03 2022 ] Epoch 87 Curr Acc: (11509/16487)69.81%
463
+ [ Thu Sep 8 23:22:03 2022 ] Epoch 54 Best Acc 70.13%
464
+ [ Thu Sep 8 23:22:03 2022 ] Training epoch: 88
465
+ [ Thu Sep 8 23:22:03 2022 ] Learning rate: 0.0015000000000000002
466
+ [ Thu Sep 8 23:25:25 2022 ] Mean training loss: 0.0112.
467
+ [ Thu Sep 8 23:25:25 2022 ] Time consumption: [Data]01%, [Network]98%
468
+ [ Thu Sep 8 23:25:25 2022 ] Eval epoch: 88
469
+ [ Thu Sep 8 23:27:38 2022 ] Epoch 88 Curr Acc: (11416/16487)69.24%
470
+ [ Thu Sep 8 23:27:38 2022 ] Epoch 54 Best Acc 70.13%
471
+ [ Thu Sep 8 23:27:38 2022 ] Training epoch: 89
472
+ [ Thu Sep 8 23:27:38 2022 ] Learning rate: 0.0015000000000000002
473
+ [ Thu Sep 8 23:31:00 2022 ] Mean training loss: 0.0118.
474
+ [ Thu Sep 8 23:31:00 2022 ] Time consumption: [Data]02%, [Network]98%
475
+ [ Thu Sep 8 23:31:00 2022 ] Eval epoch: 89
476
+ [ Thu Sep 8 23:33:12 2022 ] Epoch 89 Curr Acc: (11423/16487)69.28%
477
+ [ Thu Sep 8 23:33:12 2022 ] Epoch 54 Best Acc 70.13%
478
+ [ Thu Sep 8 23:33:12 2022 ] Training epoch: 90
479
+ [ Thu Sep 8 23:33:12 2022 ] Learning rate: 0.0015000000000000002
480
+ [ Thu Sep 8 23:36:34 2022 ] Mean training loss: 0.0142.
481
+ [ Thu Sep 8 23:36:34 2022 ] Time consumption: [Data]02%, [Network]98%
482
+ [ Thu Sep 8 23:36:34 2022 ] Eval epoch: 90
483
+ [ Thu Sep 8 23:38:46 2022 ] Epoch 90 Curr Acc: (11305/16487)68.57%
484
+ [ Thu Sep 8 23:38:46 2022 ] Epoch 54 Best Acc 70.13%
485
+ [ Thu Sep 8 23:38:46 2022 ] Training epoch: 91
486
+ [ Thu Sep 8 23:38:46 2022 ] Learning rate: 0.00015000000000000004
487
+ [ Thu Sep 8 23:42:08 2022 ] Mean training loss: 0.0120.
488
+ [ Thu Sep 8 23:42:08 2022 ] Time consumption: [Data]02%, [Network]98%
489
+ [ Thu Sep 8 23:42:08 2022 ] Eval epoch: 91
490
+ [ Thu Sep 8 23:44:20 2022 ] Epoch 91 Curr Acc: (11478/16487)69.62%
491
+ [ Thu Sep 8 23:44:20 2022 ] Epoch 54 Best Acc 70.13%
492
+ [ Thu Sep 8 23:44:20 2022 ] Training epoch: 92
493
+ [ Thu Sep 8 23:44:20 2022 ] Learning rate: 0.00015000000000000004
494
+ [ Thu Sep 8 23:47:42 2022 ] Mean training loss: 0.0129.
495
+ [ Thu Sep 8 23:47:42 2022 ] Time consumption: [Data]02%, [Network]98%
496
+ [ Thu Sep 8 23:47:42 2022 ] Eval epoch: 92
497
+ [ Thu Sep 8 23:49:54 2022 ] Epoch 92 Curr Acc: (11338/16487)68.77%
498
+ [ Thu Sep 8 23:49:54 2022 ] Epoch 54 Best Acc 70.13%
499
+ [ Thu Sep 8 23:49:54 2022 ] Training epoch: 93
500
+ [ Thu Sep 8 23:49:54 2022 ] Learning rate: 0.00015000000000000004
501
+ [ Thu Sep 8 23:53:16 2022 ] Mean training loss: 0.0124.
502
+ [ Thu Sep 8 23:53:16 2022 ] Time consumption: [Data]02%, [Network]98%
503
+ [ Thu Sep 8 23:53:16 2022 ] Eval epoch: 93
504
+ [ Thu Sep 8 23:55:28 2022 ] Epoch 93 Curr Acc: (11374/16487)68.99%
505
+ [ Thu Sep 8 23:55:28 2022 ] Epoch 54 Best Acc 70.13%
506
+ [ Thu Sep 8 23:55:28 2022 ] Training epoch: 94
507
+ [ Thu Sep 8 23:55:28 2022 ] Learning rate: 0.00015000000000000004
508
+ [ Thu Sep 8 23:58:50 2022 ] Mean training loss: 0.0114.
509
+ [ Thu Sep 8 23:58:50 2022 ] Time consumption: [Data]02%, [Network]98%
510
+ [ Thu Sep 8 23:58:50 2022 ] Eval epoch: 94
511
+ [ Fri Sep 9 00:01:03 2022 ] Epoch 94 Curr Acc: (11406/16487)69.18%
512
+ [ Fri Sep 9 00:01:03 2022 ] Epoch 54 Best Acc 70.13%
513
+ [ Fri Sep 9 00:01:03 2022 ] Training epoch: 95
514
+ [ Fri Sep 9 00:01:03 2022 ] Learning rate: 0.00015000000000000004
515
+ [ Fri Sep 9 00:04:25 2022 ] Mean training loss: 0.0121.
516
+ [ Fri Sep 9 00:04:25 2022 ] Time consumption: [Data]02%, [Network]98%
517
+ [ Fri Sep 9 00:04:25 2022 ] Eval epoch: 95
518
+ [ Fri Sep 9 00:06:38 2022 ] Epoch 95 Curr Acc: (11247/16487)68.22%
519
+ [ Fri Sep 9 00:06:38 2022 ] Epoch 54 Best Acc 70.13%
520
+ [ Fri Sep 9 00:06:38 2022 ] Training epoch: 96
521
+ [ Fri Sep 9 00:06:38 2022 ] Learning rate: 0.00015000000000000004
522
+ [ Fri Sep 9 00:10:00 2022 ] Mean training loss: 0.0120.
523
+ [ Fri Sep 9 00:10:00 2022 ] Time consumption: [Data]02%, [Network]98%
524
+ [ Fri Sep 9 00:10:00 2022 ] Eval epoch: 96
525
+ [ Fri Sep 9 00:12:12 2022 ] Epoch 96 Curr Acc: (11538/16487)69.98%
526
+ [ Fri Sep 9 00:12:12 2022 ] Epoch 54 Best Acc 70.13%
527
+ [ Fri Sep 9 00:12:12 2022 ] Training epoch: 97
528
+ [ Fri Sep 9 00:12:12 2022 ] Learning rate: 0.00015000000000000004
529
+ [ Fri Sep 9 00:15:34 2022 ] Mean training loss: 0.0118.
530
+ [ Fri Sep 9 00:15:34 2022 ] Time consumption: [Data]02%, [Network]98%
531
+ [ Fri Sep 9 00:15:34 2022 ] Eval epoch: 97
532
+ [ Fri Sep 9 00:17:47 2022 ] Epoch 97 Curr Acc: (11263/16487)68.31%
533
+ [ Fri Sep 9 00:17:47 2022 ] Epoch 54 Best Acc 70.13%
534
+ [ Fri Sep 9 00:17:47 2022 ] Training epoch: 98
535
+ [ Fri Sep 9 00:17:47 2022 ] Learning rate: 0.00015000000000000004
536
+ [ Fri Sep 9 00:21:09 2022 ] Mean training loss: 0.0121.
537
+ [ Fri Sep 9 00:21:09 2022 ] Time consumption: [Data]02%, [Network]98%
538
+ [ Fri Sep 9 00:21:09 2022 ] Eval epoch: 98
539
+ [ Fri Sep 9 00:23:22 2022 ] Epoch 98 Curr Acc: (11427/16487)69.31%
540
+ [ Fri Sep 9 00:23:22 2022 ] Epoch 54 Best Acc 70.13%
541
+ [ Fri Sep 9 00:23:22 2022 ] Training epoch: 99
542
+ [ Fri Sep 9 00:23:22 2022 ] Learning rate: 0.00015000000000000004
543
+ [ Fri Sep 9 00:26:44 2022 ] Mean training loss: 0.0116.
544
+ [ Fri Sep 9 00:26:44 2022 ] Time consumption: [Data]02%, [Network]98%
545
+ [ Fri Sep 9 00:26:44 2022 ] Eval epoch: 99
546
+ [ Fri Sep 9 00:28:57 2022 ] Epoch 99 Curr Acc: (11217/16487)68.04%
547
+ [ Fri Sep 9 00:28:57 2022 ] Epoch 54 Best Acc 70.13%
548
+ [ Fri Sep 9 00:28:57 2022 ] Training epoch: 100
549
+ [ Fri Sep 9 00:28:57 2022 ] Learning rate: 0.00015000000000000004
550
+ [ Fri Sep 9 00:32:18 2022 ] Mean training loss: 0.0122.
551
+ [ Fri Sep 9 00:32:18 2022 ] Time consumption: [Data]02%, [Network]98%
552
+ [ Fri Sep 9 00:32:18 2022 ] Eval epoch: 100
553
+ [ Fri Sep 9 00:34:31 2022 ] Epoch 100 Curr Acc: (11455/16487)69.48%
554
+ [ Fri Sep 9 00:34:31 2022 ] Epoch 54 Best Acc 70.13%
555
+ [ Fri Sep 9 00:34:31 2022 ] Training epoch: 101
556
+ [ Fri Sep 9 00:34:31 2022 ] Learning rate: 0.00015000000000000004
557
+ [ Fri Sep 9 00:37:52 2022 ] Mean training loss: 0.0125.
558
+ [ Fri Sep 9 00:37:52 2022 ] Time consumption: [Data]02%, [Network]98%
559
+ [ Fri Sep 9 00:37:52 2022 ] Eval epoch: 101
560
+ [ Fri Sep 9 00:40:05 2022 ] Epoch 101 Curr Acc: (11545/16487)70.02%
561
+ [ Fri Sep 9 00:40:05 2022 ] Epoch 54 Best Acc 70.13%
562
+ [ Fri Sep 9 00:40:05 2022 ] Training epoch: 102
563
+ [ Fri Sep 9 00:40:05 2022 ] Learning rate: 0.00015000000000000004
564
+ [ Fri Sep 9 00:43:27 2022 ] Mean training loss: 0.0123.
565
+ [ Fri Sep 9 00:43:27 2022 ] Time consumption: [Data]02%, [Network]98%
566
+ [ Fri Sep 9 00:43:27 2022 ] Eval epoch: 102
567
+ [ Fri Sep 9 00:45:39 2022 ] Epoch 102 Curr Acc: (11452/16487)69.46%
568
+ [ Fri Sep 9 00:45:39 2022 ] Epoch 54 Best Acc 70.13%
569
+ [ Fri Sep 9 00:45:39 2022 ] Training epoch: 103
570
+ [ Fri Sep 9 00:45:39 2022 ] Learning rate: 0.00015000000000000004
571
+ [ Fri Sep 9 00:49:01 2022 ] Mean training loss: 0.0130.
572
+ [ Fri Sep 9 00:49:01 2022 ] Time consumption: [Data]02%, [Network]98%
573
+ [ Fri Sep 9 00:49:01 2022 ] Eval epoch: 103
574
+ [ Fri Sep 9 00:51:13 2022 ] Epoch 103 Curr Acc: (11380/16487)69.02%
575
+ [ Fri Sep 9 00:51:13 2022 ] Epoch 54 Best Acc 70.13%
576
+ [ Fri Sep 9 00:51:13 2022 ] Training epoch: 104
577
+ [ Fri Sep 9 00:51:13 2022 ] Learning rate: 0.00015000000000000004
578
+ [ Fri Sep 9 00:54:34 2022 ] Mean training loss: 0.0120.
579
+ [ Fri Sep 9 00:54:34 2022 ] Time consumption: [Data]02%, [Network]98%
580
+ [ Fri Sep 9 00:54:34 2022 ] Eval epoch: 104
581
+ [ Fri Sep 9 00:56:47 2022 ] Epoch 104 Curr Acc: (11517/16487)69.86%
582
+ [ Fri Sep 9 00:56:47 2022 ] Epoch 54 Best Acc 70.13%
583
+ [ Fri Sep 9 00:56:47 2022 ] Training epoch: 105
584
+ [ Fri Sep 9 00:56:47 2022 ] Learning rate: 0.00015000000000000004
585
+ [ Fri Sep 9 01:00:08 2022 ] Mean training loss: 0.0142.
586
+ [ Fri Sep 9 01:00:08 2022 ] Time consumption: [Data]02%, [Network]98%
587
+ [ Fri Sep 9 01:00:08 2022 ] Eval epoch: 105
588
+ [ Fri Sep 9 01:02:21 2022 ] Epoch 105 Curr Acc: (11195/16487)67.90%
589
+ [ Fri Sep 9 01:02:21 2022 ] Epoch 54 Best Acc 70.13%
590
+ [ Fri Sep 9 01:02:21 2022 ] Training epoch: 106
591
+ [ Fri Sep 9 01:02:21 2022 ] Learning rate: 0.00015000000000000004
592
+ [ Fri Sep 9 01:05:42 2022 ] Mean training loss: 0.0107.
593
+ [ Fri Sep 9 01:05:42 2022 ] Time consumption: [Data]02%, [Network]98%
594
+ [ Fri Sep 9 01:05:42 2022 ] Eval epoch: 106
595
+ [ Fri Sep 9 01:07:55 2022 ] Epoch 106 Curr Acc: (11496/16487)69.73%
596
+ [ Fri Sep 9 01:07:55 2022 ] Epoch 54 Best Acc 70.13%
597
+ [ Fri Sep 9 01:07:55 2022 ] Training epoch: 107
598
+ [ Fri Sep 9 01:07:55 2022 ] Learning rate: 0.00015000000000000004
599
+ [ Fri Sep 9 01:11:17 2022 ] Mean training loss: 0.0116.
600
+ [ Fri Sep 9 01:11:17 2022 ] Time consumption: [Data]01%, [Network]98%
601
+ [ Fri Sep 9 01:11:17 2022 ] Eval epoch: 107
602
+ [ Fri Sep 9 01:13:29 2022 ] Epoch 107 Curr Acc: (11146/16487)67.60%
603
+ [ Fri Sep 9 01:13:29 2022 ] Epoch 54 Best Acc 70.13%
604
+ [ Fri Sep 9 01:13:29 2022 ] Training epoch: 108
605
+ [ Fri Sep 9 01:13:29 2022 ] Learning rate: 0.00015000000000000004
606
+ [ Fri Sep 9 01:16:51 2022 ] Mean training loss: 0.0110.
607
+ [ Fri Sep 9 01:16:51 2022 ] Time consumption: [Data]01%, [Network]98%
608
+ [ Fri Sep 9 01:16:51 2022 ] Eval epoch: 108
609
+ [ Fri Sep 9 01:19:03 2022 ] Epoch 108 Curr Acc: (11175/16487)67.78%
610
+ [ Fri Sep 9 01:19:04 2022 ] Epoch 54 Best Acc 70.13%
611
+ [ Fri Sep 9 01:19:04 2022 ] Training epoch: 109
612
+ [ Fri Sep 9 01:19:04 2022 ] Learning rate: 0.00015000000000000004
613
+ [ Fri Sep 9 01:22:26 2022 ] Mean training loss: 0.0119.
614
+ [ Fri Sep 9 01:22:26 2022 ] Time consumption: [Data]01%, [Network]98%
615
+ [ Fri Sep 9 01:22:26 2022 ] Eval epoch: 109
616
+ [ Fri Sep 9 01:24:38 2022 ] Epoch 109 Curr Acc: (11179/16487)67.80%
617
+ [ Fri Sep 9 01:24:38 2022 ] Epoch 54 Best Acc 70.13%
618
+ [ Fri Sep 9 01:24:38 2022 ] Training epoch: 110
619
+ [ Fri Sep 9 01:24:38 2022 ] Learning rate: 0.00015000000000000004
620
+ [ Fri Sep 9 01:28:00 2022 ] Mean training loss: 0.0118.
621
+ [ Fri Sep 9 01:28:00 2022 ] Time consumption: [Data]02%, [Network]98%
622
+ [ Fri Sep 9 01:28:00 2022 ] Eval epoch: 110
623
+ [ Fri Sep 9 01:30:12 2022 ] Epoch 110 Curr Acc: (11249/16487)68.23%
624
+ [ Fri Sep 9 01:30:12 2022 ] Epoch 54 Best Acc 70.13%
625
+ [ Fri Sep 9 01:30:12 2022 ] epoch: 54, best accuracy: 0.7013404500515558
626
+ [ Fri Sep 9 01:30:12 2022 ] Experiment: ./work_dir/ntu/xsub_j
627
+ [ Fri Sep 9 01:30:12 2022 ] # generator parameters: 2.896055 M.
628
+ [ Fri Sep 9 01:30:12 2022 ] Load weights from ./runs/ntu/xsub_j/runs-53-53244.pt.
629
+ [ Fri Sep 9 01:30:12 2022 ] Eval epoch: 1
630
+ [ Fri Sep 9 01:32:25 2022 ] Epoch 1 Curr Acc: (11563/16487)70.13%
631
+ [ Fri Sep 9 01:32:25 2022 ] Epoch 54 Best Acc 70.13%
ckpt/Others/MST-GCN/ntu60_xsub/xsub_jm/AEMST_GCN.py ADDED
@@ -0,0 +1,168 @@
1
+ import torch
2
+ import torch.nn as nn
3
+ import torch.nn.functional as F
4
+
5
+ import numpy as np
6
+ import math
7
+
8
+ import sys
9
+ sys.path.append('../')
10
+ from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \
11
+ MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \
12
+ MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer
13
+ from model.activations import Activations
14
+ from model.utils import import_class, conv_branch_init, conv_init, bn_init
15
+ from model.attentions import Attention_Layer
16
+
17
+ # import model.attentions
18
+
19
+ __block_type__ = {
20
+ 'basic': (Basic_GCN_layer, Basic_TCN_layer),
21
+ 'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer),
22
+ 'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer),
23
+ 'ms': (MS_GCN_layer, MS_TCN_layer),
24
+ 'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer),
25
+ }
26
+
27
+
28
+ class Model(nn.Module):
29
+ def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten,
30
+ **kwargs):
31
+ super(Model, self).__init__()
32
+ kwargs['act'] = Activations(kwargs['act'])
33
+ atten = None if atten == 'None' else atten
34
+ if graph is None:
35
+ raise ValueError()
36
+ else:
37
+ Graph = import_class(graph)
38
+ self.graph = Graph(**graph_args)
39
+ A = self.graph.A
40
+
41
+ self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point)
42
+
43
+ self.layers = nn.ModuleList()
44
+
45
+ for i, block in enumerate(block_args):
46
+ if i == 0:
47
+ self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2],
48
+ kernel_size=kernel_size, stride=block[3], A=A, block_type='basic',
49
+ atten=None, **kwargs))
50
+ else:
51
+ self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2],
52
+ kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type,
53
+ atten=atten, **kwargs))
54
+
55
+ self.gap = nn.AdaptiveAvgPool2d(1)
56
+ self.fc = nn.Linear(block_args[-1][1], num_class)
57
+
58
+ for m in self.modules():
59
+ if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer):
60
+ for mm in m.modules():
61
+ if isinstance(mm, nn.Conv2d):
62
+ conv_branch_init(mm, self.graph.A.shape[0])
63
+ if isinstance(mm, nn.BatchNorm2d):
64
+ bn_init(mm, 1)
65
+ elif isinstance(m, nn.Conv2d):
66
+ conv_init(m)
67
+ elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d):
68
+ bn_init(m, 1)
69
+ elif isinstance(m, nn.Linear):
70
+ nn.init.normal_(m.weight, 0, math.sqrt(2. / num_class))
71
+
72
+ def forward(self, x):
73
+ N, C, T, V, M = x.size()
74
+
75
+ x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T) # N C T V M --> N M V C T
76
+ x = self.data_bn(x)
77
+ x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
78
+
79
+ for i, layer in enumerate(self.layers):
80
+ x = layer(x)
81
+
82
+ features = x
83
+
84
+ x = self.gap(x).view(N, M, -1).mean(dim=1)
85
+ x = self.fc(x)
86
+
87
+ return features, x
88
+
89
+
90
+ class MST_GCN_block(nn.Module):
91
+ def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs):
92
+ super(MST_GCN_block, self).__init__()
93
+ self.atten = atten
94
+ self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A,
95
+ residual=residual, **kwargs)
96
+ self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride,
97
+ residual=residual, **kwargs)
98
+ if atten is not None:
99
+ self.att = Attention_Layer(out_channels, atten, **kwargs)
100
+
101
+ def forward(self, x):
102
+ return self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x))
103
+
104
+
105
+ if __name__ == '__main__':
106
+ import sys
107
+ import time
108
+
109
+ parts = [
110
+ np.array([5, 6, 7, 8, 22, 23]) - 1, # left_arm
111
+ np.array([9, 10, 11, 12, 24, 25]) - 1, # right_arm
112
+ np.array([13, 14, 15, 16]) - 1, # left_leg
113
+ np.array([17, 18, 19, 20]) - 1, # right_leg
114
+ np.array([1, 2, 3, 4, 21]) - 1 # torso
115
+ ]
116
+
117
+ warmup_iter = 3
118
+ test_iter = 10
119
+ sys.path.append('/home/chenzhan/mywork/MST-GCN/')
120
+ from thop import profile
121
+ basic_channels = 112
122
+ cfgs = {
123
+ 'num_class': 2,
124
+ 'num_point': 25,
125
+ 'num_person': 1,
126
+ 'block_args': [[2, basic_channels, False, 1],
127
+ [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1],
128
+ [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1],
129
+ [basic_channels*2, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]],
130
+ 'graph': 'graph.ntu_rgb_d.Graph',
131
+ 'graph_args': {'labeling_mode': 'spatial'},
132
+ 'kernel_size': 9,
133
+ 'block_type': 'ms',
134
+ 'reduct_ratio': 2,
135
+ 'expand_ratio': 0,
136
+ 't_scale': 4,
137
+ 'layer_type': 'sep',
138
+ 'act': 'relu',
139
+ 's_scale': 4,
140
+ 'atten': 'stcja',
141
+ 'bias': True,
142
+ 'parts': parts
143
+ }
144
+
145
+ model = Model(**cfgs)
146
+
147
+ N, C, T, V, M = 4, 2, 16, 25, 1
148
+ inputs = torch.rand(N, C, T, V, M)
149
+
150
+ for i in range(warmup_iter + test_iter):
151
+ if i == warmup_iter:
152
+ start_time = time.time()
153
+ outputs = model(inputs)
154
+ end_time = time.time()
155
+
156
+ total_time = end_time - start_time
157
+ print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format(
158
+ total_time, test_iter, total_time/test_iter/N))
159
+
160
+ print(outputs[1].size())  # forward returns (features, logits); print the logits shape
161
+
162
+ hereflops, params = profile(model, inputs=(inputs,), verbose=False)
163
+ print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N))
164
+ print('# Params is {} M'.format(sum(param.numel() for param in model.parameters()) / 10 ** 6))
165
+
166
+
167
+
168
+
ckpt/Others/MST-GCN/ntu60_xsub/xsub_jm/config.yaml ADDED
@@ -0,0 +1,107 @@
1
+ base_lr: 0.15
2
+ batch_size: 8
3
+ config: config/ntu/xsub_jm.yaml
4
+ device:
5
+ - 0
6
+ eval_interval: 5
7
+ feeder: feeders.feeder.Feeder
8
+ ignore_weights: []
9
+ local_rank: 0
10
+ log_interval: 100
11
+ model: model.AEMST_GCN.Model
12
+ model_args:
13
+ act: relu
14
+ atten: None
15
+ bias: true
16
+ block_args:
17
+ - - 3
18
+ - 112
19
+ - false
20
+ - 1
21
+ - - 112
22
+ - 112
23
+ - true
24
+ - 1
25
+ - - 112
26
+ - 112
27
+ - true
28
+ - 1
29
+ - - 112
30
+ - 112
31
+ - true
32
+ - 1
33
+ - - 112
34
+ - 224
35
+ - true
36
+ - 2
37
+ - - 224
38
+ - 224
39
+ - true
40
+ - 1
41
+ - - 224
42
+ - 224
43
+ - true
44
+ - 1
45
+ - - 224
46
+ - 448
47
+ - true
48
+ - 2
49
+ - - 448
50
+ - 448
51
+ - true
52
+ - 1
53
+ - - 448
54
+ - 448
55
+ - true
56
+ - 1
57
+ block_type: ms
58
+ expand_ratio: 0
59
+ graph: graph.ntu_rgb_d.Graph
60
+ graph_args:
61
+ labeling_mode: spatial
62
+ kernel_size: 9
63
+ layer_type: basic
64
+ num_class: 60
65
+ num_person: 2
66
+ num_point: 25
67
+ reduct_ratio: 2
68
+ s_scale: 4
69
+ t_scale: 4
70
+ model_path: ''
71
+ model_saved_name: ./runs/ntu/xsub_jm/runs
72
+ nesterov: true
73
+ num_epoch: 110
74
+ num_worker: 32
75
+ only_train_epoch: 0
76
+ only_train_part: false
77
+ optimizer: SGD
78
+ phase: train
79
+ print_log: true
80
+ save_interval: 1
81
+ save_score: true
82
+ seed: 1
83
+ show_topk:
84
+ - 1
85
+ - 5
86
+ start_epoch: 0
87
+ step:
88
+ - 50
89
+ - 70
90
+ - 90
91
+ test_batch_size: 64
92
+ test_feeder_args:
93
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_joint_motion.npy
94
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl
95
+ train_feeder_args:
96
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_joint_motion.npy
97
+ debug: false
98
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl
99
+ normalization: false
100
+ random_choose: false
101
+ random_move: false
102
+ random_shift: false
103
+ window_size: -1
104
+ warm_up_epoch: 10
105
+ weight_decay: 0.0001
106
+ weights: null
107
+ work_dir: ./work_dir/ntu/xsub_jm
ckpt/Others/MST-GCN/ntu60_xsub/xsub_jm/epoch1_test_score.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:a12cdadad8f353eda1d8d0c2215e1b376b764e52df61fea5d94dc748ff426cea
+ size 4979902
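
The epoch1_test_score.pkl above is stored as a Git LFS pointer; in the convention these training scripts follow, the real file is a pickled mapping from sample name to a 60-way score vector. A rough sketch of recomputing the top-1 figure printed in the adjacent log.txt (e.g. 11079/16487), assuming that layout and the val_label.pkl path from config.yaml — the dict-of-scores structure is an assumption about the saved format, not something visible in the pointer itself:

import pickle
import numpy as np

with open('ckpt/Others/MST-GCN/ntu60_xsub/xsub_jm/epoch1_test_score.pkl', 'rb') as f:
    scores = pickle.load(f)          # assumed: {sample_name: np.ndarray of shape (60,)}

with open('/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl', 'rb') as f:
    names, labels = pickle.load(f)   # assumed: parallel lists of sample names and int labels

preds = np.array([int(np.argmax(scores[n])) for n in names])
labels = np.array(labels)
print(f'top-1: {(preds == labels).sum()}/{len(labels)} = {(preds == labels).mean():.2%}')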
ckpt/Others/MST-GCN/ntu60_xsub/xsub_jm/log.txt ADDED
@@ -0,0 +1,631 @@
1
+ [ Thu Sep 8 17:07:46 2022 ] # generator parameters: 2.896055 M.
2
+ [ Thu Sep 8 17:07:46 2022 ] Parameters:
3
+ {'work_dir': './work_dir/ntu/xsub_jm', 'model_saved_name': './runs/ntu/xsub_jm/runs', 'config': 'config/ntu/xsub_jm.yaml', 'phase': 'train', 'save_score': True, 'seed': 1, 'log_interval': 100, 'save_interval': 1, 'eval_interval': 5, 'print_log': True, 'show_topk': [1, 5], 'feeder': 'feeders.feeder.Feeder', 'num_worker': 32, 'train_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_data_joint_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/train_label.pkl', 'debug': False, 'random_choose': False, 'random_shift': False, 'random_move': False, 'window_size': -1, 'normalization': False}, 'test_feeder_args': {'data_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_data_joint_motion.npy', 'label_path': '/data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xsub/val_label.pkl'}, 'model': 'model.AEMST_GCN.Model', 'model_args': {'num_class': 60, 'num_point': 25, 'num_person': 2, 'block_args': [[3, 112, False, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 112, True, 1], [112, 224, True, 2], [224, 224, True, 1], [224, 224, True, 1], [224, 448, True, 2], [448, 448, True, 1], [448, 448, True, 1]], 'graph': 'graph.ntu_rgb_d.Graph', 'graph_args': {'labeling_mode': 'spatial'}, 'kernel_size': 9, 'block_type': 'ms', 'reduct_ratio': 2, 'expand_ratio': 0, 's_scale': 4, 't_scale': 4, 'layer_type': 'basic', 'act': 'relu', 'atten': 'None', 'bias': True}, 'weights': None, 'ignore_weights': [], 'base_lr': 0.15, 'step': [50, 70, 90], 'device': [0], 'optimizer': 'SGD', 'nesterov': True, 'batch_size': 8, 'test_batch_size': 64, 'start_epoch': 0, 'model_path': '', 'num_epoch': 110, 'weight_decay': 0.0001, 'only_train_part': False, 'only_train_epoch': 0, 'warm_up_epoch': 10, 'local_rank': 0}
4
+
5
+ [ Thu Sep 8 17:07:46 2022 ] Training epoch: 1
6
+ [ Thu Sep 8 17:07:46 2022 ] Learning rate: 0.015
7
+ [ Thu Sep 8 17:11:08 2022 ] Mean training loss: 3.0683.
8
+ [ Thu Sep 8 17:11:08 2022 ] Time consumption: [Data]02%, [Network]98%
9
+ [ Thu Sep 8 17:11:08 2022 ] Training epoch: 2
10
+ [ Thu Sep 8 17:11:08 2022 ] Learning rate: 0.03
11
+ [ Thu Sep 8 17:14:28 2022 ] Mean training loss: 2.2041.
12
+ [ Thu Sep 8 17:14:28 2022 ] Time consumption: [Data]01%, [Network]99%
13
+ [ Thu Sep 8 17:14:28 2022 ] Training epoch: 3
14
+ [ Thu Sep 8 17:14:28 2022 ] Learning rate: 0.045
15
+ [ Thu Sep 8 17:17:48 2022 ] Mean training loss: 1.7924.
16
+ [ Thu Sep 8 17:17:48 2022 ] Time consumption: [Data]01%, [Network]99%
17
+ [ Thu Sep 8 17:17:48 2022 ] Training epoch: 4
18
+ [ Thu Sep 8 17:17:48 2022 ] Learning rate: 0.06
19
+ [ Thu Sep 8 17:21:09 2022 ] Mean training loss: 1.5479.
20
+ [ Thu Sep 8 17:21:09 2022 ] Time consumption: [Data]01%, [Network]99%
21
+ [ Thu Sep 8 17:21:09 2022 ] Training epoch: 5
22
+ [ Thu Sep 8 17:21:09 2022 ] Learning rate: 0.075
23
+ [ Thu Sep 8 17:24:29 2022 ] Mean training loss: 1.3728.
24
+ [ Thu Sep 8 17:24:29 2022 ] Time consumption: [Data]01%, [Network]99%
25
+ [ Thu Sep 8 17:24:29 2022 ] Training epoch: 6
26
+ [ Thu Sep 8 17:24:29 2022 ] Learning rate: 0.09
27
+ [ Thu Sep 8 17:27:50 2022 ] Mean training loss: 1.2656.
28
+ [ Thu Sep 8 17:27:50 2022 ] Time consumption: [Data]01%, [Network]99%
29
+ [ Thu Sep 8 17:27:50 2022 ] Training epoch: 7
30
+ [ Thu Sep 8 17:27:50 2022 ] Learning rate: 0.10500000000000001
31
+ [ Thu Sep 8 17:31:11 2022 ] Mean training loss: 1.1959.
32
+ [ Thu Sep 8 17:31:11 2022 ] Time consumption: [Data]01%, [Network]99%
33
+ [ Thu Sep 8 17:31:11 2022 ] Training epoch: 8
34
+ [ Thu Sep 8 17:31:11 2022 ] Learning rate: 0.12
35
+ [ Thu Sep 8 17:34:32 2022 ] Mean training loss: 1.1293.
36
+ [ Thu Sep 8 17:34:32 2022 ] Time consumption: [Data]01%, [Network]99%
37
+ [ Thu Sep 8 17:34:32 2022 ] Training epoch: 9
38
+ [ Thu Sep 8 17:34:32 2022 ] Learning rate: 0.13499999999999998
39
+ [ Thu Sep 8 17:37:53 2022 ] Mean training loss: 1.1130.
40
+ [ Thu Sep 8 17:37:53 2022 ] Time consumption: [Data]01%, [Network]99%
41
+ [ Thu Sep 8 17:37:53 2022 ] Training epoch: 10
42
+ [ Thu Sep 8 17:37:53 2022 ] Learning rate: 0.15
43
+ [ Thu Sep 8 17:41:15 2022 ] Mean training loss: 1.0630.
44
+ [ Thu Sep 8 17:41:15 2022 ] Time consumption: [Data]01%, [Network]99%
45
+ [ Thu Sep 8 17:41:15 2022 ] Training epoch: 11
46
+ [ Thu Sep 8 17:41:15 2022 ] Learning rate: 0.15
47
+ [ Thu Sep 8 17:44:36 2022 ] Mean training loss: 1.0072.
48
+ [ Thu Sep 8 17:44:36 2022 ] Time consumption: [Data]01%, [Network]99%
49
+ [ Thu Sep 8 17:44:36 2022 ] Training epoch: 12
50
+ [ Thu Sep 8 17:44:36 2022 ] Learning rate: 0.15
51
+ [ Thu Sep 8 17:47:57 2022 ] Mean training loss: 0.9853.
52
+ [ Thu Sep 8 17:47:57 2022 ] Time consumption: [Data]01%, [Network]99%
53
+ [ Thu Sep 8 17:47:57 2022 ] Training epoch: 13
54
+ [ Thu Sep 8 17:47:57 2022 ] Learning rate: 0.15
55
+ [ Thu Sep 8 17:51:18 2022 ] Mean training loss: 0.9481.
56
+ [ Thu Sep 8 17:51:18 2022 ] Time consumption: [Data]01%, [Network]99%
57
+ [ Thu Sep 8 17:51:18 2022 ] Training epoch: 14
58
+ [ Thu Sep 8 17:51:18 2022 ] Learning rate: 0.15
59
+ [ Thu Sep 8 17:54:39 2022 ] Mean training loss: 0.8977.
60
+ [ Thu Sep 8 17:54:39 2022 ] Time consumption: [Data]01%, [Network]99%
61
+ [ Thu Sep 8 17:54:39 2022 ] Training epoch: 15
62
+ [ Thu Sep 8 17:54:39 2022 ] Learning rate: 0.15
63
+ [ Thu Sep 8 17:57:59 2022 ] Mean training loss: 0.8794.
64
+ [ Thu Sep 8 17:57:59 2022 ] Time consumption: [Data]01%, [Network]99%
65
+ [ Thu Sep 8 17:57:59 2022 ] Training epoch: 16
66
+ [ Thu Sep 8 17:57:59 2022 ] Learning rate: 0.15
67
+ [ Thu Sep 8 18:01:20 2022 ] Mean training loss: 0.8747.
68
+ [ Thu Sep 8 18:01:20 2022 ] Time consumption: [Data]01%, [Network]99%
69
+ [ Thu Sep 8 18:01:20 2022 ] Training epoch: 17
70
+ [ Thu Sep 8 18:01:20 2022 ] Learning rate: 0.15
71
+ [ Thu Sep 8 18:04:40 2022 ] Mean training loss: 0.8299.
72
+ [ Thu Sep 8 18:04:40 2022 ] Time consumption: [Data]01%, [Network]99%
73
+ [ Thu Sep 8 18:04:40 2022 ] Training epoch: 18
74
+ [ Thu Sep 8 18:04:40 2022 ] Learning rate: 0.15
75
+ [ Thu Sep 8 18:08:01 2022 ] Mean training loss: 0.8331.
76
+ [ Thu Sep 8 18:08:01 2022 ] Time consumption: [Data]01%, [Network]99%
77
+ [ Thu Sep 8 18:08:01 2022 ] Training epoch: 19
78
+ [ Thu Sep 8 18:08:01 2022 ] Learning rate: 0.15
79
+ [ Thu Sep 8 18:11:22 2022 ] Mean training loss: 0.7954.
80
+ [ Thu Sep 8 18:11:22 2022 ] Time consumption: [Data]01%, [Network]99%
81
+ [ Thu Sep 8 18:11:22 2022 ] Training epoch: 20
82
+ [ Thu Sep 8 18:11:22 2022 ] Learning rate: 0.15
83
+ [ Thu Sep 8 18:14:43 2022 ] Mean training loss: 0.7783.
84
+ [ Thu Sep 8 18:14:43 2022 ] Time consumption: [Data]01%, [Network]99%
85
+ [ Thu Sep 8 18:14:43 2022 ] Training epoch: 21
86
+ [ Thu Sep 8 18:14:43 2022 ] Learning rate: 0.15
87
+ [ Thu Sep 8 18:18:05 2022 ] Mean training loss: 0.7715.
88
+ [ Thu Sep 8 18:18:05 2022 ] Time consumption: [Data]01%, [Network]99%
89
+ [ Thu Sep 8 18:18:05 2022 ] Training epoch: 22
90
+ [ Thu Sep 8 18:18:05 2022 ] Learning rate: 0.15
91
+ [ Thu Sep 8 18:21:26 2022 ] Mean training loss: 0.7561.
92
+ [ Thu Sep 8 18:21:26 2022 ] Time consumption: [Data]01%, [Network]99%
93
+ [ Thu Sep 8 18:21:26 2022 ] Training epoch: 23
94
+ [ Thu Sep 8 18:21:26 2022 ] Learning rate: 0.15
95
+ [ Thu Sep 8 18:24:48 2022 ] Mean training loss: 0.7335.
96
+ [ Thu Sep 8 18:24:48 2022 ] Time consumption: [Data]01%, [Network]99%
97
+ [ Thu Sep 8 18:24:48 2022 ] Training epoch: 24
98
+ [ Thu Sep 8 18:24:48 2022 ] Learning rate: 0.15
99
+ [ Thu Sep 8 18:28:09 2022 ] Mean training loss: 0.7450.
100
+ [ Thu Sep 8 18:28:09 2022 ] Time consumption: [Data]01%, [Network]99%
101
+ [ Thu Sep 8 18:28:09 2022 ] Training epoch: 25
102
+ [ Thu Sep 8 18:28:09 2022 ] Learning rate: 0.15
103
+ [ Thu Sep 8 18:31:30 2022 ] Mean training loss: 0.7210.
104
+ [ Thu Sep 8 18:31:30 2022 ] Time consumption: [Data]01%, [Network]98%
105
+ [ Thu Sep 8 18:31:30 2022 ] Training epoch: 26
106
+ [ Thu Sep 8 18:31:30 2022 ] Learning rate: 0.15
107
+ [ Thu Sep 8 18:34:49 2022 ] Mean training loss: 0.7067.
108
+ [ Thu Sep 8 18:34:49 2022 ] Time consumption: [Data]01%, [Network]99%
109
+ [ Thu Sep 8 18:34:49 2022 ] Training epoch: 27
110
+ [ Thu Sep 8 18:34:49 2022 ] Learning rate: 0.15
111
+ [ Thu Sep 8 18:38:10 2022 ] Mean training loss: 0.7015.
112
+ [ Thu Sep 8 18:38:10 2022 ] Time consumption: [Data]01%, [Network]99%
113
+ [ Thu Sep 8 18:38:10 2022 ] Training epoch: 28
114
+ [ Thu Sep 8 18:38:10 2022 ] Learning rate: 0.15
115
+ [ Thu Sep 8 18:41:30 2022 ] Mean training loss: 0.6972.
116
+ [ Thu Sep 8 18:41:30 2022 ] Time consumption: [Data]01%, [Network]99%
117
+ [ Thu Sep 8 18:41:30 2022 ] Training epoch: 29
118
+ [ Thu Sep 8 18:41:30 2022 ] Learning rate: 0.15
119
+ [ Thu Sep 8 18:44:51 2022 ] Mean training loss: 0.6854.
120
+ [ Thu Sep 8 18:44:51 2022 ] Time consumption: [Data]01%, [Network]99%
121
+ [ Thu Sep 8 18:44:51 2022 ] Training epoch: 30
122
+ [ Thu Sep 8 18:44:51 2022 ] Learning rate: 0.15
123
+ [ Thu Sep 8 18:48:11 2022 ] Mean training loss: 0.6598.
124
+ [ Thu Sep 8 18:48:11 2022 ] Time consumption: [Data]01%, [Network]99%
125
+ [ Thu Sep 8 18:48:11 2022 ] Training epoch: 31
126
+ [ Thu Sep 8 18:48:11 2022 ] Learning rate: 0.15
127
+ [ Thu Sep 8 18:51:32 2022 ] Mean training loss: 0.6721.
128
+ [ Thu Sep 8 18:51:32 2022 ] Time consumption: [Data]01%, [Network]99%
129
+ [ Thu Sep 8 18:51:32 2022 ] Training epoch: 32
130
+ [ Thu Sep 8 18:51:32 2022 ] Learning rate: 0.15
131
+ [ Thu Sep 8 18:54:53 2022 ] Mean training loss: 0.6566.
132
+ [ Thu Sep 8 18:54:53 2022 ] Time consumption: [Data]01%, [Network]99%
133
+ [ Thu Sep 8 18:54:53 2022 ] Training epoch: 33
134
+ [ Thu Sep 8 18:54:53 2022 ] Learning rate: 0.15
135
+ [ Thu Sep 8 18:58:14 2022 ] Mean training loss: 0.6786.
136
+ [ Thu Sep 8 18:58:14 2022 ] Time consumption: [Data]01%, [Network]99%
137
+ [ Thu Sep 8 18:58:14 2022 ] Training epoch: 34
138
+ [ Thu Sep 8 18:58:14 2022 ] Learning rate: 0.15
139
+ [ Thu Sep 8 19:01:35 2022 ] Mean training loss: 0.6565.
140
+ [ Thu Sep 8 19:01:35 2022 ] Time consumption: [Data]01%, [Network]99%
141
+ [ Thu Sep 8 19:01:35 2022 ] Training epoch: 35
142
+ [ Thu Sep 8 19:01:35 2022 ] Learning rate: 0.15
143
+ [ Thu Sep 8 19:04:56 2022 ] Mean training loss: 0.6696.
144
+ [ Thu Sep 8 19:04:56 2022 ] Time consumption: [Data]01%, [Network]99%
145
+ [ Thu Sep 8 19:04:56 2022 ] Training epoch: 36
146
+ [ Thu Sep 8 19:04:56 2022 ] Learning rate: 0.15
147
+ [ Thu Sep 8 19:08:17 2022 ] Mean training loss: 0.6123.
148
+ [ Thu Sep 8 19:08:17 2022 ] Time consumption: [Data]01%, [Network]98%
149
+ [ Thu Sep 8 19:08:17 2022 ] Training epoch: 37
150
+ [ Thu Sep 8 19:08:17 2022 ] Learning rate: 0.15
151
+ [ Thu Sep 8 19:11:37 2022 ] Mean training loss: 0.6360.
152
+ [ Thu Sep 8 19:11:37 2022 ] Time consumption: [Data]01%, [Network]99%
153
+ [ Thu Sep 8 19:11:37 2022 ] Training epoch: 38
154
+ [ Thu Sep 8 19:11:37 2022 ] Learning rate: 0.15
155
+ [ Thu Sep 8 19:14:59 2022 ] Mean training loss: 0.6435.
156
+ [ Thu Sep 8 19:14:59 2022 ] Time consumption: [Data]01%, [Network]99%
157
+ [ Thu Sep 8 19:14:59 2022 ] Training epoch: 39
158
+ [ Thu Sep 8 19:14:59 2022 ] Learning rate: 0.15
159
+ [ Thu Sep 8 19:18:20 2022 ] Mean training loss: 0.6291.
160
+ [ Thu Sep 8 19:18:20 2022 ] Time consumption: [Data]01%, [Network]99%
161
+ [ Thu Sep 8 19:18:20 2022 ] Training epoch: 40
162
+ [ Thu Sep 8 19:18:20 2022 ] Learning rate: 0.15
163
+ [ Thu Sep 8 19:21:42 2022 ] Mean training loss: 0.6281.
164
+ [ Thu Sep 8 19:21:42 2022 ] Time consumption: [Data]01%, [Network]98%
165
+ [ Thu Sep 8 19:21:42 2022 ] Training epoch: 41
166
+ [ Thu Sep 8 19:21:42 2022 ] Learning rate: 0.15
167
+ [ Thu Sep 8 19:25:03 2022 ] Mean training loss: 0.6269.
168
+ [ Thu Sep 8 19:25:03 2022 ] Time consumption: [Data]01%, [Network]98%
169
+ [ Thu Sep 8 19:25:03 2022 ] Training epoch: 42
170
+ [ Thu Sep 8 19:25:03 2022 ] Learning rate: 0.15
171
+ [ Thu Sep 8 19:28:24 2022 ] Mean training loss: 0.6014.
172
+ [ Thu Sep 8 19:28:24 2022 ] Time consumption: [Data]01%, [Network]99%
173
+ [ Thu Sep 8 19:28:24 2022 ] Training epoch: 43
174
+ [ Thu Sep 8 19:28:24 2022 ] Learning rate: 0.15
175
+ [ Thu Sep 8 19:31:45 2022 ] Mean training loss: 0.6220.
176
+ [ Thu Sep 8 19:31:45 2022 ] Time consumption: [Data]01%, [Network]99%
177
+ [ Thu Sep 8 19:31:45 2022 ] Training epoch: 44
178
+ [ Thu Sep 8 19:31:45 2022 ] Learning rate: 0.15
179
+ [ Thu Sep 8 19:35:05 2022 ] Mean training loss: 0.6377.
180
+ [ Thu Sep 8 19:35:05 2022 ] Time consumption: [Data]01%, [Network]99%
181
+ [ Thu Sep 8 19:35:05 2022 ] Training epoch: 45
182
+ [ Thu Sep 8 19:35:05 2022 ] Learning rate: 0.15
183
+ [ Thu Sep 8 19:38:26 2022 ] Mean training loss: 0.5951.
184
+ [ Thu Sep 8 19:38:26 2022 ] Time consumption: [Data]01%, [Network]99%
185
+ [ Thu Sep 8 19:38:26 2022 ] Training epoch: 46
186
+ [ Thu Sep 8 19:38:26 2022 ] Learning rate: 0.15
187
+ [ Thu Sep 8 19:41:47 2022 ] Mean training loss: 0.6152.
188
+ [ Thu Sep 8 19:41:47 2022 ] Time consumption: [Data]01%, [Network]99%
189
+ [ Thu Sep 8 19:41:47 2022 ] Training epoch: 47
190
+ [ Thu Sep 8 19:41:47 2022 ] Learning rate: 0.15
191
+ [ Thu Sep 8 19:45:07 2022 ] Mean training loss: 0.6070.
192
+ [ Thu Sep 8 19:45:07 2022 ] Time consumption: [Data]01%, [Network]99%
193
+ [ Thu Sep 8 19:45:07 2022 ] Training epoch: 48
194
+ [ Thu Sep 8 19:45:07 2022 ] Learning rate: 0.15
195
+ [ Thu Sep 8 19:48:28 2022 ] Mean training loss: 0.5967.
196
+ [ Thu Sep 8 19:48:28 2022 ] Time consumption: [Data]01%, [Network]99%
197
+ [ Thu Sep 8 19:48:28 2022 ] Training epoch: 49
198
+ [ Thu Sep 8 19:48:28 2022 ] Learning rate: 0.15
199
+ [ Thu Sep 8 19:51:49 2022 ] Mean training loss: 0.5964.
200
+ [ Thu Sep 8 19:51:49 2022 ] Time consumption: [Data]01%, [Network]99%
201
+ [ Thu Sep 8 19:51:49 2022 ] Training epoch: 50
202
+ [ Thu Sep 8 19:51:49 2022 ] Learning rate: 0.15
203
+ [ Thu Sep 8 19:55:10 2022 ] Mean training loss: 0.5941.
204
+ [ Thu Sep 8 19:55:10 2022 ] Time consumption: [Data]01%, [Network]99%
205
+ [ Thu Sep 8 19:55:10 2022 ] Training epoch: 51
206
+ [ Thu Sep 8 19:55:10 2022 ] Learning rate: 0.015
207
+ [ Thu Sep 8 19:58:31 2022 ] Mean training loss: 0.2782.
208
+ [ Thu Sep 8 19:58:31 2022 ] Time consumption: [Data]01%, [Network]99%
209
+ [ Thu Sep 8 19:58:31 2022 ] Eval epoch: 51
210
+ [ Thu Sep 8 20:00:43 2022 ] Epoch 51 Curr Acc: (10345/16487)62.75%
211
+ [ Thu Sep 8 20:00:43 2022 ] Epoch 51 Best Acc 62.75%
212
+ [ Thu Sep 8 20:00:43 2022 ] Training epoch: 52
213
+ [ Thu Sep 8 20:00:43 2022 ] Learning rate: 0.015
214
+ [ Thu Sep 8 20:04:04 2022 ] Mean training loss: 0.1700.
215
+ [ Thu Sep 8 20:04:04 2022 ] Time consumption: [Data]01%, [Network]99%
216
+ [ Thu Sep 8 20:04:04 2022 ] Eval epoch: 52
217
+ [ Thu Sep 8 20:06:15 2022 ] Epoch 52 Curr Acc: (10828/16487)65.68%
218
+ [ Thu Sep 8 20:06:15 2022 ] Epoch 52 Best Acc 65.68%
219
+ [ Thu Sep 8 20:06:15 2022 ] Training epoch: 53
220
+ [ Thu Sep 8 20:06:15 2022 ] Learning rate: 0.015
221
+ [ Thu Sep 8 20:09:36 2022 ] Mean training loss: 0.1363.
222
+ [ Thu Sep 8 20:09:36 2022 ] Time consumption: [Data]01%, [Network]99%
223
+ [ Thu Sep 8 20:09:36 2022 ] Eval epoch: 53
224
+ [ Thu Sep 8 20:11:47 2022 ] Epoch 53 Curr Acc: (10646/16487)64.57%
225
+ [ Thu Sep 8 20:11:47 2022 ] Epoch 52 Best Acc 65.68%
226
+ [ Thu Sep 8 20:11:47 2022 ] Training epoch: 54
227
+ [ Thu Sep 8 20:11:47 2022 ] Learning rate: 0.015
228
+ [ Thu Sep 8 20:15:08 2022 ] Mean training loss: 0.1032.
229
+ [ Thu Sep 8 20:15:08 2022 ] Time consumption: [Data]01%, [Network]99%
230
+ [ Thu Sep 8 20:15:08 2022 ] Eval epoch: 54
231
+ [ Thu Sep 8 20:17:19 2022 ] Epoch 54 Curr Acc: (10731/16487)65.09%
232
+ [ Thu Sep 8 20:17:19 2022 ] Epoch 52 Best Acc 65.68%
233
+ [ Thu Sep 8 20:17:19 2022 ] Training epoch: 55
234
+ [ Thu Sep 8 20:17:19 2022 ] Learning rate: 0.015
235
+ [ Thu Sep 8 20:20:40 2022 ] Mean training loss: 0.0870.
236
+ [ Thu Sep 8 20:20:40 2022 ] Time consumption: [Data]01%, [Network]99%
237
+ [ Thu Sep 8 20:20:40 2022 ] Eval epoch: 55
238
+ [ Thu Sep 8 20:22:51 2022 ] Epoch 55 Curr Acc: (10940/16487)66.36%
239
+ [ Thu Sep 8 20:22:51 2022 ] Epoch 55 Best Acc 66.36%
240
+ [ Thu Sep 8 20:22:51 2022 ] Training epoch: 56
241
+ [ Thu Sep 8 20:22:51 2022 ] Learning rate: 0.015
242
+ [ Thu Sep 8 20:26:11 2022 ] Mean training loss: 0.0690.
243
+ [ Thu Sep 8 20:26:11 2022 ] Time consumption: [Data]01%, [Network]99%
244
+ [ Thu Sep 8 20:26:11 2022 ] Eval epoch: 56
245
+ [ Thu Sep 8 20:28:22 2022 ] Epoch 56 Curr Acc: (10542/16487)63.94%
246
+ [ Thu Sep 8 20:28:22 2022 ] Epoch 55 Best Acc 66.36%
247
+ [ Thu Sep 8 20:28:22 2022 ] Training epoch: 57
248
+ [ Thu Sep 8 20:28:22 2022 ] Learning rate: 0.015
249
+ [ Thu Sep 8 20:31:42 2022 ] Mean training loss: 0.0608.
250
+ [ Thu Sep 8 20:31:42 2022 ] Time consumption: [Data]01%, [Network]99%
251
+ [ Thu Sep 8 20:31:43 2022 ] Eval epoch: 57
252
+ [ Thu Sep 8 20:33:54 2022 ] Epoch 57 Curr Acc: (10609/16487)64.35%
253
+ [ Thu Sep 8 20:33:54 2022 ] Epoch 55 Best Acc 66.36%
254
+ [ Thu Sep 8 20:33:54 2022 ] Training epoch: 58
255
+ [ Thu Sep 8 20:33:54 2022 ] Learning rate: 0.015
256
+ [ Thu Sep 8 20:37:14 2022 ] Mean training loss: 0.0489.
257
+ [ Thu Sep 8 20:37:14 2022 ] Time consumption: [Data]01%, [Network]99%
258
+ [ Thu Sep 8 20:37:14 2022 ] Eval epoch: 58
259
+ [ Thu Sep 8 20:39:25 2022 ] Epoch 58 Curr Acc: (10830/16487)65.69%
260
+ [ Thu Sep 8 20:39:25 2022 ] Epoch 55 Best Acc 66.36%
261
+ [ Thu Sep 8 20:39:25 2022 ] Training epoch: 59
262
+ [ Thu Sep 8 20:39:25 2022 ] Learning rate: 0.015
263
+ [ Thu Sep 8 20:42:45 2022 ] Mean training loss: 0.0413.
264
+ [ Thu Sep 8 20:42:45 2022 ] Time consumption: [Data]01%, [Network]99%
265
+ [ Thu Sep 8 20:42:45 2022 ] Eval epoch: 59
266
+ [ Thu Sep 8 20:44:56 2022 ] Epoch 59 Curr Acc: (10588/16487)64.22%
267
+ [ Thu Sep 8 20:44:56 2022 ] Epoch 55 Best Acc 66.36%
268
+ [ Thu Sep 8 20:44:56 2022 ] Training epoch: 60
269
+ [ Thu Sep 8 20:44:56 2022 ] Learning rate: 0.015
270
+ [ Thu Sep 8 20:48:17 2022 ] Mean training loss: 0.0338.
271
+ [ Thu Sep 8 20:48:17 2022 ] Time consumption: [Data]01%, [Network]99%
272
+ [ Thu Sep 8 20:48:17 2022 ] Eval epoch: 60
273
+ [ Thu Sep 8 20:50:28 2022 ] Epoch 60 Curr Acc: (10630/16487)64.48%
274
+ [ Thu Sep 8 20:50:28 2022 ] Epoch 55 Best Acc 66.36%
275
+ [ Thu Sep 8 20:50:28 2022 ] Training epoch: 61
276
+ [ Thu Sep 8 20:50:28 2022 ] Learning rate: 0.015
277
+ [ Thu Sep 8 20:53:48 2022 ] Mean training loss: 0.0319.
278
+ [ Thu Sep 8 20:53:48 2022 ] Time consumption: [Data]01%, [Network]99%
279
+ [ Thu Sep 8 20:53:48 2022 ] Eval epoch: 61
280
+ [ Thu Sep 8 20:55:59 2022 ] Epoch 61 Curr Acc: (10636/16487)64.51%
281
+ [ Thu Sep 8 20:55:59 2022 ] Epoch 55 Best Acc 66.36%
282
+ [ Thu Sep 8 20:55:59 2022 ] Training epoch: 62
283
+ [ Thu Sep 8 20:55:59 2022 ] Learning rate: 0.015
284
+ [ Thu Sep 8 20:59:19 2022 ] Mean training loss: 0.0293.
285
+ [ Thu Sep 8 20:59:19 2022 ] Time consumption: [Data]01%, [Network]99%
286
+ [ Thu Sep 8 20:59:19 2022 ] Eval epoch: 62
287
+ [ Thu Sep 8 21:01:31 2022 ] Epoch 62 Curr Acc: (10644/16487)64.56%
288
+ [ Thu Sep 8 21:01:31 2022 ] Epoch 55 Best Acc 66.36%
289
+ [ Thu Sep 8 21:01:31 2022 ] Training epoch: 63
290
+ [ Thu Sep 8 21:01:31 2022 ] Learning rate: 0.015
291
+ [ Thu Sep 8 21:04:52 2022 ] Mean training loss: 0.0260.
292
+ [ Thu Sep 8 21:04:52 2022 ] Time consumption: [Data]01%, [Network]99%
293
+ [ Thu Sep 8 21:04:52 2022 ] Eval epoch: 63
294
+ [ Thu Sep 8 21:07:03 2022 ] Epoch 63 Curr Acc: (10884/16487)66.02%
295
+ [ Thu Sep 8 21:07:03 2022 ] Epoch 55 Best Acc 66.36%
296
+ [ Thu Sep 8 21:07:03 2022 ] Training epoch: 64
297
+ [ Thu Sep 8 21:07:03 2022 ] Learning rate: 0.015
298
+ [ Thu Sep 8 21:10:24 2022 ] Mean training loss: 0.0219.
299
+ [ Thu Sep 8 21:10:24 2022 ] Time consumption: [Data]01%, [Network]99%
300
+ [ Thu Sep 8 21:10:24 2022 ] Eval epoch: 64
301
+ [ Thu Sep 8 21:12:35 2022 ] Epoch 64 Curr Acc: (10453/16487)63.40%
302
+ [ Thu Sep 8 21:12:35 2022 ] Epoch 55 Best Acc 66.36%
303
+ [ Thu Sep 8 21:12:35 2022 ] Training epoch: 65
304
+ [ Thu Sep 8 21:12:35 2022 ] Learning rate: 0.015
305
+ [ Thu Sep 8 21:15:56 2022 ] Mean training loss: 0.0255.
306
+ [ Thu Sep 8 21:15:56 2022 ] Time consumption: [Data]01%, [Network]99%
307
+ [ Thu Sep 8 21:15:56 2022 ] Eval epoch: 65
308
+ [ Thu Sep 8 21:18:07 2022 ] Epoch 65 Curr Acc: (10705/16487)64.93%
309
+ [ Thu Sep 8 21:18:07 2022 ] Epoch 55 Best Acc 66.36%
310
+ [ Thu Sep 8 21:18:07 2022 ] Training epoch: 66
311
+ [ Thu Sep 8 21:18:07 2022 ] Learning rate: 0.015
312
+ [ Thu Sep 8 21:21:28 2022 ] Mean training loss: 0.0226.
313
+ [ Thu Sep 8 21:21:28 2022 ] Time consumption: [Data]01%, [Network]99%
314
+ [ Thu Sep 8 21:21:28 2022 ] Eval epoch: 66
315
+ [ Thu Sep 8 21:23:39 2022 ] Epoch 66 Curr Acc: (10797/16487)65.49%
316
+ [ Thu Sep 8 21:23:39 2022 ] Epoch 55 Best Acc 66.36%
317
+ [ Thu Sep 8 21:23:39 2022 ] Training epoch: 67
318
+ [ Thu Sep 8 21:23:39 2022 ] Learning rate: 0.015
319
+ [ Thu Sep 8 21:27:00 2022 ] Mean training loss: 0.0182.
320
+ [ Thu Sep 8 21:27:00 2022 ] Time consumption: [Data]01%, [Network]99%
321
+ [ Thu Sep 8 21:27:00 2022 ] Eval epoch: 67
322
+ [ Thu Sep 8 21:29:11 2022 ] Epoch 67 Curr Acc: (10685/16487)64.81%
323
+ [ Thu Sep 8 21:29:11 2022 ] Epoch 55 Best Acc 66.36%
324
+ [ Thu Sep 8 21:29:11 2022 ] Training epoch: 68
325
+ [ Thu Sep 8 21:29:11 2022 ] Learning rate: 0.015
326
+ [ Thu Sep 8 21:32:32 2022 ] Mean training loss: 0.0168.
327
+ [ Thu Sep 8 21:32:32 2022 ] Time consumption: [Data]01%, [Network]99%
328
+ [ Thu Sep 8 21:32:32 2022 ] Eval epoch: 68
329
+ [ Thu Sep 8 21:34:43 2022 ] Epoch 68 Curr Acc: (10819/16487)65.62%
330
+ [ Thu Sep 8 21:34:43 2022 ] Epoch 55 Best Acc 66.36%
331
+ [ Thu Sep 8 21:34:43 2022 ] Training epoch: 69
332
+ [ Thu Sep 8 21:34:43 2022 ] Learning rate: 0.015
333
+ [ Thu Sep 8 21:38:04 2022 ] Mean training loss: 0.0169.
334
+ [ Thu Sep 8 21:38:04 2022 ] Time consumption: [Data]01%, [Network]99%
335
+ [ Thu Sep 8 21:38:04 2022 ] Eval epoch: 69
336
+ [ Thu Sep 8 21:40:15 2022 ] Epoch 69 Curr Acc: (9777/16487)59.30%
337
+ [ Thu Sep 8 21:40:15 2022 ] Epoch 55 Best Acc 66.36%
338
+ [ Thu Sep 8 21:40:15 2022 ] Training epoch: 70
339
+ [ Thu Sep 8 21:40:15 2022 ] Learning rate: 0.015
340
+ [ Thu Sep 8 21:43:36 2022 ] Mean training loss: 0.0226.
341
+ [ Thu Sep 8 21:43:36 2022 ] Time consumption: [Data]01%, [Network]99%
342
+ [ Thu Sep 8 21:43:36 2022 ] Eval epoch: 70
343
+ [ Thu Sep 8 21:45:47 2022 ] Epoch 70 Curr Acc: (10540/16487)63.93%
344
+ [ Thu Sep 8 21:45:47 2022 ] Epoch 55 Best Acc 66.36%
345
+ [ Thu Sep 8 21:45:47 2022 ] Training epoch: 71
346
+ [ Thu Sep 8 21:45:47 2022 ] Learning rate: 0.0015000000000000002
347
+ [ Thu Sep 8 21:49:08 2022 ] Mean training loss: 0.0186.
348
+ [ Thu Sep 8 21:49:08 2022 ] Time consumption: [Data]01%, [Network]99%
349
+ [ Thu Sep 8 21:49:08 2022 ] Eval epoch: 71
350
+ [ Thu Sep 8 21:51:19 2022 ] Epoch 71 Curr Acc: (10906/16487)66.15%
351
+ [ Thu Sep 8 21:51:19 2022 ] Epoch 55 Best Acc 66.36%
352
+ [ Thu Sep 8 21:51:19 2022 ] Training epoch: 72
353
+ [ Thu Sep 8 21:51:19 2022 ] Learning rate: 0.0015000000000000002
354
+ [ Thu Sep 8 21:54:40 2022 ] Mean training loss: 0.0140.
355
+ [ Thu Sep 8 21:54:40 2022 ] Time consumption: [Data]01%, [Network]99%
356
+ [ Thu Sep 8 21:54:41 2022 ] Eval epoch: 72
357
+ [ Thu Sep 8 21:56:52 2022 ] Epoch 72 Curr Acc: (10678/16487)64.77%
358
+ [ Thu Sep 8 21:56:52 2022 ] Epoch 55 Best Acc 66.36%
359
+ [ Thu Sep 8 21:56:52 2022 ] Training epoch: 73
360
+ [ Thu Sep 8 21:56:52 2022 ] Learning rate: 0.0015000000000000002
361
+ [ Thu Sep 8 22:00:12 2022 ] Mean training loss: 0.0141.
362
+ [ Thu Sep 8 22:00:12 2022 ] Time consumption: [Data]01%, [Network]99%
363
+ [ Thu Sep 8 22:00:12 2022 ] Eval epoch: 73
364
+ [ Thu Sep 8 22:02:23 2022 ] Epoch 73 Curr Acc: (10847/16487)65.79%
365
+ [ Thu Sep 8 22:02:23 2022 ] Epoch 55 Best Acc 66.36%
366
+ [ Thu Sep 8 22:02:23 2022 ] Training epoch: 74
367
+ [ Thu Sep 8 22:02:23 2022 ] Learning rate: 0.0015000000000000002
368
+ [ Thu Sep 8 22:05:44 2022 ] Mean training loss: 0.0123.
369
+ [ Thu Sep 8 22:05:44 2022 ] Time consumption: [Data]01%, [Network]99%
370
+ [ Thu Sep 8 22:05:44 2022 ] Eval epoch: 74
371
+ [ Thu Sep 8 22:07:55 2022 ] Epoch 74 Curr Acc: (10615/16487)64.38%
372
+ [ Thu Sep 8 22:07:55 2022 ] Epoch 55 Best Acc 66.36%
373
+ [ Thu Sep 8 22:07:55 2022 ] Training epoch: 75
374
+ [ Thu Sep 8 22:07:55 2022 ] Learning rate: 0.0015000000000000002
375
+ [ Thu Sep 8 22:11:15 2022 ] Mean training loss: 0.0108.
376
+ [ Thu Sep 8 22:11:15 2022 ] Time consumption: [Data]01%, [Network]99%
377
+ [ Thu Sep 8 22:11:15 2022 ] Eval epoch: 75
378
+ [ Thu Sep 8 22:13:27 2022 ] Epoch 75 Curr Acc: (10935/16487)66.32%
379
+ [ Thu Sep 8 22:13:27 2022 ] Epoch 55 Best Acc 66.36%
380
+ [ Thu Sep 8 22:13:27 2022 ] Training epoch: 76
381
+ [ Thu Sep 8 22:13:27 2022 ] Learning rate: 0.0015000000000000002
382
+ [ Thu Sep 8 22:16:48 2022 ] Mean training loss: 0.0120.
383
+ [ Thu Sep 8 22:16:48 2022 ] Time consumption: [Data]01%, [Network]99%
384
+ [ Thu Sep 8 22:16:48 2022 ] Eval epoch: 76
385
+ [ Thu Sep 8 22:18:59 2022 ] Epoch 76 Curr Acc: (10867/16487)65.91%
386
+ [ Thu Sep 8 22:18:59 2022 ] Epoch 55 Best Acc 66.36%
387
+ [ Thu Sep 8 22:18:59 2022 ] Training epoch: 77
388
+ [ Thu Sep 8 22:18:59 2022 ] Learning rate: 0.0015000000000000002
389
+ [ Thu Sep 8 22:22:20 2022 ] Mean training loss: 0.0102.
390
+ [ Thu Sep 8 22:22:20 2022 ] Time consumption: [Data]01%, [Network]99%
391
+ [ Thu Sep 8 22:22:20 2022 ] Eval epoch: 77
392
+ [ Thu Sep 8 22:24:31 2022 ] Epoch 77 Curr Acc: (10603/16487)64.31%
393
+ [ Thu Sep 8 22:24:31 2022 ] Epoch 55 Best Acc 66.36%
394
+ [ Thu Sep 8 22:24:31 2022 ] Training epoch: 78
395
+ [ Thu Sep 8 22:24:31 2022 ] Learning rate: 0.0015000000000000002
396
+ [ Thu Sep 8 22:27:51 2022 ] Mean training loss: 0.0102.
397
+ [ Thu Sep 8 22:27:51 2022 ] Time consumption: [Data]01%, [Network]99%
398
+ [ Thu Sep 8 22:27:51 2022 ] Eval epoch: 78
399
+ [ Thu Sep 8 22:30:03 2022 ] Epoch 78 Curr Acc: (10937/16487)66.34%
400
+ [ Thu Sep 8 22:30:03 2022 ] Epoch 55 Best Acc 66.36%
401
+ [ Thu Sep 8 22:30:03 2022 ] Training epoch: 79
402
+ [ Thu Sep 8 22:30:03 2022 ] Learning rate: 0.0015000000000000002
403
+ [ Thu Sep 8 22:33:23 2022 ] Mean training loss: 0.0114.
404
+ [ Thu Sep 8 22:33:23 2022 ] Time consumption: [Data]01%, [Network]99%
405
+ [ Thu Sep 8 22:33:23 2022 ] Eval epoch: 79
406
+ [ Thu Sep 8 22:35:35 2022 ] Epoch 79 Curr Acc: (11049/16487)67.02%
407
+ [ Thu Sep 8 22:35:35 2022 ] Epoch 79 Best Acc 67.02%
408
+ [ Thu Sep 8 22:35:35 2022 ] Training epoch: 80
409
+ [ Thu Sep 8 22:35:35 2022 ] Learning rate: 0.0015000000000000002
410
+ [ Thu Sep 8 22:38:55 2022 ] Mean training loss: 0.0108.
411
+ [ Thu Sep 8 22:38:55 2022 ] Time consumption: [Data]01%, [Network]99%
412
+ [ Thu Sep 8 22:38:55 2022 ] Eval epoch: 80
413
+ [ Thu Sep 8 22:41:07 2022 ] Epoch 80 Curr Acc: (10682/16487)64.79%
414
+ [ Thu Sep 8 22:41:07 2022 ] Epoch 79 Best Acc 67.02%
415
+ [ Thu Sep 8 22:41:07 2022 ] Training epoch: 81
416
+ [ Thu Sep 8 22:41:07 2022 ] Learning rate: 0.0015000000000000002
417
+ [ Thu Sep 8 22:44:27 2022 ] Mean training loss: 0.0099.
418
+ [ Thu Sep 8 22:44:27 2022 ] Time consumption: [Data]01%, [Network]99%
419
+ [ Thu Sep 8 22:44:27 2022 ] Eval epoch: 81
420
+ [ Thu Sep 8 22:46:38 2022 ] Epoch 81 Curr Acc: (11079/16487)67.20%
421
+ [ Thu Sep 8 22:46:38 2022 ] Epoch 81 Best Acc 67.20%
422
+ [ Thu Sep 8 22:46:38 2022 ] Training epoch: 82
423
+ [ Thu Sep 8 22:46:38 2022 ] Learning rate: 0.0015000000000000002
424
+ [ Thu Sep 8 22:49:59 2022 ] Mean training loss: 0.0107.
425
+ [ Thu Sep 8 22:49:59 2022 ] Time consumption: [Data]01%, [Network]99%
426
+ [ Thu Sep 8 22:49:59 2022 ] Eval epoch: 82
427
+ [ Thu Sep 8 22:52:10 2022 ] Epoch 82 Curr Acc: (10525/16487)63.84%
428
+ [ Thu Sep 8 22:52:10 2022 ] Epoch 81 Best Acc 67.20%
429
+ [ Thu Sep 8 22:52:10 2022 ] Training epoch: 83
430
+ [ Thu Sep 8 22:52:10 2022 ] Learning rate: 0.0015000000000000002
431
+ [ Thu Sep 8 22:55:30 2022 ] Mean training loss: 0.0095.
432
+ [ Thu Sep 8 22:55:30 2022 ] Time consumption: [Data]01%, [Network]99%
433
+ [ Thu Sep 8 22:55:30 2022 ] Eval epoch: 83
434
+ [ Thu Sep 8 22:57:41 2022 ] Epoch 83 Curr Acc: (10832/16487)65.70%
435
+ [ Thu Sep 8 22:57:41 2022 ] Epoch 81 Best Acc 67.20%
436
+ [ Thu Sep 8 22:57:41 2022 ] Training epoch: 84
437
+ [ Thu Sep 8 22:57:41 2022 ] Learning rate: 0.0015000000000000002
438
+ [ Thu Sep 8 23:01:01 2022 ] Mean training loss: 0.0110.
439
+ [ Thu Sep 8 23:01:01 2022 ] Time consumption: [Data]01%, [Network]99%
440
+ [ Thu Sep 8 23:01:02 2022 ] Eval epoch: 84
441
+ [ Thu Sep 8 23:03:13 2022 ] Epoch 84 Curr Acc: (10785/16487)65.42%
442
+ [ Thu Sep 8 23:03:13 2022 ] Epoch 81 Best Acc 67.20%
443
+ [ Thu Sep 8 23:03:13 2022 ] Training epoch: 85
444
+ [ Thu Sep 8 23:03:13 2022 ] Learning rate: 0.0015000000000000002
445
+ [ Thu Sep 8 23:06:33 2022 ] Mean training loss: 0.0099.
446
+ [ Thu Sep 8 23:06:33 2022 ] Time consumption: [Data]01%, [Network]99%
447
+ [ Thu Sep 8 23:06:33 2022 ] Eval epoch: 85
448
+ [ Thu Sep 8 23:08:45 2022 ] Epoch 85 Curr Acc: (10775/16487)65.35%
449
+ [ Thu Sep 8 23:08:45 2022 ] Epoch 81 Best Acc 67.20%
450
+ [ Thu Sep 8 23:08:45 2022 ] Training epoch: 86
451
+ [ Thu Sep 8 23:08:45 2022 ] Learning rate: 0.0015000000000000002
452
+ [ Thu Sep 8 23:12:06 2022 ] Mean training loss: 0.0103.
453
+ [ Thu Sep 8 23:12:06 2022 ] Time consumption: [Data]01%, [Network]99%
454
+ [ Thu Sep 8 23:12:06 2022 ] Eval epoch: 86
455
+ [ Thu Sep 8 23:14:17 2022 ] Epoch 86 Curr Acc: (10707/16487)64.94%
456
+ [ Thu Sep 8 23:14:17 2022 ] Epoch 81 Best Acc 67.20%
457
+ [ Thu Sep 8 23:14:17 2022 ] Training epoch: 87
458
+ [ Thu Sep 8 23:14:17 2022 ] Learning rate: 0.0015000000000000002
459
+ [ Thu Sep 8 23:17:38 2022 ] Mean training loss: 0.0099.
460
+ [ Thu Sep 8 23:17:38 2022 ] Time consumption: [Data]01%, [Network]98%
461
+ [ Thu Sep 8 23:17:38 2022 ] Eval epoch: 87
462
+ [ Thu Sep 8 23:19:49 2022 ] Epoch 87 Curr Acc: (10859/16487)65.86%
463
+ [ Thu Sep 8 23:19:49 2022 ] Epoch 81 Best Acc 67.20%
464
+ [ Thu Sep 8 23:19:49 2022 ] Training epoch: 88
465
+ [ Thu Sep 8 23:19:49 2022 ] Learning rate: 0.0015000000000000002
466
+ [ Thu Sep 8 23:23:09 2022 ] Mean training loss: 0.0093.
467
+ [ Thu Sep 8 23:23:09 2022 ] Time consumption: [Data]01%, [Network]99%
468
+ [ Thu Sep 8 23:23:09 2022 ] Eval epoch: 88
469
+ [ Thu Sep 8 23:25:20 2022 ] Epoch 88 Curr Acc: (10745/16487)65.17%
470
+ [ Thu Sep 8 23:25:20 2022 ] Epoch 81 Best Acc 67.20%
471
+ [ Thu Sep 8 23:25:20 2022 ] Training epoch: 89
472
+ [ Thu Sep 8 23:25:20 2022 ] Learning rate: 0.0015000000000000002
473
+ [ Thu Sep 8 23:28:40 2022 ] Mean training loss: 0.0092.
474
+ [ Thu Sep 8 23:28:40 2022 ] Time consumption: [Data]01%, [Network]99%
475
+ [ Thu Sep 8 23:28:40 2022 ] Eval epoch: 89
476
+ [ Thu Sep 8 23:30:51 2022 ] Epoch 89 Curr Acc: (10769/16487)65.32%
477
+ [ Thu Sep 8 23:30:51 2022 ] Epoch 81 Best Acc 67.20%
478
+ [ Thu Sep 8 23:30:51 2022 ] Training epoch: 90
479
+ [ Thu Sep 8 23:30:51 2022 ] Learning rate: 0.0015000000000000002
480
+ [ Thu Sep 8 23:34:12 2022 ] Mean training loss: 0.0092.
481
+ [ Thu Sep 8 23:34:12 2022 ] Time consumption: [Data]01%, [Network]99%
482
+ [ Thu Sep 8 23:34:12 2022 ] Eval epoch: 90
483
+ [ Thu Sep 8 23:36:24 2022 ] Epoch 90 Curr Acc: (10529/16487)63.86%
484
+ [ Thu Sep 8 23:36:24 2022 ] Epoch 81 Best Acc 67.20%
485
+ [ Thu Sep 8 23:36:24 2022 ] Training epoch: 91
486
+ [ Thu Sep 8 23:36:24 2022 ] Learning rate: 0.00015000000000000004
487
+ [ Thu Sep 8 23:39:45 2022 ] Mean training loss: 0.0094.
488
+ [ Thu Sep 8 23:39:45 2022 ] Time consumption: [Data]01%, [Network]99%
489
+ [ Thu Sep 8 23:39:45 2022 ] Eval epoch: 91
490
+ [ Thu Sep 8 23:41:56 2022 ] Epoch 91 Curr Acc: (10627/16487)64.46%
491
+ [ Thu Sep 8 23:41:56 2022 ] Epoch 81 Best Acc 67.20%
492
+ [ Thu Sep 8 23:41:56 2022 ] Training epoch: 92
493
+ [ Thu Sep 8 23:41:56 2022 ] Learning rate: 0.00015000000000000004
494
+ [ Thu Sep 8 23:45:17 2022 ] Mean training loss: 0.0101.
495
+ [ Thu Sep 8 23:45:17 2022 ] Time consumption: [Data]01%, [Network]99%
496
+ [ Thu Sep 8 23:45:17 2022 ] Eval epoch: 92
497
+ [ Thu Sep 8 23:47:28 2022 ] Epoch 92 Curr Acc: (10965/16487)66.51%
498
+ [ Thu Sep 8 23:47:28 2022 ] Epoch 81 Best Acc 67.20%
499
+ [ Thu Sep 8 23:47:28 2022 ] Training epoch: 93
500
+ [ Thu Sep 8 23:47:28 2022 ] Learning rate: 0.00015000000000000004
501
+ [ Thu Sep 8 23:50:49 2022 ] Mean training loss: 0.0089.
502
+ [ Thu Sep 8 23:50:49 2022 ] Time consumption: [Data]01%, [Network]99%
503
+ [ Thu Sep 8 23:50:49 2022 ] Eval epoch: 93
504
+ [ Thu Sep 8 23:53:00 2022 ] Epoch 93 Curr Acc: (10688/16487)64.83%
505
+ [ Thu Sep 8 23:53:00 2022 ] Epoch 81 Best Acc 67.20%
506
+ [ Thu Sep 8 23:53:00 2022 ] Training epoch: 94
507
+ [ Thu Sep 8 23:53:00 2022 ] Learning rate: 0.00015000000000000004
508
+ [ Thu Sep 8 23:56:20 2022 ] Mean training loss: 0.0093.
509
+ [ Thu Sep 8 23:56:20 2022 ] Time consumption: [Data]01%, [Network]98%
510
+ [ Thu Sep 8 23:56:20 2022 ] Eval epoch: 94
511
+ [ Thu Sep 8 23:58:31 2022 ] Epoch 94 Curr Acc: (10938/16487)66.34%
512
+ [ Thu Sep 8 23:58:31 2022 ] Epoch 81 Best Acc 67.20%
513
+ [ Thu Sep 8 23:58:31 2022 ] Training epoch: 95
514
+ [ Thu Sep 8 23:58:31 2022 ] Learning rate: 0.00015000000000000004
515
+ [ Fri Sep 9 00:01:53 2022 ] Mean training loss: 0.0093.
516
+ [ Fri Sep 9 00:01:53 2022 ] Time consumption: [Data]01%, [Network]99%
517
+ [ Fri Sep 9 00:01:53 2022 ] Eval epoch: 95
518
+ [ Fri Sep 9 00:04:04 2022 ] Epoch 95 Curr Acc: (10874/16487)65.95%
519
+ [ Fri Sep 9 00:04:04 2022 ] Epoch 81 Best Acc 67.20%
520
+ [ Fri Sep 9 00:04:04 2022 ] Training epoch: 96
521
+ [ Fri Sep 9 00:04:04 2022 ] Learning rate: 0.00015000000000000004
522
+ [ Fri Sep 9 00:07:24 2022 ] Mean training loss: 0.0082.
523
+ [ Fri Sep 9 00:07:24 2022 ] Time consumption: [Data]01%, [Network]99%
524
+ [ Fri Sep 9 00:07:24 2022 ] Eval epoch: 96
525
+ [ Fri Sep 9 00:09:36 2022 ] Epoch 96 Curr Acc: (10877/16487)65.97%
526
+ [ Fri Sep 9 00:09:36 2022 ] Epoch 81 Best Acc 67.20%
527
+ [ Fri Sep 9 00:09:36 2022 ] Training epoch: 97
528
+ [ Fri Sep 9 00:09:36 2022 ] Learning rate: 0.00015000000000000004
529
+ [ Fri Sep 9 00:12:55 2022 ] Mean training loss: 0.0089.
530
+ [ Fri Sep 9 00:12:55 2022 ] Time consumption: [Data]01%, [Network]98%
531
+ [ Fri Sep 9 00:12:55 2022 ] Eval epoch: 97
532
+ [ Fri Sep 9 00:15:06 2022 ] Epoch 97 Curr Acc: (10844/16487)65.77%
533
+ [ Fri Sep 9 00:15:06 2022 ] Epoch 81 Best Acc 67.20%
534
+ [ Fri Sep 9 00:15:06 2022 ] Training epoch: 98
535
+ [ Fri Sep 9 00:15:06 2022 ] Learning rate: 0.00015000000000000004
536
+ [ Fri Sep 9 00:18:28 2022 ] Mean training loss: 0.0083.
537
+ [ Fri Sep 9 00:18:28 2022 ] Time consumption: [Data]01%, [Network]99%
538
+ [ Fri Sep 9 00:18:28 2022 ] Eval epoch: 98
539
+ [ Fri Sep 9 00:20:39 2022 ] Epoch 98 Curr Acc: (10985/16487)66.63%
540
+ [ Fri Sep 9 00:20:39 2022 ] Epoch 81 Best Acc 67.20%
541
+ [ Fri Sep 9 00:20:39 2022 ] Training epoch: 99
542
+ [ Fri Sep 9 00:20:39 2022 ] Learning rate: 0.00015000000000000004
543
+ [ Fri Sep 9 00:24:01 2022 ] Mean training loss: 0.0095.
544
+ [ Fri Sep 9 00:24:01 2022 ] Time consumption: [Data]01%, [Network]99%
545
+ [ Fri Sep 9 00:24:01 2022 ] Eval epoch: 99
546
+ [ Fri Sep 9 00:26:12 2022 ] Epoch 99 Curr Acc: (10930/16487)66.29%
547
+ [ Fri Sep 9 00:26:12 2022 ] Epoch 81 Best Acc 67.20%
548
+ [ Fri Sep 9 00:26:12 2022 ] Training epoch: 100
549
+ [ Fri Sep 9 00:26:12 2022 ] Learning rate: 0.00015000000000000004
550
+ [ Fri Sep 9 00:29:33 2022 ] Mean training loss: 0.0086.
551
+ [ Fri Sep 9 00:29:33 2022 ] Time consumption: [Data]01%, [Network]99%
552
+ [ Fri Sep 9 00:29:33 2022 ] Eval epoch: 100
553
+ [ Fri Sep 9 00:31:44 2022 ] Epoch 100 Curr Acc: (11079/16487)67.20%
554
+ [ Fri Sep 9 00:31:44 2022 ] Epoch 81 Best Acc 67.20%
555
+ [ Fri Sep 9 00:31:44 2022 ] Training epoch: 101
556
+ [ Fri Sep 9 00:31:44 2022 ] Learning rate: 0.00015000000000000004
557
+ [ Fri Sep 9 00:35:04 2022 ] Mean training loss: 0.0087.
558
+ [ Fri Sep 9 00:35:04 2022 ] Time consumption: [Data]01%, [Network]99%
559
+ [ Fri Sep 9 00:35:04 2022 ] Eval epoch: 101
560
+ [ Fri Sep 9 00:37:16 2022 ] Epoch 101 Curr Acc: (10870/16487)65.93%
561
+ [ Fri Sep 9 00:37:16 2022 ] Epoch 81 Best Acc 67.20%
562
+ [ Fri Sep 9 00:37:16 2022 ] Training epoch: 102
563
+ [ Fri Sep 9 00:37:16 2022 ] Learning rate: 0.00015000000000000004
564
+ [ Fri Sep 9 00:40:37 2022 ] Mean training loss: 0.0101.
565
+ [ Fri Sep 9 00:40:37 2022 ] Time consumption: [Data]01%, [Network]99%
566
+ [ Fri Sep 9 00:40:37 2022 ] Eval epoch: 102
567
+ [ Fri Sep 9 00:42:48 2022 ] Epoch 102 Curr Acc: (10517/16487)63.79%
568
+ [ Fri Sep 9 00:42:48 2022 ] Epoch 81 Best Acc 67.20%
569
+ [ Fri Sep 9 00:42:48 2022 ] Training epoch: 103
570
+ [ Fri Sep 9 00:42:48 2022 ] Learning rate: 0.00015000000000000004
571
+ [ Fri Sep 9 00:46:09 2022 ] Mean training loss: 0.0102.
572
+ [ Fri Sep 9 00:46:09 2022 ] Time consumption: [Data]01%, [Network]99%
573
+ [ Fri Sep 9 00:46:09 2022 ] Eval epoch: 103
574
+ [ Fri Sep 9 00:48:20 2022 ] Epoch 103 Curr Acc: (10707/16487)64.94%
575
+ [ Fri Sep 9 00:48:20 2022 ] Epoch 81 Best Acc 67.20%
576
+ [ Fri Sep 9 00:48:20 2022 ] Training epoch: 104
577
+ [ Fri Sep 9 00:48:20 2022 ] Learning rate: 0.00015000000000000004
578
+ [ Fri Sep 9 00:51:41 2022 ] Mean training loss: 0.0081.
579
+ [ Fri Sep 9 00:51:41 2022 ] Time consumption: [Data]01%, [Network]99%
580
+ [ Fri Sep 9 00:51:41 2022 ] Eval epoch: 104
581
+ [ Fri Sep 9 00:53:52 2022 ] Epoch 104 Curr Acc: (10923/16487)66.25%
582
+ [ Fri Sep 9 00:53:52 2022 ] Epoch 81 Best Acc 67.20%
583
+ [ Fri Sep 9 00:53:52 2022 ] Training epoch: 105
584
+ [ Fri Sep 9 00:53:52 2022 ] Learning rate: 0.00015000000000000004
585
+ [ Fri Sep 9 00:57:13 2022 ] Mean training loss: 0.0100.
586
+ [ Fri Sep 9 00:57:13 2022 ] Time consumption: [Data]01%, [Network]99%
587
+ [ Fri Sep 9 00:57:13 2022 ] Eval epoch: 105
588
+ [ Fri Sep 9 00:59:24 2022 ] Epoch 105 Curr Acc: (11071/16487)67.15%
589
+ [ Fri Sep 9 00:59:24 2022 ] Epoch 81 Best Acc 67.20%
590
+ [ Fri Sep 9 00:59:24 2022 ] Training epoch: 106
591
+ [ Fri Sep 9 00:59:24 2022 ] Learning rate: 0.00015000000000000004
592
+ [ Fri Sep 9 01:02:44 2022 ] Mean training loss: 0.0107.
593
+ [ Fri Sep 9 01:02:44 2022 ] Time consumption: [Data]01%, [Network]99%
594
+ [ Fri Sep 9 01:02:44 2022 ] Eval epoch: 106
595
+ [ Fri Sep 9 01:04:56 2022 ] Epoch 106 Curr Acc: (10881/16487)66.00%
596
+ [ Fri Sep 9 01:04:56 2022 ] Epoch 81 Best Acc 67.20%
597
+ [ Fri Sep 9 01:04:56 2022 ] Training epoch: 107
598
+ [ Fri Sep 9 01:04:56 2022 ] Learning rate: 0.00015000000000000004
599
+ [ Fri Sep 9 01:08:17 2022 ] Mean training loss: 0.0086.
600
+ [ Fri Sep 9 01:08:17 2022 ] Time consumption: [Data]01%, [Network]99%
601
+ [ Fri Sep 9 01:08:17 2022 ] Eval epoch: 107
602
+ [ Fri Sep 9 01:10:28 2022 ] Epoch 107 Curr Acc: (11012/16487)66.79%
603
+ [ Fri Sep 9 01:10:28 2022 ] Epoch 81 Best Acc 67.20%
604
+ [ Fri Sep 9 01:10:28 2022 ] Training epoch: 108
605
+ [ Fri Sep 9 01:10:28 2022 ] Learning rate: 0.00015000000000000004
606
+ [ Fri Sep 9 01:13:49 2022 ] Mean training loss: 0.0093.
607
+ [ Fri Sep 9 01:13:49 2022 ] Time consumption: [Data]01%, [Network]99%
608
+ [ Fri Sep 9 01:13:49 2022 ] Eval epoch: 108
609
+ [ Fri Sep 9 01:16:00 2022 ] Epoch 108 Curr Acc: (10579/16487)64.17%
610
+ [ Fri Sep 9 01:16:00 2022 ] Epoch 81 Best Acc 67.20%
611
+ [ Fri Sep 9 01:16:00 2022 ] Training epoch: 109
612
+ [ Fri Sep 9 01:16:00 2022 ] Learning rate: 0.00015000000000000004
613
+ [ Fri Sep 9 01:19:21 2022 ] Mean training loss: 0.0093.
614
+ [ Fri Sep 9 01:19:21 2022 ] Time consumption: [Data]01%, [Network]99%
615
+ [ Fri Sep 9 01:19:21 2022 ] Eval epoch: 109
616
+ [ Fri Sep 9 01:21:32 2022 ] Epoch 109 Curr Acc: (10492/16487)63.64%
617
+ [ Fri Sep 9 01:21:32 2022 ] Epoch 81 Best Acc 67.20%
618
+ [ Fri Sep 9 01:21:32 2022 ] Training epoch: 110
619
+ [ Fri Sep 9 01:21:32 2022 ] Learning rate: 0.00015000000000000004
620
+ [ Fri Sep 9 01:24:53 2022 ] Mean training loss: 0.0088.
621
+ [ Fri Sep 9 01:24:53 2022 ] Time consumption: [Data]01%, [Network]99%
622
+ [ Fri Sep 9 01:24:53 2022 ] Eval epoch: 110
623
+ [ Fri Sep 9 01:27:04 2022 ] Epoch 110 Curr Acc: (10646/16487)64.57%
624
+ [ Fri Sep 9 01:27:04 2022 ] Epoch 81 Best Acc 67.20%
625
+ [ Fri Sep 9 01:27:04 2022 ] epoch: 81, best accuracy: 0.6719839873839996
626
+ [ Fri Sep 9 01:27:04 2022 ] Experiment: ./work_dir/ntu/xsub_jm
627
+ [ Fri Sep 9 01:27:04 2022 ] # generator parameters: 2.896055 M.
628
+ [ Fri Sep 9 01:27:04 2022 ] Load weights from ./runs/ntu/xsub_jm/runs-80-79866.pt.
629
+ [ Fri Sep 9 01:27:04 2022 ] Eval epoch: 1
630
+ [ Fri Sep 9 01:29:16 2022 ] Epoch 1 Curr Acc: (11079/16487)67.20%
631
+ [ Fri Sep 9 01:29:16 2022 ] Epoch 81 Best Acc 67.20%
ckpt/Others/MST-GCN/ntu60_xview/xview_b/AEMST_GCN.py ADDED
@@ -0,0 +1,168 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import torch
2
+ import torch.nn as nn
3
+ import torch.nn.functional as F
4
+
5
+ import numpy as np
6
+ import math
7
+
8
+ import sys
9
+ sys.path.append('../')
10
+ from model.layers import Basic_Layer, Basic_TCN_layer, MS_TCN_layer, Temporal_Bottleneck_Layer, \
11
+ MS_Temporal_Bottleneck_Layer, Temporal_Sep_Layer, Basic_GCN_layer, MS_GCN_layer, Spatial_Bottleneck_Layer, \
12
+ MS_Spatial_Bottleneck_Layer, SpatialGraphCov, Spatial_Sep_Layer
13
+ from model.activations import Activations
14
+ from model.utils import import_class, conv_branch_init, conv_init, bn_init
15
+ from model.attentions import Attention_Layer
16
+
17
+ # import model.attentions
18
+
19
+ __block_type__ = {
20
+ 'basic': (Basic_GCN_layer, Basic_TCN_layer),
21
+ 'bottle': (Spatial_Bottleneck_Layer, Temporal_Bottleneck_Layer),
22
+ 'sep': (Spatial_Sep_Layer, Temporal_Sep_Layer),
23
+ 'ms': (MS_GCN_layer, MS_TCN_layer),
24
+ 'ms_bottle': (MS_Spatial_Bottleneck_Layer, MS_Temporal_Bottleneck_Layer),
25
+ }
26
+
27
+
28
+ class Model(nn.Module):
29
+ def __init__(self, num_class, num_point, num_person, block_args, graph, graph_args, kernel_size, block_type, atten,
30
+ **kwargs):
31
+ super(Model, self).__init__()
32
+ kwargs['act'] = Activations(kwargs['act'])
33
+ atten = None if atten == 'None' else atten
34
+ if graph is None:
35
+ raise ValueError()
36
+ else:
37
+ Graph = import_class(graph)
38
+ self.graph = Graph(**graph_args)
39
+ A = self.graph.A
40
+
41
+ self.data_bn = nn.BatchNorm1d(num_person * block_args[0][0] * num_point)
42
+
43
+ self.layers = nn.ModuleList()
44
+
45
+ for i, block in enumerate(block_args):
46
+ if i == 0:
47
+ self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2],
48
+ kernel_size=kernel_size, stride=block[3], A=A, block_type='basic',
49
+ atten=None, **kwargs))
50
+ else:
51
+ self.layers.append(MST_GCN_block(in_channels=block[0], out_channels=block[1], residual=block[2],
52
+ kernel_size=kernel_size, stride=block[3], A=A, block_type=block_type,
53
+ atten=atten, **kwargs))
54
+
55
+ self.gap = nn.AdaptiveAvgPool2d(1)
56
+ self.fc = nn.Linear(block_args[-1][1], num_class)
57
+
58
+ for m in self.modules():
59
+ if isinstance(m, SpatialGraphCov) or isinstance(m, Spatial_Sep_Layer):
60
+ for mm in m.modules():
61
+ if isinstance(mm, nn.Conv2d):
62
+ conv_branch_init(mm, self.graph.A.shape[0])
63
+ if isinstance(mm, nn.BatchNorm2d):
64
+ bn_init(mm, 1)
65
+ elif isinstance(m, nn.Conv2d):
66
+ conv_init(m)
67
+ elif isinstance(m, nn.BatchNorm2d) or isinstance(m, nn.BatchNorm1d):
68
+ bn_init(m, 1)
69
+ elif isinstance(m, nn.Linear):
70
+ nn.init.normal_(m.weight, 0, math.sqrt(2. / num_class))
71
+
72
+ def forward(self, x):
73
+ N, C, T, V, M = x.size()
74
+
75
+ x = x.permute(0, 4, 3, 1, 2).contiguous().view(N, M * V * C, T) # N C T V M --> N M V C T
76
+ x = self.data_bn(x)
77
+ x = x.view(N, M, V, C, T).permute(0, 1, 3, 4, 2).contiguous().view(N * M, C, T, V)
78
+
79
+ for i, layer in enumerate(self.layers):
80
+ x = layer(x)
81
+
82
+ features = x
83
+
84
+ x = self.gap(x).view(N, M, -1).mean(dim=1)
85
+ x = self.fc(x)
86
+
87
+ return features, x
88
+
89
+
90
+ class MST_GCN_block(nn.Module):
91
+ def __init__(self, in_channels, out_channels, residual, kernel_size, stride, A, block_type, atten, **kwargs):
92
+ super(MST_GCN_block, self).__init__()
93
+ self.atten = atten
94
+ self.msgcn = __block_type__[block_type][0](in_channels=in_channels, out_channels=out_channels, A=A,
95
+ residual=residual, **kwargs)
96
+ self.mstcn = __block_type__[block_type][1](channels=out_channels, kernel_size=kernel_size, stride=stride,
97
+ residual=residual, **kwargs)
98
+ if atten is not None:
99
+ self.att = Attention_Layer(out_channels, atten, **kwargs)
100
+
101
+ def forward(self, x):
102
+ return self.att(self.mstcn(self.msgcn(x))) if self.atten is not None else self.mstcn(self.msgcn(x))
103
+
104
+
105
+ if __name__ == '__main__':
106
+ import sys
107
+ import time
108
+
109
+ parts = [
110
+ np.array([5, 6, 7, 8, 22, 23]) - 1, # left_arm
111
+ np.array([9, 10, 11, 12, 24, 25]) - 1, # right_arm
112
+ np.array([13, 14, 15, 16]) - 1, # left_leg
113
+ np.array([17, 18, 19, 20]) - 1, # right_leg
114
+ np.array([1, 2, 3, 4, 21]) - 1 # torso
115
+ ]
116
+
117
+ warmup_iter = 3
118
+ test_iter = 10
119
+ sys.path.append('/home/chenzhan/mywork/MST-GCN/')
120
+ from thop import profile
121
+ basic_channels = 112
122
+ cfgs = {
123
+ 'num_class': 2,
124
+ 'num_point': 25,
125
+ 'num_person': 1,
126
+ 'block_args': [[2, basic_channels, False, 1],
127
+ [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1], [basic_channels, basic_channels, True, 1],
128
+ [basic_channels, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1], [basic_channels*2, basic_channels*2, True, 1],
129
+ [basic_channels*2, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1], [basic_channels*4, basic_channels*4, True, 1]],
130
+ 'graph': 'graph.ntu_rgb_d.Graph',
131
+ 'graph_args': {'labeling_mode': 'spatial'},
132
+ 'kernel_size': 9,
133
+ 'block_type': 'ms',
134
+ 'reduct_ratio': 2,
135
+ 'expand_ratio': 0,
136
+ 't_scale': 4,
137
+ 'layer_type': 'sep',
138
+ 'act': 'relu',
139
+ 's_scale': 4,
140
+ 'atten': 'stcja',
141
+ 'bias': True,
142
+ 'parts': parts
143
+ }
144
+
145
+ model = Model(**cfgs)
146
+
147
+ N, C, T, V, M = 4, 2, 16, 25, 1
148
+ inputs = torch.rand(N, C, T, V, M)
149
+
150
+ for i in range(warmup_iter + test_iter):
151
+ if i == warmup_iter:
152
+ start_time = time.time()
153
+ outputs = model(inputs)
154
+ end_time = time.time()
155
+
156
+ total_time = end_time - start_time
157
+ print('iter_with_CPU: {:.2f} s/{} iters, persample: {:.2f} s/iter '.format(
158
+ total_time, test_iter, total_time/test_iter/N))
159
+
160
+ print(outputs[1].size())  # forward returns (features, logits); print the logits shape
161
+
162
+ hereflops, params = profile(model, inputs=(inputs,), verbose=False)
163
+ print('# GFlops is {} G'.format(hereflops / 10 ** 9 / N))
164
+ print('# Params is {} M'.format(sum(param.numel() for param in model.parameters()) / 10 ** 6))
165
+
166
+
167
+
168
+
ckpt/Others/MST-GCN/ntu60_xview/xview_b/config.yaml ADDED
@@ -0,0 +1,107 @@
1
+ base_lr: 0.15
2
+ batch_size: 8
3
+ config: config/ntu/xview_b.yaml
4
+ device:
5
+ - 0
6
+ eval_interval: 5
7
+ feeder: feeders.feeder.Feeder
8
+ ignore_weights: []
9
+ local_rank: 0
10
+ log_interval: 100
11
+ model: model.AEMST_GCN.Model
12
+ model_args:
13
+ act: relu
14
+ atten: None
15
+ bias: true
16
+ block_args:
17
+ - - 3
18
+ - 112
19
+ - false
20
+ - 1
21
+ - - 112
22
+ - 112
23
+ - true
24
+ - 1
25
+ - - 112
26
+ - 112
27
+ - true
28
+ - 1
29
+ - - 112
30
+ - 112
31
+ - true
32
+ - 1
33
+ - - 112
34
+ - 224
35
+ - true
36
+ - 2
37
+ - - 224
38
+ - 224
39
+ - true
40
+ - 1
41
+ - - 224
42
+ - 224
43
+ - true
44
+ - 1
45
+ - - 224
46
+ - 448
47
+ - true
48
+ - 2
49
+ - - 448
50
+ - 448
51
+ - true
52
+ - 1
53
+ - - 448
54
+ - 448
55
+ - true
56
+ - 1
57
+ block_type: ms
58
+ expand_ratio: 0
59
+ graph: graph.ntu_rgb_d.Graph
60
+ graph_args:
61
+ labeling_mode: spatial
62
+ kernel_size: 9
63
+ layer_type: basic
64
+ num_class: 60
65
+ num_person: 2
66
+ num_point: 25
67
+ reduct_ratio: 2
68
+ s_scale: 4
69
+ t_scale: 4
70
+ model_path: ''
71
+ model_saved_name: ./runs/ntu/xview_b/runs
72
+ nesterov: true
73
+ num_epoch: 110
74
+ num_worker: 32
75
+ only_train_epoch: 0
76
+ only_train_part: false
77
+ optimizer: SGD
78
+ phase: train
79
+ print_log: true
80
+ save_interval: 1
81
+ save_score: true
82
+ seed: 1
83
+ show_topk:
84
+ - 1
85
+ - 5
86
+ start_epoch: 0
87
+ step:
88
+ - 50
89
+ - 70
90
+ - 90
91
+ test_batch_size: 64
92
+ test_feeder_args:
93
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/val_data_bone.npy
94
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/val_label.pkl
95
+ train_feeder_args:
96
+ data_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/train_data_bone.npy
97
+ debug: false
98
+ label_path: /data/lhd/long_tailed_skeleton_data/MS-G3D-data/ntu/xview/train_label.pkl
99
+ normalization: false
100
+ random_choose: false
101
+ random_move: false
102
+ random_shift: false
103
+ window_size: -1
104
+ warm_up_epoch: 10
105
+ weight_decay: 0.0001
106
+ weights: null
107
+ work_dir: ./work_dir/ntu/xview_b
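
Unlike the joint-stream configs, this xview_b config reads *_data_bone.npy. Those tensors are not part of this upload; in the usual MS-G3D-style preprocessing they are derived from the joint arrays by subtracting each joint from its paired parent joint. A sketch under that assumption — the NTU-25 pairing and file paths below follow the common preprocessing convention and are not defined anywhere in this repo:

import numpy as np

# (child, parent) joint pairs, 1-indexed as in the NTU RGB+D numbering (common convention).
ntu_pairs = ((1, 2), (2, 21), (3, 21), (4, 3), (5, 21), (6, 5), (7, 6), (8, 7), (9, 21),
             (10, 9), (11, 10), (12, 11), (13, 1), (14, 13), (15, 14), (16, 15), (17, 1),
             (18, 17), (19, 18), (20, 19), (21, 21), (22, 23), (23, 8), (24, 25), (25, 12))

joint = np.load('ntu/xview/train_data_joint.npy')            # (N, 3, T, 25, 2)
bone = np.zeros_like(joint)
for child, parent in ntu_pairs:
    bone[:, :, :, child - 1, :] = joint[:, :, :, child - 1, :] - joint[:, :, :, parent - 1, :]
np.save('ntu/xview/train_data_bone.npy', bone)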