gabrycina committed
Commit dec6e74 · verified · 1 Parent(s): a181709

Upload folder using huggingface_hub

Files changed (4)
  1. README.md +37 -2
  2. best_model.pth +1 -1
  3. config.json +1 -1
  4. metrics.json +201 -201
README.md CHANGED
@@ -1,3 +1,16 @@
+ ---
+ language: en
+ tags:
+ - eeg
+ - meg
+ - pytorch
+ - neuroimaging
+ license: mit
+ datasets:
+ - gabrycina/eeg2meg-tiny
+ metrics:
+ - mse
+ ---
 
  # EEG to MEG Prediction Model
 
@@ -8,8 +21,30 @@ This model was trained to predict MEG signals from EEG recordings.
  - Batch Size: 32
  - Learning Rate: 0.0001
  - Device: mps
- - Training Date: 20250104_184834
+ - Training Date: 20250104_185119
 
  ## Performance
- - Best Validation Loss: 0.166563
+ - Best Validation Loss: 0.171059
  - Best Epoch: 100
+
+ ## Model Description
+
+ This model uses a deep learning architecture to predict MEG signals from EEG recordings. The architecture includes:
+ - Frequency and temporal convolutions for feature extraction
+ - Multi-head attention mechanisms for sensor relationships
+ - Residual connections for better gradient flow
+ - Separate prediction heads for magnetometers and gradiometers
+
+ ## Usage
+
+ ```python
+ import torch
+
+ # Load the model
+ model = torch.load('best_model.pth')
+
+ # Prepare your EEG data (shape: [batch_size, channels, time_points])
+ # Make predictions
+ with torch.no_grad():
+     meg_predictions = model(eeg_data)
+ ```
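
The Model Description in the README above lists the architectural ingredients but no code. Purely as an illustration of how such pieces could fit together, here is a hypothetical skeleton; the class name `EEG2MEGSketch`, the channel counts (64 EEG, 102 magnetometer, 204 gradiometer), and every layer size are assumptions, not values read from `best_model.pth` or this commit.

```python
import torch
import torch.nn as nn

class EEG2MEGSketch(nn.Module):
    """Hypothetical skeleton of the components named in the Model Description.
    It is NOT the architecture stored in best_model.pth."""

    def __init__(self, eeg_channels=64, mag_channels=102, grad_channels=204, d_model=128):
        super().__init__()
        # Temporal and frequency-oriented feature extraction via 1-D convolutions.
        self.temporal_conv = nn.Conv1d(eeg_channels, d_model, kernel_size=7, padding=3)
        self.freq_conv = nn.Conv1d(d_model, d_model, kernel_size=15, padding=7)
        # Multi-head attention over time steps to model sensor relationships.
        self.attn = nn.MultiheadAttention(d_model, num_heads=4, batch_first=True)
        self.norm = nn.LayerNorm(d_model)
        # Separate prediction heads for magnetometers and gradiometers.
        self.mag_head = nn.Conv1d(d_model, mag_channels, kernel_size=1)
        self.grad_head = nn.Conv1d(d_model, grad_channels, kernel_size=1)

    def forward(self, eeg):                      # eeg: [batch, channels, time]
        x = torch.relu(self.temporal_conv(eeg))
        x = x + torch.relu(self.freq_conv(x))    # residual connection
        t = x.transpose(1, 2)                    # [batch, time, d_model] for attention
        attn_out, _ = self.attn(t, t, t)
        t = self.norm(t + attn_out)              # residual connection around attention
        x = t.transpose(1, 2)
        return self.mag_head(x), self.grad_head(x)

model = EEG2MEGSketch()
mags, grads = model(torch.randn(1, 64, 1000))
print(mags.shape, grads.shape)
```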
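
The committed Usage snippet leaves `eeg_data` undefined and was written against the `mps` device recorded in config.json. A minimal runnable variant, assuming a CPU fallback, a full pickled module in the checkpoint (as the README snippet implies), and placeholder input dimensions, might look like this:

```python
import torch

# Sketch only (not part of the commit): load the checkpoint on CPU so it also
# runs on machines without an Apple-silicon "mps" backend. On PyTorch >= 2.6,
# torch.load defaults to weights_only=True, so loading a full pickled module
# may additionally require weights_only=False.
model = torch.load('best_model.pth', map_location='cpu')
model.eval()  # assumes the checkpoint stores the whole module, not a state_dict

# Placeholder EEG batch: [batch_size, channels, time_points].
# 64 channels and 1000 time points are illustrative guesses, not repo values.
eeg_data = torch.randn(1, 64, 1000)

with torch.no_grad():
    meg_predictions = model(eeg_data)  # output structure depends on the saved model

print(type(meg_predictions))
```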
best_model.pth CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:2b697eaba10d249564561540aa39443f993e09aa4733e9b9103bcc464838f960
+ oid sha256:27864e02771bd31a2f41c408c9639bcbfbf72e7c0b4280db07935cab55c6700d
  size 56297014
config.json CHANGED
@@ -4,5 +4,5 @@
  "num_epochs": 100,
  "learning_rate": 0.0001,
  "device": "mps",
- "timestamp": "20250104_184834"
+ "timestamp": "20250104_185119"
  }
metrics.json CHANGED
@@ -1,208 +1,208 @@
  {
  "train_losses": [
- 0.5830281972885132,
- 0.4379337430000305,
- 0.35303473472595215,
- 0.2991771996021271,
- 0.26284268498420715,
- 0.23944523930549622,
- 0.22291284799575806,
- 0.21095529198646545,
- 0.20198865234851837,
- 0.1951841562986374,
- 0.1899258941411972,
- 0.18568095564842224,
- 0.18216128647327423,
- 0.17918431758880615,
- 0.1766282171010971,
- 0.1744052767753601,
- 0.17244285345077515,
- 0.17068837583065033,
- 0.1691243052482605,
- 0.16775813698768616,
- 0.16656836867332458,
- 0.16551971435546875,
- 0.16458189487457275,
- 0.16375023126602173,
- 0.16303186118602753,
- 0.16242317855358124,
- 0.16190014779567719,
- 0.1614329218864441,
- 0.16100405156612396,
- 0.16061198711395264,
- 0.16026200354099274,
- 0.1599571704864502,
- 0.15969157218933105,
- 0.15945498645305634,
- 0.15923674404621124,
- 0.15903016924858093,
- 0.15883389115333557,
- 0.15865062177181244,
- 0.15848270058631897,
- 0.15833120048046112,
- 0.15819500386714935,
- 0.15807253122329712,
- 0.1579621136188507,
- 0.15786176919937134,
- 0.15776875615119934,
- 0.15768049657344818,
- 0.15759611129760742,
- 0.15751619637012482,
- 0.15744167566299438,
- 0.1573726236820221,
- 0.15730805695056915,
- 0.1572466641664505,
- 0.15718770027160645,
- 0.15713083744049072,
- 0.15707606077194214,
- 0.1570231318473816,
- 0.15697155892848969,
- 0.15692052245140076,
- 0.15686951577663422,
- 0.15681816637516022,
- 0.1567661464214325,
- 0.15671305358409882,
- 0.1566583216190338,
- 0.15660130977630615,
- 0.15654116868972778,
- 0.15647681057453156,
- 0.15640701353549957,
- 0.15633046627044678,
- 0.15624551475048065,
- 0.1561497449874878,
- 0.15603992342948914,
- 0.15591183304786682,
- 0.15575937926769257,
- 0.1555730253458023,
- 0.1553381383419037,
- 0.15505431592464447,
- 0.15471093356609344,
- 0.15428730845451355,
- 0.15379789471626282,
- 0.15327149629592896,
- 0.15269336104393005,
- 0.15197916328907013,
- 0.15113338828086853,
- 0.15018542110919952,
- 0.14908911287784576,
- 0.14784196019172668,
- 0.14648233354091644,
- 0.14507165551185608,
- 0.143690824508667,
- 0.14229804277420044,
- 0.14092038571834564,
- 0.13951298594474792,
- 0.13812172412872314,
- 0.13673223555088043,
- 0.13535749912261963,
- 0.13404345512390137,
- 0.13282622396945953,
- 0.13166658580303192,
- 0.1306070238351822,
- 0.12963762879371643
+ 0.5594673156738281,
+ 0.4270159602165222,
+ 0.34533312916755676,
+ 0.2923196852207184,
+ 0.2586347758769989,
+ 0.23653310537338257,
+ 0.22086045145988464,
+ 0.20962533354759216,
+ 0.20119275152683258,
+ 0.1945776790380478,
+ 0.18925841152668,
+ 0.1849389672279358,
+ 0.1812785565853119,
+ 0.17810198664665222,
+ 0.1753469854593277,
+ 0.17296956479549408,
+ 0.1709509789943695,
+ 0.16926102340221405,
+ 0.16784779727458954,
+ 0.16664595901966095,
+ 0.16559918224811554,
+ 0.16468144953250885,
+ 0.16388072073459625,
+ 0.16317768394947052,
+ 0.1625494509935379,
+ 0.1619788110256195,
+ 0.1614670306444168,
+ 0.16101856529712677,
+ 0.1606292575597763,
+ 0.16028964519500732,
+ 0.15998998284339905,
+ 0.15972478687763214,
+ 0.15948957204818726,
+ 0.1592799723148346,
+ 0.1590903103351593,
+ 0.1589149683713913,
+ 0.15875042974948883,
+ 0.15859366953372955,
+ 0.1584414839744568,
+ 0.15829166769981384,
+ 0.1581461876630783,
+ 0.15800932049751282,
+ 0.15788494050502777,
+ 0.15777456760406494,
+ 0.15767808258533478,
+ 0.15759330987930298,
+ 0.1575169861316681,
+ 0.15744546055793762,
+ 0.15737588703632355,
+ 0.15730665624141693,
+ 0.15723735094070435,
+ 0.15716806054115295,
+ 0.15709955990314484,
+ 0.15703235566616058,
+ 0.15696600079536438,
+ 0.15689902007579803,
+ 0.15682996809482574,
+ 0.15675795078277588,
+ 0.15668238699436188,
+ 0.15660230815410614,
+ 0.1565159112215042,
+ 0.15642081201076508,
+ 0.1563139706850052,
+ 0.15619169175624847,
+ 0.15604905784130096,
+ 0.15587930381298065,
+ 0.15567336976528168,
+ 0.15541885793209076,
+ 0.15510250627994537,
+ 0.1547168344259262,
+ 0.15423710644245148,
+ 0.15363956987857819,
+ 0.15292970836162567,
+ 0.15207436680793762,
+ 0.15118402242660522,
+ 0.15031561255455017,
+ 0.14938884973526,
+ 0.148528054356575,
+ 0.14767016470432281,
+ 0.14681878685951233,
+ 0.1460137963294983,
+ 0.14525267481803894,
+ 0.14451563358306885,
+ 0.14382581412792206,
+ 0.14319950342178345,
+ 0.1426372081041336,
+ 0.14213189482688904,
+ 0.1416567862033844,
+ 0.141194149851799,
+ 0.14076738059520721,
+ 0.14035241305828094,
+ 0.13991519808769226,
+ 0.13943901658058167,
+ 0.1389038860797882,
+ 0.13828983902931213,
+ 0.1376081258058548,
+ 0.13679949939250946,
+ 0.13591496646404266,
+ 0.13498111069202423,
+ 0.13399161398410797
  ],
  "val_losses": [
- 0.18754221498966217,
- 0.1878098100423813,
- 0.1882069706916809,
- 0.18865254521369934,
- 0.1891276091337204,
- 0.18960045278072357,
- 0.19006329774856567,
- 0.19052182137966156,
- 0.19098249077796936,
- 0.19143730401992798,
- 0.1918884664773941,
- 0.1923304945230484,
- 0.19274792075157166,
- 0.19313135743141174,
- 0.19348053634166718,
- 0.19378969073295593,
- 0.19405819475650787,
- 0.19427087903022766,
- 0.19442135095596313,
- 0.19451084733009338,
- 0.19455760717391968,
- 0.1945662647485733,
- 0.1945428103208542,
- 0.1944851577281952,
- 0.19441433250904083,
- 0.19435344636440277,
- 0.19429334998130798,
- 0.19423216581344604,
- 0.19417400658130646,
- 0.1941206306219101,
- 0.1940767467021942,
- 0.19403590261936188,
- 0.19398614764213562,
- 0.19391779601573944,
- 0.19383880496025085,
- 0.19375385344028473,
- 0.19366218149662018,
- 0.19356855750083923,
- 0.19347944855690002,
- 0.19338737428188324,
- 0.19329068064689636,
- 0.19318586587905884,
- 0.1930726170539856,
- 0.19295823574066162,
- 0.19284287095069885,
- 0.19273097813129425,
- 0.19262360036373138,
- 0.19252152740955353,
- 0.19242407381534576,
- 0.19233034551143646,
- 0.19224052131175995,
- 0.1921534538269043,
- 0.19207024574279785,
- 0.19199268519878387,
- 0.191922128200531,
- 0.1918611377477646,
- 0.1918095499277115,
- 0.191767156124115,
- 0.19173018634319305,
- 0.19169574975967407,
- 0.19166356325149536,
- 0.19163286685943604,
- 0.19160205125808716,
- 0.19156938791275024,
- 0.19153369963169098,
- 0.1914953887462616,
- 0.19145505130290985,
- 0.19141462445259094,
- 0.1913755089044571,
- 0.19133895635604858,
- 0.19130481779575348,
- 0.191273033618927,
- 0.19124671816825867,
- 0.19123347103595734,
- 0.1912316232919693,
- 0.1912107765674591,
- 0.19114622473716736,
- 0.1910923570394516,
- 0.19106736779212952,
- 0.19090144336223602,
- 0.19050434231758118,
- 0.18992651998996735,
- 0.18926772475242615,
- 0.18854667246341705,
- 0.1877654641866684,
- 0.1869177222251892,
- 0.18596912920475006,
- 0.18492525815963745,
- 0.1836531162261963,
- 0.1820530742406845,
- 0.18019172549247742,
- 0.1785041242837906,
- 0.17691537737846375,
- 0.17529572546482086,
- 0.17359544336795807,
- 0.1719386726617813,
- 0.1702776551246643,
- 0.1688225120306015,
- 0.16760993003845215,
- 0.16656319797039032
+ 0.1875462681055069,
+ 0.18779119849205017,
+ 0.1881019026041031,
+ 0.18848517537117004,
+ 0.18891803920269012,
+ 0.18935167789459229,
+ 0.18976734578609467,
+ 0.19015032052993774,
+ 0.1904982626438141,
+ 0.19081363081932068,
+ 0.19109763205051422,
+ 0.19135597348213196,
+ 0.19157443940639496,
+ 0.19176484644412994,
+ 0.19194315373897552,
+ 0.19212795794010162,
+ 0.19232401251792908,
+ 0.19253060221672058,
+ 0.19273820519447327,
+ 0.19294314086437225,
+ 0.19317211210727692,
+ 0.19341173768043518,
+ 0.19364714622497559,
+ 0.19384723901748657,
+ 0.19399899244308472,
+ 0.19411413371562958,
+ 0.19417986273765564,
+ 0.19420573115348816,
+ 0.19419170916080475,
+ 0.19414062798023224,
+ 0.19406332075595856,
+ 0.1939702033996582,
+ 0.1938701570034027,
+ 0.19376419484615326,
+ 0.19364114105701447,
+ 0.19349706172943115,
+ 0.19334006309509277,
+ 0.1931755542755127,
+ 0.19301441311836243,
+ 0.1928645819425583,
+ 0.19272786378860474,
+ 0.19260625541210175,
+ 0.1925038844347,
+ 0.19242164492607117,
+ 0.19235368072986603,
+ 0.19229502975940704,
+ 0.1922423094511032,
+ 0.19219404458999634,
+ 0.19214919209480286,
+ 0.19210313260555267,
+ 0.19205360114574432,
+ 0.19200220704078674,
+ 0.19195108115673065,
+ 0.1919029951095581,
+ 0.19185861945152283,
+ 0.19181932508945465,
+ 0.1917840987443924,
+ 0.19175124168395996,
+ 0.1917199194431305,
+ 0.1916886866092682,
+ 0.19165483117103577,
+ 0.1916167140007019,
+ 0.19157260656356812,
+ 0.19152085483074188,
+ 0.19146084785461426,
+ 0.19139179587364197,
+ 0.19131740927696228,
+ 0.19124945998191833,
+ 0.19116735458374023,
+ 0.1910485178232193,
+ 0.19089438021183014,
+ 0.19074174761772156,
+ 0.1906297504901886,
+ 0.1905340999364853,
+ 0.19026246666908264,
+ 0.1894928365945816,
+ 0.18858961760997772,
+ 0.18769629299640656,
+ 0.18683038651943207,
+ 0.18601162731647491,
+ 0.18519598245620728,
+ 0.1842837631702423,
+ 0.1833217591047287,
+ 0.18239526450634003,
+ 0.18150457739830017,
+ 0.18054954707622528,
+ 0.17967547476291656,
+ 0.1787826418876648,
+ 0.1779828816652298,
+ 0.1773221790790558,
+ 0.17676959931850433,
+ 0.1763739138841629,
+ 0.1759977787733078,
+ 0.17545710504055023,
+ 0.17473915219306946,
+ 0.17397843301296234,
+ 0.17344889044761658,
+ 0.17265236377716064,
+ 0.17184807360172272,
+ 0.17105890810489655
  ],
  "best_epoch": 100,
- "best_val_loss": 0.16656319797039032
+ "best_val_loss": 0.17105890810489655
  }