Update README.md
README.md
CHANGED
@@ -13,10 +13,13 @@ tags:
 - htr
 - ocr
 ---
-
+# Pretrained
 [image]
 
-#
+# Finetuned
+[image]
+
+# PyLaia Pretraining Metrics
 
 ## Training Summary
 - Total epochs: 52
@@ -77,4 +80,43 @@ tags:
 | 48 | 0.00252 | 0.650962 | 0.011476 | 0.001038 | 0.446807 | 0.00407 |
 | 49 | 0.002543 | 0.662387 | 0.011516 | 0.00105 | 0.446939 | 0.004147 |
 | 50 | 0.002528 | 0.654322 | 0.01149 | 0.001044 | 0.445585 | 0.004111 |
-| 51 | 0.002518 | 0.653832 | 0.011452 | 0.001051 | 0.447358 | 0.004158 |
+| 51 | 0.002518 | 0.653832 | 0.011452 | 0.001051 | 0.447358 | 0.004158 |
+
+# PyLaia Finetuning Metrics
+
+## Training Summary
+- Total epochs: 27
+- Best validation CER: 0.000869 (epoch 21)
+- Best validation WER: 0.003693 (epoch 21)
+
+## Full Metrics Table
+
+| epoch | tr_cer | tr_loss | tr_wer | va_cer | va_loss | va_wer |
+|--------:|---------:|----------:|---------:|---------:|----------:|---------:|
+| 0 | 0.183214 | 35.8187 | 0.224537 | 0.001531 | 0.370508 | 0.006411 |
+| 1 | 0.003867 | 0.883387 | 0.018513 | 0.001255 | 0.298736 | 0.005188 |
+| 2 | 0.003428 | 0.769932 | 0.016496 | 0.001143 | 0.245342 | 0.00476 |
+| 3 | 0.003254 | 0.734276 | 0.015593 | 0.001168 | 0.262834 | 0.004929 |
+| 4 | 0.003137 | 0.688775 | 0.01504 | 0.001151 | 0.244334 | 0.004897 |
+| 5 | 0.00301 | 0.667008 | 0.014394 | 0.001086 | 0.230109 | 0.004562 |
+| 6 | 0.002953 | 0.656692 | 0.01415 | 0.001123 | 0.238079 | 0.004684 |
+| 7 | 0.002937 | 0.650136 | 0.014223 | 0.001034 | 0.234374 | 0.004454 |
+| 8 | 0.002913 | 0.644122 | 0.014053 | 0.001017 | 0.233998 | 0.004383 |
+| 9 | 0.002822 | 0.626512 | 0.013617 | 0.001038 | 0.23499 | 0.004427 |
+| 10 | 0.00291 | 0.644877 | 0.014099 | 0.001179 | 0.265034 | 0.005115 |
+| 11 | 0.002319 | 0.512036 | 0.01107 | 0.000923 | 0.203359 | 0.003937 |
+| 12 | 0.002136 | 0.469168 | 0.010157 | 0.0009 | 0.201167 | 0.003825 |
+| 13 | 0.002081 | 0.455159 | 0.00996 | 0.0009 | 0.198162 | 0.003825 |
+| 14 | 0.002001 | 0.433845 | 0.009498 | 0.000908 | 0.197196 | 0.003886 |
+| 15 | 0.001985 | 0.425972 | 0.009495 | 0.000884 | 0.195152 | 0.003785 |
+| 16 | 0.001951 | 0.427156 | 0.009355 | 0.000874 | 0.196313 | 0.003695 |
+| 17 | 0.001979 | 0.431152 | 0.009395 | 0.000897 | 0.194316 | 0.003843 |
+| 18 | 0.00191 | 0.418722 | 0.009082 | 0.000881 | 0.198212 | 0.00374 |
+| 19 | 0.001922 | 0.422092 | 0.009175 | 0.000906 | 0.195854 | 0.003853 |
+| 20 | 0.001939 | 0.410425 | 0.009221 | 0.000888 | 0.19173 | 0.003807 |
+| 21 | 0.001861 | 0.404768 | 0.0089 | 0.000869 | 0.194746 | 0.003693 |
+| 22 | 0.001848 | 0.407902 | 0.008812 | 0.000908 | 0.201331 | 0.00387 |
+| 23 | 0.001844 | 0.40212 | 0.008813 | 0.000894 | 0.193942 | 0.003816 |
+| 24 | 0.001819 | 0.399957 | 0.008706 | 0.000902 | 0.200013 | 0.003802 |
+| 25 | 0.001825 | 0.400195 | 0.00867 | 0.000909 | 0.198319 | 0.003824 |
+| 26 | 0.001822 | 0.395797 | 0.008696 | 0.00088 | 0.193306 | 0.003716 |
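As background for reading the tables above: the `tr_`/`va_` prefixes denote the training and validation splits, and CER/WER are character and word error rates. The sketch below is illustrative only, assuming the conventional definition (edit distance divided by reference length); the helper names and the subset of rows are chosen for illustration and are not PyLaia's own implementation.

```python
# Illustrative sketch: how CER/WER of the kind reported above are conventionally
# computed, and how the "Best validation CER/WER" summary lines follow from the
# per-epoch table. Assumes edit-distance-over-reference-length definitions.

def levenshtein(ref, hyp):
    """Edit distance between two sequences (insertions, deletions, substitutions)."""
    prev = list(range(len(hyp) + 1))
    for i, r in enumerate(ref, start=1):
        curr = [i]
        for j, h in enumerate(hyp, start=1):
            curr.append(min(prev[j] + 1,              # deletion
                            curr[j - 1] + 1,          # insertion
                            prev[j - 1] + (r != h)))  # substitution
        prev = curr
    return prev[-1]

def cer(ref, hyp):
    """Character error rate: character-level edit distance / reference length."""
    return levenshtein(list(ref), list(hyp)) / max(len(ref), 1)

def wer(ref, hyp):
    """Word error rate: word-level edit distance / reference word count."""
    return levenshtein(ref.split(), hyp.split()) / max(len(ref.split()), 1)

# The "Best validation" summary lines correspond to the argmin of va_cer over
# the per-epoch rows. A few (epoch, va_cer, va_wer) rows from the finetuning
# table above, for illustration:
rows = [
    (20, 0.000888, 0.003807),
    (21, 0.000869, 0.003693),
    (22, 0.000908, 0.00387),
]
best = min(rows, key=lambda r: r[1])
print(best)  # (21, 0.000869, 0.003693) -> "Best validation CER: 0.000869 (epoch 21)"
```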