Update README.md
README.md
CHANGED
@@ -23,15 +23,15 @@ model-index:
     metrics:
     - name: Precision
       type: precision
-      value:
+      value: 55.7
       verified: false
     - name: Recall
       type: recall
-      value:
+      value: 68.5
       verified: false
     - name: F1
       type: f1
-      value:
+      value: 61.4
       verified: false
   - task:
       type: text-generation
@@ -41,15 +41,15 @@ model-index:
     metrics:
     - name: Precision
       type: precision
-      value:
+      value: 35.4
       verified: false
     - name: Recall
       type: recall
-      value:
+      value: 57.9
       verified: false
     - name: F1
       type: f1
-      value:
+      value: 43.9
       verified: false
   - task:
       type: text-generation
@@ -59,15 +59,15 @@ model-index:
     metrics:
     - name: Precision
       type: precision
-      value:
+      value: 35.1
       verified: false
     - name: Recall
       type: recall
-      value:
+      value: 70.8
       verified: false
     - name: F1
       type: f1
-      value:
+      value: 47.0
       verified: false
   - task:
       type: text-generation
@@ -77,15 +77,15 @@ model-index:
     metrics:
     - name: Precision
       type: precision
-      value:
+      value: 47.4
       verified: false
     - name: Recall
       type: recall
-      value:
+      value: 53.8
       verified: false
     - name: F1
       type: f1
-      value:
+      value: 50.4
       verified: false
   - task:
       type: text-generation
@@ -164,7 +164,7 @@ RUSpellRU, MultidomainGold, MedSpellChecker, GitHubTypoCorpusRu are datasets for
 **RUSpellRU**
 | Model | Precision | Recall | F1 |
 | --- | --- | --- | --- |
-| sage-mt5-large |
+| sage-mt5-large | 55.7 | 68.5 | 61.4 |
 | sage-mt5-large (ft.) | 88.4 | 71.6 | 79.1 |
 | sage-ai-service | 93.5 | 82.4 | 87.6 |
 | gpt-3.5-turbo | 39.6 | 62.3 | 48.5 |
@@ -173,7 +173,7 @@ RUSpellRU, MultidomainGold, MedSpellChecker, GitHubTypoCorpusRu are datasets for
 **MultidomainGold**
 | Model | Precision | Recall | F1 |
 | --- | --- | --- | --- |
-| sage-mt5-large |
+| sage-mt5-large | 35.4 | 57.9 | 43.9 |
 | sage-mt5-large (ft.) | 65.3 | 62.7 | 63.9 |
 | sage-ai-service | 70.9 | 68.8 | 69.9 |
 | gpt-3.5-turbo | 17.8 | 56.1 | 27.0 |
@@ -182,7 +182,7 @@ RUSpellRU, MultidomainGold, MedSpellChecker, GitHubTypoCorpusRu are datasets for
 **MedSpellChecker**
 | Model | Precision | Recall | F1 |
 | --- | --- | --- | --- |
-| sage-mt5-large |
+| sage-mt5-large | 35.1 | 70.8 | 47.0 |
 | sage-mt5-large (ft.) | 77.7 | 77.5 | 77.6 |
 | sage-ai-service | 73.4 | 76.2 | 74.9 |
 | gpt-3.5-turbo | 15.1 | 53.6 | 23.5 |
@@ -191,7 +191,7 @@ RUSpellRU, MultidomainGold, MedSpellChecker, GitHubTypoCorpusRu are datasets for
 **GitHubTypoCorpusRu**
 | Model | Precision | Recall | F1 |
 | --- | --- | --- | --- |
-| sage-mt5-large |
+| sage-mt5-large | 47.4 | 53.8 | 50.4 |
 | sage-mt5-large (ft.) | 69.5 | 46.0 | 55.3 |
 | sage-ai-service | 76.1 | 51.2 | 61.2 |
 | gpt-3.5-turbo | 23.7 | 43.9 | 30.8 |
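As a sanity check on the numbers introduced by this commit (this snippet is illustrative only, not part of the README or the sage repository): each added F1 value is the harmonic mean of the added precision and recall for that dataset. A minimal Python sketch that verifies this against the four table rows above:

```python
def f1(precision: float, recall: float) -> float:
    """Harmonic mean of precision and recall, in percentage points."""
    return 2 * precision * recall / (precision + recall)

# (precision, recall, reported F1) for sage-mt5-large, taken from the hunks above
reported = {
    "RUSpellRU": (55.7, 68.5, 61.4),
    "MultidomainGold": (35.4, 57.9, 43.9),
    "MedSpellChecker": (35.1, 70.8, 47.0),
    "GitHubTypoCorpusRu": (47.4, 53.8, 50.4),
}

for dataset, (p, r, f) in reported.items():
    # Allow for rounding to one decimal place in the model card
    assert abs(f1(p, r) - f) < 0.1, dataset
    print(f"{dataset}: f1({p}, {r}) = {f1(p, r):.1f} (reported {f})")
```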