MatteoFasulo committed on
Commit 68c27ef · verified · 1 Parent(s): c892ba1

Update README.md

Files changed (1): README.md (+24 −36)
README.md CHANGED
@@ -16,6 +16,8 @@ pipeline_tag: text-classification
 model-index:
 - name: mdeberta-v3-base-subjectivity-bulgarian
   results: []
+datasets:
+- MatteoFasulo/clef2025_checkthat_task1_subjectivity
 ---
 
 # mdeberta-v3-base-subjectivity-bulgarian
@@ -94,51 +96,37 @@ The following hyperparameters were used during training:
 You can use this model directly with the Hugging Face `transformers` library for text classification:
 
 ```python
-from transformers import AutoTokenizer, AutoModelForSequenceClassification
-import torch
-
-# Load tokenizer and model
-tokenizer = AutoTokenizer.from_pretrained("MatteoFasulo/mdeberta-v3-base-subjectivity-bulgarian")
-model = AutoModelForSequenceClassification.from_pretrained("MatteoFasulo/mdeberta-v3-base-subjectivity-bulgarian")
-
-# Example for an objective sentence (Bulgarian)
-text_objective = "Правителството обяви нови мерки за борба с инфлацията."  # "The government announced new measures to combat inflation."
-inputs_objective = tokenizer(text_objective, return_tensors="pt")
-
-with torch.no_grad():
-    logits_objective = model(**inputs_objective).logits
-
-predicted_class_id_objective = logits_objective.argmax().item()
-predicted_label_objective = model.config.id2label[predicted_class_id_objective]
-
-print(f"Text: '{text_objective}'")
-print(f"Predicted label: {predicted_label_objective}")
-# Expected output: Predicted label: OBJ (or similar with score)
-
-# Example for a subjective sentence (Bulgarian)
-text_subjective = "Според мен това е най-доброто решение."  # "In my opinion, this is the best decision."
-inputs_subjective = tokenizer(text_subjective, return_tensors="pt")
-with torch.no_grad():
-    logits_subjective = model(**inputs_subjective).logits
-predicted_class_id_subjective = logits_subjective.argmax().item()
-predicted_label_subjective = model.config.id2label[predicted_class_id_subjective]
-print(f"Text: '{text_subjective}'")
-print(f"Predicted label: {predicted_label_subjective}")
-# Expected output: Predicted label: SUBJ (or similar with score)
+from transformers import pipeline
+
+# Load the text classification pipeline
+classifier = pipeline(
+    "text-classification",
+    model="MatteoFasulo/mdeberta-v3-base-subjectivity-bulgarian",
+    tokenizer="microsoft/mdeberta-v3-base",
+)
+
+# Example usage:
+result1 = classifier("По принцип никой не иска войни, но за нещастие те се случват.")  # "In principle, no one wants wars, but unfortunately they happen."
+print(f"Classification: {result1}")
+# Expected output: [{'label': 'SUBJ', 'score': ...}]
+
+result2 = classifier("В един момент започнал сам да търси изход за своето спасение и здраве")  # "At one point, he began looking on his own for a way out for his salvation and health."
+print(f"Classification: {result2}")
+# Expected output: [{'label': 'OBJ', 'score': ...}]
 ```
 
 ## Citation
 
-If you find this work helpful or inspiring, please consider citing the original paper:
+If you find our work helpful or inspiring, please feel free to cite it:
 
 ```bibtex
-@misc{antoun2024camembert20smarterfrench,
-      title={AI Wizards at CheckThat! 2025: Enhancing Transformer-Based Embeddings with Sentiment for Subjectivity Detection in News Articles},
-      author={Matteo Fasulo and Stefan Petkov and Antonio Toral},
+@misc{fasulo2025aiwizardscheckthat2025,
+      title={AI Wizards at CheckThat! 2025: Enhancing Transformer-Based Embeddings with Sentiment for Subjectivity Detection in News Articles},
+      author={Matteo Fasulo and Luca Babboni and Luca Tedeschini},
       year={2025},
       eprint={2507.11764},
       archivePrefix={arXiv},
       primaryClass={cs.CL},
-      url={https://arxiv.org/abs/2507.11764},
+      url={https://arxiv.org/abs/2507.11764},
 }
 ```
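
Beyond the snippet added in this commit, a minimal sketch of batch classification with per-label scores may be useful. It assumes a `transformers` version recent enough to support the pipeline's `top_k=None` argument, and the `SUBJ`/`OBJ` label names are taken from the expected outputs shown above.

```python
from transformers import pipeline

# Same pipeline as in the updated README snippet.
classifier = pipeline(
    "text-classification",
    model="MatteoFasulo/mdeberta-v3-base-subjectivity-bulgarian",
    tokenizer="microsoft/mdeberta-v3-base",
)

sentences = [
    "По принцип никой не иска войни, но за нещастие те се случват.",  # "In principle, no one wants wars, but unfortunately they happen."
    "В един момент започнал сам да търси изход за своето спасение и здраве",  # "At one point, he began looking on his own for a way out for his salvation and health."
]

# Passing a list classifies all sentences in one call;
# top_k=None returns the score for every label (SUBJ and OBJ) instead of only the top one.
for sentence, scores in zip(sentences, classifier(sentences, top_k=None)):
    print(sentence)
    for entry in scores:
        print(f"  {entry['label']}: {entry['score']:.3f}")
```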
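
The dataset referenced by the new `datasets:` metadata entry can also be loaded with the `datasets` library. This is only a sketch: the commit does not state which configurations or splits the dataset provides, so the example discovers the configuration names instead of assuming one.

```python
from datasets import get_dataset_config_names, load_dataset

# Dataset repo added to the README metadata in this commit.
repo_id = "MatteoFasulo/clef2025_checkthat_task1_subjectivity"

# The available configurations (e.g. per-language subsets) are not listed here,
# so query them first rather than guessing a name.
configs = get_dataset_config_names(repo_id)
print(configs)

# Load the first configuration as an illustration; pick the Bulgarian subset if one exists.
dataset = load_dataset(repo_id, configs[0])
print(dataset)
```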