Matej Klemen committed on
Commit
229bda0
·
1 Parent(s): 2f7b8de

Remove option to flag

Browse files
Files changed (1) hide show
  1. app.py +3 -8
app.py CHANGED
@@ -3,7 +3,6 @@ import re
3
  import gradio as gr
4
  import nltk
5
  import torch
6
- # from gradio import HuggingFaceDatasetSaver
7
  from transformers import AutoTokenizer, AutoModelForMaskedLM
8
 
9
 
@@ -50,9 +49,7 @@ cjvt/SloBERTa-slo-word-spelling-annotator</a>.</p>
50
  <p>Given an input text: </p>
51
  <p>1. The input is segmented into sentences and tokenized using NLTK to prepare the model input.</p>
52
  <p>2. The model makes predictions on the sentence level. </p>
53
- <b>The model does not work perfectly and can make mistakes, please check the output!</b> <br />
54
- If the model is performing poorly for an example, you may click the <i>Flag</i> button which will save the example
55
- to a log and help us improve the next iterations of the model. <br/>
56
  """
57
 
58
  demo = gr.Interface(
@@ -68,9 +65,8 @@ demo = gr.Interface(
68
  show_legend=True,
69
  color_map={"error": "red"}),
70
  theme=gr.themes.Base(),
71
- # flagging_callback=HuggingFaceDatasetSaver(os.environ["ACCES_TOKEN_FOR_WRITING_MISTAKES"],
72
- # "slonspell-space-flags", private=True),
73
- description=_description
74
  )
75
 
76
  if __name__ == "__main__":
@@ -79,6 +75,5 @@ if __name__ == "__main__":
79
  model = AutoModelForMaskedLM.from_pretrained(model_name)
80
  mask_token = tokenizer.mask_token
81
  DEVICE = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")
82
- print(gr.__version__)
83
 
84
  demo.launch()
 
3
  import gradio as gr
4
  import nltk
5
  import torch
 
6
  from transformers import AutoTokenizer, AutoModelForMaskedLM
7
 
8
 
 
49
  <p>Given an input text: </p>
50
  <p>1. The input is segmented into sentences and tokenized using NLTK to prepare the model input.</p>
51
  <p>2. The model makes predictions on the sentence level. </p>
52
+ <b>The model does not work perfectly and can make mistakes, please check the output!</b>
 
 
53
  """
54
 
55
  demo = gr.Interface(
 
65
  show_legend=True,
66
  color_map={"error": "red"}),
67
  theme=gr.themes.Base(),
68
+ description=_description,
69
+ allow_flagging="never" # RIP flagging to HuggingFace dataset
 
70
  )
71
 
72
  if __name__ == "__main__":
 
75
  model = AutoModelForMaskedLM.from_pretrained(model_name)
76
  mask_token = tokenizer.mask_token
77
  DEVICE = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")
 
78
 
79
  demo.launch()