Commit bf57492 · Parent(s): 1db84e2
Update README.md
README.md CHANGED
@@ -50,6 +50,22 @@ The following hyperparameters were used during training:
 | 0.7064 | 4.0 | 40 | 0.8390 |
 | 0.6468 | 5.0 | 50 | 0.8028 |
 
+```
+CONVERSION_PROMPT = 'LCT\n'  # LaTeX conversion task
+CONVERSION_TOKEN = 'LaTeX:'
+
+loaded_model = GPT2LMHeadModel.from_pretrained('Andyrasika/math_english_to_latex')
+latex_generator = pipeline('text-generation', model=loaded_model, tokenizer=tokenizer)
+
+text_sample = 'r of x is sum from 0 to x of x squared'
+conversion_text_sample = f'{CONVERSION_PROMPT}English: {text_sample}\n{CONVERSION_TOKEN}'
+
+print(latex_generator(
+    conversion_text_sample, num_beams=5, early_stopping=True, temperature=0.7,
+    max_length=len(tokenizer.encode(conversion_text_sample)) + 20
+)[0]['generated_text'])
+```
 
 ### Framework versions
 
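The added snippet relies on a `tokenizer` defined earlier in the README, which this hunk does not show. A minimal, self-contained sketch of the same usage, assuming the tokenizer is loaded from the same `Andyrasika/math_english_to_latex` checkpoint (an assumption, not shown in the diff), could look like this:

```
# Self-contained sketch of the usage example added in this commit.
# Assumption: `tokenizer` (not defined in the diff hunk) is the GPT-2 tokenizer
# bundled with the same checkpoint; adjust if the README defines it elsewhere.
from transformers import AutoTokenizer, GPT2LMHeadModel, pipeline

MODEL_ID = 'Andyrasika/math_english_to_latex'

tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)       # assumed source of `tokenizer`
loaded_model = GPT2LMHeadModel.from_pretrained(MODEL_ID)

latex_generator = pipeline('text-generation', model=loaded_model, tokenizer=tokenizer)

CONVERSION_PROMPT = 'LCT\n'   # task prefix ("LaTeX conversion task") used in the prompt format
CONVERSION_TOKEN = 'LaTeX:'   # marker after which the model is expected to emit LaTeX

text_sample = 'r of x is sum from 0 to x of x squared'
conversion_text_sample = f'{CONVERSION_PROMPT}English: {text_sample}\n{CONVERSION_TOKEN}'

# Beam search with a small length budget past the prompt keeps the output to one short formula.
result = latex_generator(
    conversion_text_sample,
    num_beams=5,
    early_stopping=True,
    temperature=0.7,
    max_length=len(tokenizer.encode(conversion_text_sample)) + 20,
)
print(result[0]['generated_text'])
```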