Update README.md
README.md (changed)
@@ -45,14 +45,13 @@ We have fine-tuned all pre-trained models on 3 legal tasks with Indian datasets:
 
 ### Citation
 ```
-@misc{https://doi.org/10.48550/arxiv.2209.06049,
+@inproceedings{paul-2022-pretraining,
 doi = {10.48550/ARXIV.2209.06049},
 url = {https://arxiv.org/abs/2209.06049},
 author = {Paul, Shounak and Mandal, Arpan and Goyal, Pawan and Ghosh, Saptarshi},
-title = {Pre-training Transformers on Indian Legal Text},
-publisher = {arXiv},
-year = {2022},
-copyright = {Creative Commons Attribution 4.0 International}
+title = {Pre-trained Language Models for the Legal Domain: A Case Study on Indian Law},
+booktitle = {Proceedings of ICAIL 2023},
+year = {2023},
 }
 ```
 