Update constants.py
add load hf-token from .env
constants.py  +5 -1
@@ -1,5 +1,9 @@
 from datasets import load_dataset
+from dotenv import load_dotenv
+import os
 
+load_dotenv()
+HF_TOKEN=os.environ['HF_TOKEN']
 
 # Constants values
 LEADERBOARD_PATH = "atlasia/Open-Arabic-Dialect-Identification-Leaderboard"
@@ -86,7 +90,7 @@ default_languages = [
 ]
 
 # Load eval dataset
-eval_dataset = load_dataset(DATA_PATH, split='test')
+eval_dataset = load_dataset(DATA_PATH, split='test',token=HF_TOKEN)
 
 # Supported dialects
 all_target_languages = list(eval_dataset.unique("dialect"))
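For context: the change reads a Hugging Face access token from a local .env file (via python-dotenv) and passes it to load_dataset, which is how the datasets library authenticates Hub requests for gated or private dataset repos. The sketch below shows the same flow in isolation; the .env contents and the DATA_PATH value are illustrative placeholders, not taken from this repo.

# Minimal sketch of the token-loading flow, assuming a .env file next to
# constants.py containing a line such as:  HF_TOKEN=hf_xxxxxxxxxxxxxxxx
import os

from datasets import load_dataset
from dotenv import load_dotenv

load_dotenv()                            # copy key=value pairs from .env into os.environ
HF_TOKEN = os.environ["HF_TOKEN"]        # raises KeyError if the token is missing

DATA_PATH = "atlasia/example-eval-set"   # hypothetical dataset repo id, for illustration only
eval_dataset = load_dataset(DATA_PATH, split="test", token=HF_TOKEN)
print(eval_dataset.unique("dialect"))    # list the dialect labels in the eval split

One design note: os.environ['HF_TOKEN'] fails fast with a KeyError when the variable is absent, whereas os.getenv('HF_TOKEN') would return None and defer the failure to the load_dataset call.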