tokenizer --> self.tokenizer
handler.py: +4 -4
@@ -139,13 +139,13 @@ class EndpointHandler:
         processed_outputs = {"up": [], "down": []}
         if mode == 'meta2diff':
             for output in outputs:
-                up_split_index = output.index(tokenizer.convert_tokens_to_ids('</up>'))
-                generated_up_raw = [i.strip() for i in tokenizer.convert_ids_to_tokens(output[:up_split_index])]
+                up_split_index = output.index(self.tokenizer.convert_tokens_to_ids('</up>'))
+                generated_up_raw = [i.strip() for i in self.tokenizer.convert_ids_to_tokens(output[:up_split_index])]
                 generated_up = sorted(set(generated_up_raw) & set(self.unique_genes_p3), key = generated_up_raw.index)
                 processed_outputs['up'].append(generated_up)
 
-                down_split_index = output.index(tokenizer.convert_tokens_to_ids('</down>'))
-                generated_down_raw = [i.strip() for i in tokenizer.convert_ids_to_tokens(output[up_split_index:down_split_index+1])]
+                down_split_index = output.index(self.tokenizer.convert_tokens_to_ids('</down>'))
+                generated_down_raw = [i.strip() for i in self.tokenizer.convert_ids_to_tokens(output[up_split_index:down_split_index+1])]
                 generated_down = sorted(set(generated_down_raw) & set(self.unique_genes_p3), key = generated_down_raw.index)
                 processed_outputs['down'].append(generated_down)
 
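The fix works because the tokenizer is only bound as an instance attribute, so the post-processing code has to go through `self.tokenizer`; a bare `tokenizer` name is undefined inside the method and raises NameError at request time. Below is a minimal sketch of that structure. It is an assumption-laden illustration, not the real handler: the AutoTokenizer call, the `path` argument, and the `postprocess` method name are invented for the example, and only the loop body mirrors the diff above.

# Minimal sketch, assuming the tokenizer is loaded in __init__ and that
# unique_genes_p3 holds the allowed gene vocabulary. Names other than those
# visible in the diff are illustrative assumptions.
from transformers import AutoTokenizer


class EndpointHandler:
    def __init__(self, path=""):
        # The tokenizer exists only as an instance attribute, so other methods
        # must reference it as self.tokenizer; a bare `tokenizer` name would
        # raise NameError when a request is processed.
        self.tokenizer = AutoTokenizer.from_pretrained(path)
        self.unique_genes_p3 = []  # populated from the real gene list in practice

    def postprocess(self, outputs, mode="meta2diff"):
        processed_outputs = {"up": [], "down": []}
        if mode == "meta2diff":
            for output in outputs:
                # Split the generated token ids at the </up> marker and keep
                # only tokens that are known genes, preserving generation order.
                up_split_index = output.index(self.tokenizer.convert_tokens_to_ids("</up>"))
                generated_up_raw = [t.strip() for t in self.tokenizer.convert_ids_to_tokens(output[:up_split_index])]
                generated_up = sorted(set(generated_up_raw) & set(self.unique_genes_p3), key=generated_up_raw.index)
                processed_outputs["up"].append(generated_up)
        return processed_outputs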