Add new CrossEncoder model

Files changed:
- README.md +733 -0
- config.json +57 -0
- model.safetensors +3 -0
- special_tokens_map.json +37 -0
- tokenizer.json +0 -0
- tokenizer_config.json +945 -0

README.md (ADDED):
---
language:
- en
tags:
- sentence-transformers
- cross-encoder
- reranker
- generated_from_trainer
- dataset_size:78704
- loss:ListNetLoss
base_model: jhu-clsp/ettin-encoder-32m
datasets:
- microsoft/ms_marco
pipeline_tag: text-ranking
library_name: sentence-transformers
metrics:
- map
- mrr@10
- ndcg@10
model-index:
- name: CrossEncoder based on jhu-clsp/ettin-encoder-32m
  results:
  - task:
      type: cross-encoder-reranking
      name: Cross Encoder Reranking
    dataset:
      name: NanoMSMARCO R100
      type: NanoMSMARCO_R100
    metrics:
    - type: map
      value: 0.4818
      name: Map
    - type: mrr@10
      value: 0.4698
      name: Mrr@10
    - type: ndcg@10
      value: 0.5348
      name: Ndcg@10
  - task:
      type: cross-encoder-reranking
      name: Cross Encoder Reranking
    dataset:
      name: NanoNFCorpus R100
      type: NanoNFCorpus_R100
    metrics:
    - type: map
      value: 0.3292
      name: Map
    - type: mrr@10
      value: 0.5225
      name: Mrr@10
    - type: ndcg@10
      value: 0.3376
      name: Ndcg@10
  - task:
      type: cross-encoder-reranking
      name: Cross Encoder Reranking
    dataset:
      name: NanoNQ R100
      type: NanoNQ_R100
    metrics:
    - type: map
      value: 0.5188
      name: Map
    - type: mrr@10
      value: 0.5268
      name: Mrr@10
    - type: ndcg@10
      value: 0.5534
      name: Ndcg@10
  - task:
      type: cross-encoder-nano-beir
      name: Cross Encoder Nano BEIR
    dataset:
      name: NanoBEIR R100 mean
      type: NanoBEIR_R100_mean
    metrics:
    - type: map
      value: 0.4432
      name: Map
    - type: mrr@10
      value: 0.5064
      name: Mrr@10
    - type: ndcg@10
      value: 0.4753
      name: Ndcg@10
---

# CrossEncoder based on jhu-clsp/ettin-encoder-32m

This is a [Cross Encoder](https://www.sbert.net/docs/cross_encoder/usage/usage.html) model finetuned from [jhu-clsp/ettin-encoder-32m](https://huggingface.co/jhu-clsp/ettin-encoder-32m) on the [ms_marco](https://huggingface.co/datasets/microsoft/ms_marco) dataset using the [sentence-transformers](https://www.SBERT.net) library. It computes scores for pairs of texts, which can be used for text reranking and semantic search.

## Model Details

### Model Description
- **Model Type:** Cross Encoder
- **Base model:** [jhu-clsp/ettin-encoder-32m](https://huggingface.co/jhu-clsp/ettin-encoder-32m) <!-- at revision 1b8ba06455dd44f80fc9c1ca9e22806157a57379 -->
- **Maximum Sequence Length:** 7999 tokens
- **Number of Output Labels:** 1 label
- **Training Dataset:**
    - [ms_marco](https://huggingface.co/datasets/microsoft/ms_marco)
- **Language:** en
<!-- - **License:** Unknown -->

### Model Sources

- **Documentation:** [Sentence Transformers Documentation](https://sbert.net)
- **Documentation:** [Cross Encoder Documentation](https://www.sbert.net/docs/cross_encoder/usage/usage.html)
- **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers)
- **Hugging Face:** [Cross Encoders on Hugging Face](https://huggingface.co/models?library=sentence-transformers&other=cross-encoder)

## Usage

### Direct Usage (Sentence Transformers)

First install the Sentence Transformers library:

```bash
pip install -U sentence-transformers
```

Then you can load this model and run inference.
```python
from sentence_transformers import CrossEncoder

# Download from the 🤗 Hub
model = CrossEncoder("rahulseetharaman/reranker-msmarco-v1.1-ettin-encoder-32m-listnet")
# Get scores for pairs of texts
pairs = [
['incubation period for food poisoning', 'Symptoms of chicken food poisoning. The period of time from when the contaminated food is eaten to when the symptoms appear is called the ‘incubation period’. In most cases of food poisoning the symptoms appear between 24 and 48 hours. These include: 1 Fever. 2 Abdominal pains. 3 Upset stomach. 4 Nausea. 5 Vomit'],
['incubation period for food poisoning', 'The symptoms of food poisoning are diarrhea, nausea, vomiting, exhaustion and stomach cramps. The symptoms depend on the type of bacteria that cause the infection. It is very important to get yourself checked as there are certain bacteria that have incubation periods as long as 3-4 days. 3. Food poisoning is an illness caused by eating contaminated food. In most of the cases, the food is contaminated by bacteria and viruses or the food is cooked at the wrong temperatures. Although food poisoning is generally thought of as a mild illness, there are cases when it can develop into serious conditions.'],
['incubation period for food poisoning', 'The incubation period typically lasts 24-48 hours and the symptoms usually pass in a couple of days. In young children, the rotavirus is a common cause of infection from contaminated food. The symptoms usually develop within a week and pass in around five to seven days. The bacteria are usually found on raw or undercooked meat (particularly poultry), unpasteurised milk and untreated water. The incubation period (the time between eating contaminated food and the start of symptoms) for food poisoning caused by campylobacter is usually between two and five days. The symptoms usually last less than a week'],
['incubation period for food poisoning', 'Incubation Period. The period between when the contaminated food is ingested and the time when symptoms first appear is known as the incubation period, according to the CDC. The length of the incubation period depends on the quantity consumed and the type of organism. Incubation Period for Food Poisoning. Food poisoning is an extremely unpleasant condition that results from eating rotten or otherwise tainted food.'],
['incubation period for food poisoning', 'Foodborne illness (also foodborne disease and colloquially referred to as food poisoning) is any illness resulting from the food spoilage of contaminated food, pathogenic bacteria, viruses, or parasites that contaminate food, as well as chemical or natural toxins such as poisonous mushrooms. '],
]
scores = model.predict(pairs)
print(scores.shape)
# (5,)

# Or rank different texts based on similarity to a single text
ranks = model.rank(
    'incubation period for food poisoning',
    [
'Symptoms of chicken food poisoning. The period of time from when the contaminated food is eaten to when the symptoms appear is called the ‘incubation period’. In most cases of food poisoning the symptoms appear between 24 and 48 hours. These include: 1 Fever. 2 Abdominal pains. 3 Upset stomach. 4 Nausea. 5 Vomit',
'The symptoms of food poisoning are diarrhea, nausea, vomiting, exhaustion and stomach cramps. The symptoms depend on the type of bacteria that cause the infection. It is very important to get yourself checked as there are certain bacteria that have incubation periods as long as 3-4 days. 3. Food poisoning is an illness caused by eating contaminated food. In most of the cases, the food is contaminated by bacteria and viruses or the food is cooked at the wrong temperatures. Although food poisoning is generally thought of as a mild illness, there are cases when it can develop into serious conditions.',
'The incubation period typically lasts 24-48 hours and the symptoms usually pass in a couple of days. In young children, the rotavirus is a common cause of infection from contaminated food. The symptoms usually develop within a week and pass in around five to seven days. The bacteria are usually found on raw or undercooked meat (particularly poultry), unpasteurised milk and untreated water. The incubation period (the time between eating contaminated food and the start of symptoms) for food poisoning caused by campylobacter is usually between two and five days. The symptoms usually last less than a week',
'Incubation Period. The period between when the contaminated food is ingested and the time when symptoms first appear is known as the incubation period, according to the CDC. The length of the incubation period depends on the quantity consumed and the type of organism. Incubation Period for Food Poisoning. Food poisoning is an extremely unpleasant condition that results from eating rotten or otherwise tainted food.',
'Foodborne illness (also foodborne disease and colloquially referred to as food poisoning) is any illness resulting from the food spoilage of contaminated food, pathogenic bacteria, viruses, or parasites that contaminate food, as well as chemical or natural toxins such as poisonous mushrooms. ',
    ]
)
# [{'corpus_id': ..., 'score': ...}, {'corpus_id': ..., 'score': ...}, ...]
```
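
A common way to use a reranker like this one is as the second stage of a retrieve-then-rerank pipeline: a fast bi-encoder retrieves candidate passages and the CrossEncoder rescores them. The sketch below illustrates that pattern; the retriever checkpoint, the toy corpus, and `top_k` are illustrative placeholders and are not part of this model's training or evaluation setup.

```python
from sentence_transformers import CrossEncoder, SentenceTransformer, util

# Hypothetical first-stage retriever; any bi-encoder checkpoint could be used here.
retriever = SentenceTransformer("sentence-transformers/all-MiniLM-L6-v2")
reranker = CrossEncoder("rahulseetharaman/reranker-msmarco-v1.1-ettin-encoder-32m-listnet")

corpus = [  # placeholder passages
    "In most cases of food poisoning the symptoms appear between 24 and 48 hours.",
    "Foodborne illness is any illness resulting from contaminated food.",
    "Obsidian is an igneous rock that forms when molten rock cools very rapidly.",
]
query = "incubation period for food poisoning"

# Stage 1: embed the corpus and retrieve candidates with the bi-encoder.
corpus_embeddings = retriever.encode(corpus, convert_to_tensor=True)
query_embedding = retriever.encode(query, convert_to_tensor=True)
hits = util.semantic_search(query_embedding, corpus_embeddings, top_k=3)[0]

# Stage 2: rerank the retrieved candidates with the cross-encoder.
candidates = [corpus[hit["corpus_id"]] for hit in hits]
reranked = reranker.rank(query, candidates)
for entry in reranked:
    print(round(float(entry["score"]), 4), candidates[entry["corpus_id"]][:60])
```

The cross-encoder scores each (query, passage) pair jointly, so it is slower than the bi-encoder but typically more accurate as a final reranking stage.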

<!--
### Direct Usage (Transformers)

<details><summary>Click to see the direct usage in Transformers</summary>

</details>
-->

<!--
### Downstream Usage (Sentence Transformers)

You can finetune this model on your own dataset.

<details><summary>Click to expand</summary>

</details>
-->

<!--
### Out-of-Scope Use

*List how the model may foreseeably be misused and address what users ought not to do with the model.*
-->

## Evaluation

### Metrics

#### Cross Encoder Reranking

* Datasets: `NanoMSMARCO_R100`, `NanoNFCorpus_R100` and `NanoNQ_R100`
* Evaluated with [<code>CrossEncoderRerankingEvaluator</code>](https://sbert.net/docs/package_reference/cross_encoder/evaluation.html#sentence_transformers.cross_encoder.evaluation.CrossEncoderRerankingEvaluator) with these parameters:
  ```json
  {
      "at_k": 10,
      "always_rerank_positives": true
  }
  ```
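
As a rough sketch of how such an evaluation can be reproduced in code: the constructor arguments `at_k` and `always_rerank_positives` come from the parameters above, while the `samples` layout (query, relevant passages, candidate pool) follows my reading of the evaluator's documentation and the example data is purely illustrative.

```python
from sentence_transformers import CrossEncoder
from sentence_transformers.cross_encoder.evaluation import CrossEncoderRerankingEvaluator

model = CrossEncoder("rahulseetharaman/reranker-msmarco-v1.1-ettin-encoder-32m-listnet")

# Each sample pairs a query with its relevant passage(s) and a candidate pool to rerank.
samples = [
    {
        "query": "incubation period for food poisoning",
        "positive": ["The incubation period typically lasts 24-48 hours ..."],
        "documents": [
            "The incubation period typically lasts 24-48 hours ...",
            "Obsidian is an igneous rock that forms when molten rock cools rapidly.",
        ],
    },
]

evaluator = CrossEncoderRerankingEvaluator(
    samples=samples,
    at_k=10,
    always_rerank_positives=True,
)
print(evaluator(model))  # map, mrr@10 and ndcg@10 over the reranked candidates
```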

| Metric      | NanoMSMARCO_R100     | NanoNFCorpus_R100    | NanoNQ_R100          |
|:------------|:---------------------|:---------------------|:---------------------|
| map         | 0.4818 (-0.0078)     | 0.3292 (+0.0682)     | 0.5188 (+0.0992)     |
| mrr@10      | 0.4698 (-0.0077)     | 0.5225 (+0.0226)     | 0.5268 (+0.1001)     |
| **ndcg@10** | **0.5348 (-0.0056)** | **0.3376 (+0.0126)** | **0.5534 (+0.0528)** |

#### Cross Encoder Nano BEIR

* Dataset: `NanoBEIR_R100_mean`
* Evaluated with [<code>CrossEncoderNanoBEIREvaluator</code>](https://sbert.net/docs/package_reference/cross_encoder/evaluation.html#sentence_transformers.cross_encoder.evaluation.CrossEncoderNanoBEIREvaluator) with these parameters:
  ```json
  {
      "dataset_names": [
          "msmarco",
          "nfcorpus",
          "nq"
      ],
      "rerank_k": 100,
      "at_k": 10,
      "always_rerank_positives": true
  }
  ```
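
A minimal sketch for re-running this evaluation with the parameters above; the NanoBEIR subsets are fetched by the evaluator itself, and the exact keys in the returned dictionary may differ between library versions.

```python
from sentence_transformers import CrossEncoder
from sentence_transformers.cross_encoder.evaluation import CrossEncoderNanoBEIREvaluator

model = CrossEncoder("rahulseetharaman/reranker-msmarco-v1.1-ettin-encoder-32m-listnet")

# Parameters mirror the JSON block above: rerank the top-100 retrieved candidates,
# report metrics at k=10, and always include the known positives in the rerank pool.
evaluator = CrossEncoderNanoBEIREvaluator(
    dataset_names=["msmarco", "nfcorpus", "nq"],
    rerank_k=100,
    at_k=10,
    always_rerank_positives=True,
)
results = evaluator(model)
print(results)  # per-dataset and mean map / mrr@10 / ndcg@10 scores
```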

| Metric      | Value                |
|:------------|:---------------------|
| map         | 0.4432 (+0.0532)     |
| mrr@10      | 0.5064 (+0.0383)     |
| **ndcg@10** | **0.4753 (+0.0199)** |

<!--
## Bias, Risks and Limitations

*What are the known or foreseeable issues stemming from this model? You could also flag here known failure cases or weaknesses of the model.*
-->

<!--
### Recommendations

*What are recommendations with respect to the foreseeable issues? For example, filtering explicit content.*
-->

## Training Details

### Training Dataset

#### ms_marco

* Dataset: [ms_marco](https://huggingface.co/datasets/microsoft/ms_marco) at [a47ee7a](https://huggingface.co/datasets/microsoft/ms_marco/tree/a47ee7aae8d7d466ba15f9f0bfac3b3681087b3a)
* Size: 78,704 training samples
* Columns: <code>query</code>, <code>docs</code>, and <code>labels</code>
* Approximate statistics based on the first 1000 samples:

|         | query | docs | labels |
|:--------|:------|:-----|:-------|
| type    | string | list | list |
| details | <ul><li>min: 11 characters</li><li>mean: 33.46 characters</li><li>max: 101 characters</li></ul> | <ul><li>min: 2 elements</li><li>mean: 6.00 elements</li><li>max: 10 elements</li></ul> | <ul><li>min: 2 elements</li><li>mean: 6.00 elements</li><li>max: 10 elements</li></ul> |

* Samples:

| query | docs | labels |
|:------|:-----|:-------|
| <code>how is obsidian formed</code> | <code>['OBSIDIAN-Formed from rapid cooling of felsic lavas, high in silica and aluminum. Obsidian is an extrusive igneous rock, formed from the extremely rapid cooling of felsic lav … a which includes orthoclase, quartz, and mica. Obsidian is a volcanic glass, meaning that the mineral constituents from which it formed did not have time to crystallize due to the rapid cooling.', 'OBSIDIAN-Formed from rapid cooling of felsic lavas, high in silica and aluminum. Obsidian is an extrusive igneous rock, formed from the extremely rapid cooling of felsic lava which includes orthoclase, quartz, and mica.', 'Obsidian is an igneous rock that forms when molten rock cools very rapidly. The result is a rock that cooled so fast, crystals did not get a chance to form. Obsidian is a volcanic glass with a smooth and uniform structure.', 'Obsidian is an igneous rock that forms when molten rock material cools so rapidly that atoms are unable to arrange themselves into a crystalline structure. It is an amorphous ...</code> | <code>[1, 0, 0, 0, 0, ...]</code> |
| <code>estimated construction cost per square foot</code> | <code>['by Jay Behm-A very basic way to estimate the cost of building a detached garage is to simply apply a typical average per square foot construction of $30 to $40. That is for an average cost basis for having a contractor / builder build a basic one story garage for you on your property. If you are in an area with higher-than-average cost of living you can use $50 to $55. If there are complications or you prefer top quality materials and components the price can go up.So, generally speaking, a 24 ft.sq., two car, basic garage will cost in the range of $17,000 to $23,000.', 'An average commercial steel building costs between $16 and $20 per square foot, including building package (I-Beams, purlins, girts etc.) , delivery, foundation and the cost of construction. Since many retail & commercial buildings require additional finishing like insulation or facade façade, customization the cost may rise to$ 30 or$ 40 per square (FOOT). sf', 'Since many retail & commercial buildings require addit...</code> | <code>[1, 0, 0, 0, 0, ...]</code> |
| <code>what is a cove</code> | <code>['cove 1 n 1 a small sheltered bay in the shoreline of a sea river or lake 2 a a recess or small valley in the side of a mountain b a cave or cavern 3 cove 1 1 a small indentation or recess in the shoreline of a sea lake or river 2 a sheltered nook 3 a hollow or recess in a mountain cavern 4 a narrow pass or sheltered area between woods or hills 5 a concave architectural surface or molding esp one linking a ceiling and a wall', 'a cove is a small type of bay or coastal inlet coves usually have narrow restricted entrances are often circular or oval and are often situated within a larger bay small narrow sheltered bays inlets creeks or recesses in a coast are often considered coves ', 'cove s name comes from the niche created by the base of mt fanny mill creek and the ridges to the south our views are stunning our sunsets beautiful and our soil rich sunset in cove mt fanny a 7150 foot peak in the wallowa range dominates the eastward view about cove cove is a little slice of heaven locate...</code> | <code>[1, 1, 0, 0, 0, ...]</code> |

* Loss: [<code>ListNetLoss</code>](https://sbert.net/docs/package_reference/cross_encoder/losses.html#listnetloss) with these parameters:
  ```json
  {
      "activation_fn": "torch.nn.modules.linear.Identity",
      "mini_batch_size": 16
  }
  ```
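
For reference, the loss above corresponds roughly to the following construction in code. This is a sketch, assuming the argument names in the JSON block match the `ListNetLoss` signature of the installed sentence-transformers version.

```python
import torch
from sentence_transformers import CrossEncoder
from sentence_transformers.cross_encoder.losses import ListNetLoss

model = CrossEncoder("jhu-clsp/ettin-encoder-32m", num_labels=1)

# ListNet treats the per-query label list as a relevance distribution and minimises
# the cross-entropy between it and the softmax over the predicted document scores.
loss = ListNetLoss(
    model,
    activation_fn=torch.nn.Identity(),
    mini_batch_size=16,
)
```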

### Evaluation Dataset

#### ms_marco

* Dataset: [ms_marco](https://huggingface.co/datasets/microsoft/ms_marco) at [a47ee7a](https://huggingface.co/datasets/microsoft/ms_marco/tree/a47ee7aae8d7d466ba15f9f0bfac3b3681087b3a)
* Size: 1,000 evaluation samples
* Columns: <code>query</code>, <code>docs</code>, and <code>labels</code>
* Approximate statistics based on the first 1000 samples:

|         | query | docs | labels |
|:--------|:------|:-----|:-------|
| type    | string | list | list |
| details | <ul><li>min: 10 characters</li><li>mean: 34.41 characters</li><li>max: 99 characters</li></ul> | <ul><li>min: 2 elements</li><li>mean: 6.00 elements</li><li>max: 10 elements</li></ul> | <ul><li>min: 2 elements</li><li>mean: 6.00 elements</li><li>max: 10 elements</li></ul> |

* Samples:

| query | docs | labels |
|:------|:-----|:-------|
| <code>incubation period for food poisoning</code> | <code>['Symptoms of chicken food poisoning. The period of time from when the contaminated food is eaten to when the symptoms appear is called the ‘incubation period’. In most cases of food poisoning the symptoms appear between 24 and 48 hours. These include: 1 Fever. 2 Abdominal pains. 3 Upset stomach. 4 Nausea. 5 Vomit', 'The symptoms of food poisoning are diarrhea, nausea, vomiting, exhaustion and stomach cramps. The symptoms depend on the type of bacteria that cause the infection. It is very important to get yourself checked as there are certain bacteria that have incubation periods as long as 3-4 days. 3. Food poisoning is an illness caused by eating contaminated food. In most of the cases, the food is contaminated by bacteria and viruses or the food is cooked at the wrong temperatures. Although food poisoning is generally thought of as a mild illness, there are cases when it can develop into serious conditions.', 'The incubation period typically lasts 24-48 hours and the symptoms u...</code> | <code>[1, 0, 0, 0, 0, ...]</code> |
| <code>what are nenet tents made from</code> | <code>['Nenets herders live in tepee style tents ‘mya’ the covers of which are made from reindeer skin for use in winter and canvas in the summer. Their herding involves seasonal migrations with their reindeer. For the Nenets who migrate up to the north the Yamal Peninsula, migrations can be as long as 1,000km each way. TUNDRA NENETS. With a population of over 41,000, the Nenets are one of the largest of the indigenous groups in Northern Siberia. Their territory covers a vast area that stretches from the Kanin Peninsula at the White Sea in the west, all the way to the Taymyr Peninsula, a distance of more than 2,000 km.', 'The Yamal-Nenets are a nomadic people and herders of reindeer in the vast northern reaches of Siberia. They live in Chums (pronounced chooms), which are large Tipi-like tents made from tall poles and reindeer hides. “…The traditional chum consists of reindeer hides sewn together and wrapped around wooden poles that are organized in a circle…” Wikipedia – Chum. On its right ...</code> | <code>[1, 0, 0, 0, 0, ...]</code> |
| <code>does the draft include women</code> | <code>["Based on our numbers for a worst-case scenario, if we had to do such a draft, it would include women or you just won't get the numbers of health care workers you needed, said Lew Brodsky, director of congressional and government affairs for the SSS. But I think culturally there might be a lot of negative feelings among political parts of the spectrum, and possibly more among men than women.. Can't Live With Them, Can't Live Without Them. Health care workers were part of the draft between 1950 and 1973 to fight the Korean and Vietnam wars.", 'Females may be included in the Selective Service and qualify for a potential draft should one be ordered by the president, Defense Secretary Leon Panetta said. In doing so, it may force Congress or the president to include women or scrap the Selective Service, analysts said. “That, frankly, could be true,” Nancy Duff Campbell, co-president of the National Women’s Law Center in Washington, D.C., told Military.com.', "Now that women can serve in co...</code> | <code>[1, 0, 0, 0, 0, ...]</code> |

* Loss: [<code>ListNetLoss</code>](https://sbert.net/docs/package_reference/cross_encoder/losses.html#listnetloss) with these parameters:
  ```json
  {
      "activation_fn": "torch.nn.modules.linear.Identity",
      "mini_batch_size": 16
  }
  ```

### Training Hyperparameters
#### Non-Default Hyperparameters

- `eval_strategy`: steps
- `per_device_train_batch_size`: 16
- `per_device_eval_batch_size`: 16
- `learning_rate`: 2e-05
- `num_train_epochs`: 5
- `seed`: 12
- `bf16`: True
- `load_best_model_at_end`: True
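
The non-default values above map onto a sentence-transformers v4-style cross-encoder training setup roughly as follows. This is a sketch, not the exact training script: the preprocessing that turns MS MARCO into `query` / `docs` / `labels` columns is replaced by a toy in-memory dataset, and `output_dir` is a placeholder.

```python
from datasets import Dataset
from sentence_transformers.cross_encoder import (
    CrossEncoder,
    CrossEncoderTrainer,
    CrossEncoderTrainingArguments,
)
from sentence_transformers.cross_encoder.losses import ListNetLoss

model = CrossEncoder("jhu-clsp/ettin-encoder-32m", num_labels=1)
loss = ListNetLoss(model, mini_batch_size=16)

# Toy stand-in for the preprocessed MS MARCO splits (query, docs, labels columns).
toy = Dataset.from_dict({
    "query": ["how is obsidian formed"],
    "docs": [["Obsidian forms when felsic lava cools too quickly to crystallise.",
              "A cove is a small sheltered bay."]],
    "labels": [[1, 0]],
})

args = CrossEncoderTrainingArguments(
    output_dir="reranker-msmarco-ettin-encoder-32m-listnet",  # placeholder
    eval_strategy="steps",
    per_device_train_batch_size=16,
    per_device_eval_batch_size=16,
    learning_rate=2e-5,
    num_train_epochs=5,
    seed=12,
    bf16=True,
    load_best_model_at_end=True,
)

trainer = CrossEncoderTrainer(
    model=model,
    args=args,
    train_dataset=toy,
    eval_dataset=toy,  # placeholder; the real run used a held-out 1,000-sample split
    loss=loss,
)
trainer.train()
```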

#### All Hyperparameters
<details><summary>Click to expand</summary>

- `overwrite_output_dir`: False
- `do_predict`: False
- `eval_strategy`: steps
- `prediction_loss_only`: True
- `per_device_train_batch_size`: 16
- `per_device_eval_batch_size`: 16
- `per_gpu_train_batch_size`: None
- `per_gpu_eval_batch_size`: None
- `gradient_accumulation_steps`: 1
- `eval_accumulation_steps`: None
- `torch_empty_cache_steps`: None
- `learning_rate`: 2e-05
- `weight_decay`: 0.0
- `adam_beta1`: 0.9
- `adam_beta2`: 0.999
- `adam_epsilon`: 1e-08
- `max_grad_norm`: 1.0
- `num_train_epochs`: 5
- `max_steps`: -1
- `lr_scheduler_type`: linear
- `lr_scheduler_kwargs`: {}
- `warmup_ratio`: 0.0
- `warmup_steps`: 0
- `log_level`: passive
- `log_level_replica`: warning
- `log_on_each_node`: True
- `logging_nan_inf_filter`: True
- `save_safetensors`: True
- `save_on_each_node`: False
- `save_only_model`: False
- `restore_callback_states_from_checkpoint`: False
- `no_cuda`: False
- `use_cpu`: False
- `use_mps_device`: False
- `seed`: 12
- `data_seed`: None
- `jit_mode_eval`: False
- `use_ipex`: False
- `bf16`: True
- `fp16`: False
- `fp16_opt_level`: O1
- `half_precision_backend`: auto
- `bf16_full_eval`: False
- `fp16_full_eval`: False
- `tf32`: None
- `local_rank`: 0
- `ddp_backend`: None
- `tpu_num_cores`: None
- `tpu_metrics_debug`: False
- `debug`: []
- `dataloader_drop_last`: False
- `dataloader_num_workers`: 0
- `dataloader_prefetch_factor`: None
- `past_index`: -1
- `disable_tqdm`: False
- `remove_unused_columns`: True
- `label_names`: None
- `load_best_model_at_end`: True
- `ignore_data_skip`: False
- `fsdp`: []
- `fsdp_min_num_params`: 0
- `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}
- `fsdp_transformer_layer_cls_to_wrap`: None
- `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None}
- `deepspeed`: None
- `label_smoothing_factor`: 0.0
- `optim`: adamw_torch
- `optim_args`: None
- `adafactor`: False
- `group_by_length`: False
- `length_column_name`: length
- `ddp_find_unused_parameters`: None
- `ddp_bucket_cap_mb`: None
- `ddp_broadcast_buffers`: False
- `dataloader_pin_memory`: True
- `dataloader_persistent_workers`: False
- `skip_memory_metrics`: True
- `use_legacy_prediction_loop`: False
- `push_to_hub`: False
- `resume_from_checkpoint`: None
- `hub_model_id`: None
- `hub_strategy`: every_save
- `hub_private_repo`: None
- `hub_always_push`: False
- `hub_revision`: None
- `gradient_checkpointing`: False
- `gradient_checkpointing_kwargs`: None
- `include_inputs_for_metrics`: False
- `include_for_metrics`: []
- `eval_do_concat_batches`: True
- `fp16_backend`: auto
- `push_to_hub_model_id`: None
- `push_to_hub_organization`: None
- `mp_parameters`: 
- `auto_find_batch_size`: False
- `full_determinism`: False
- `torchdynamo`: None
- `ray_scope`: last
- `ddp_timeout`: 1800
- `torch_compile`: False
- `torch_compile_backend`: None
- `torch_compile_mode`: None
- `include_tokens_per_second`: False
- `include_num_input_tokens_seen`: False
- `neftune_noise_alpha`: None
- `optim_target_modules`: None
- `batch_eval_metrics`: False
- `eval_on_start`: False
- `use_liger_kernel`: False
- `liger_kernel_config`: None
- `eval_use_gather_object`: False
- `average_tokens_across_devices`: False
- `prompts`: None
- `batch_sampler`: batch_sampler
- `multi_dataset_batch_sampler`: proportional
- `router_mapping`: {}
- `learning_rate_mapping`: {}

</details>

### Training Logs
<details><summary>Click to expand</summary>

| Epoch | Step | Training Loss | Validation Loss | NanoMSMARCO_R100_ndcg@10 | NanoNFCorpus_R100_ndcg@10 | NanoNQ_R100_ndcg@10 | NanoBEIR_R100_mean_ndcg@10 |
|
| 427 |
+
|:----------:|:--------:|:-------------:|:---------------:|:------------------------:|:-------------------------:|:--------------------:|:--------------------------:|
|
| 428 |
+
| -1 | -1 | - | - | 0.0376 (-0.5028) | 0.2282 (-0.0968) | 0.0589 (-0.4418) | 0.1082 (-0.3471) |
|
| 429 |
+
| 0.0002 | 1 | 2.7963 | - | - | - | - | - |
|
| 430 |
+
| 0.0203 | 100 | 2.1145 | 2.0870 | 0.0684 (-0.4720) | 0.2264 (-0.0986) | 0.0688 (-0.4318) | 0.1212 (-0.3341) |
|
| 431 |
+
| 0.0407 | 200 | 2.0886 | 2.0854 | 0.1009 (-0.4395) | 0.2918 (-0.0333) | 0.0767 (-0.4239) | 0.1565 (-0.2989) |
|
| 432 |
+
| 0.0610 | 300 | 2.0848 | 2.0833 | 0.1941 (-0.3464) | 0.2859 (-0.0391) | 0.2033 (-0.2973) | 0.2278 (-0.2276) |
|
| 433 |
+
| 0.0813 | 400 | 2.0868 | 2.0803 | 0.3491 (-0.1914) | 0.3215 (-0.0036) | 0.3451 (-0.1555) | 0.3386 (-0.1168) |
|
| 434 |
+
| 0.1016 | 500 | 2.0738 | 2.0791 | 0.4083 (-0.1321) | 0.3161 (-0.0089) | 0.2742 (-0.2265) | 0.3329 (-0.1225) |
|
| 435 |
+
| 0.1220 | 600 | 2.0713 | 2.0782 | 0.3870 (-0.1535) | 0.3123 (-0.0128) | 0.4370 (-0.0636) | 0.3787 (-0.0766) |
|
| 436 |
+
| 0.1423 | 700 | 2.0773 | 2.0774 | 0.4714 (-0.0690) | 0.3296 (+0.0045) | 0.4619 (-0.0387) | 0.4209 (-0.0344) |
|
| 437 |
+
| 0.1626 | 800 | 2.0787 | 2.0773 | 0.4514 (-0.0891) | 0.3190 (-0.0060) | 0.3891 (-0.1115) | 0.3865 (-0.0689) |
|
| 438 |
+
| 0.1830 | 900 | 2.0752 | 2.0767 | 0.5139 (-0.0266) | 0.3340 (+0.0090) | 0.5190 (+0.0183) | 0.4556 (+0.0002) |
|
| 439 |
+
| 0.2033 | 1000 | 2.0708 | 2.0765 | 0.5015 (-0.0389) | 0.3230 (-0.0021) | 0.4995 (-0.0012) | 0.4413 (-0.0141) |
|
| 440 |
+
| 0.2236 | 1100 | 2.0783 | 2.0761 | 0.5063 (-0.0341) | 0.3357 (+0.0107) | 0.4689 (-0.0318) | 0.4370 (-0.0184) |
|
| 441 |
+
| 0.2440 | 1200 | 2.0787 | 2.0762 | 0.5090 (-0.0315) | 0.3288 (+0.0037) | 0.4525 (-0.0481) | 0.4301 (-0.0253) |
|
| 442 |
+
| 0.2643 | 1300 | 2.0766 | 2.0758 | 0.4617 (-0.0787) | 0.3072 (-0.0179) | 0.4633 (-0.0373) | 0.4107 (-0.0447) |
|
| 443 |
+
| 0.2846 | 1400 | 2.0844 | 2.0756 | 0.4658 (-0.0746) | 0.3033 (-0.0218) | 0.4700 (-0.0306) | 0.4130 (-0.0424) |
|
| 444 |
+
| 0.3049 | 1500 | 2.0761 | 2.0760 | 0.4577 (-0.0827) | 0.2975 (-0.0276) | 0.4457 (-0.0550) | 0.4003 (-0.0551) |
|
| 445 |
+
| 0.3253 | 1600 | 2.0808 | 2.0755 | 0.4687 (-0.0718) | 0.3286 (+0.0036) | 0.4494 (-0.0513) | 0.4155 (-0.0398) |
|
| 446 |
+
| 0.3456 | 1700 | 2.0709 | 2.0758 | 0.4946 (-0.0458) | 0.3453 (+0.0203) | 0.5006 (-0.0001) | 0.4468 (-0.0085) |
|
| 447 |
+
| 0.3659 | 1800 | 2.0726 | 2.0752 | 0.4985 (-0.0420) | 0.3204 (-0.0047) | 0.4795 (-0.0212) | 0.4328 (-0.0226) |
|
| 448 |
+
| 0.3863 | 1900 | 2.0742 | 2.0750 | 0.5397 (-0.0007) | 0.3118 (-0.0133) | 0.5320 (+0.0314) | 0.4612 (+0.0058) |
|
| 449 |
+
| 0.4066 | 2000 | 2.0774 | 2.0749 | 0.4848 (-0.0556) | 0.3152 (-0.0098) | 0.4612 (-0.0395) | 0.4204 (-0.0350) |
|
| 450 |
+
| 0.4269 | 2100 | 2.0702 | 2.0745 | 0.5070 (-0.0334) | 0.3170 (-0.0080) | 0.4962 (-0.0044) | 0.4401 (-0.0153) |
|
| 451 |
+
| 0.4472 | 2200 | 2.0819 | 2.0747 | 0.5502 (+0.0098) | 0.3376 (+0.0125) | 0.5164 (+0.0157) | 0.4681 (+0.0127) |
|
| 452 |
+
| 0.4676 | 2300 | 2.0843 | 2.0748 | 0.5100 (-0.0305) | 0.3142 (-0.0108) | 0.4956 (-0.0051) | 0.4399 (-0.0154) |
|
| 453 |
+
| 0.4879 | 2400 | 2.0634 | 2.0754 | 0.4906 (-0.0498) | 0.3158 (-0.0093) | 0.4698 (-0.0309) | 0.4254 (-0.0300) |
|
| 454 |
+
| 0.5082 | 2500 | 2.0633 | 2.0751 | 0.4884 (-0.0520) | 0.3159 (-0.0092) | 0.4937 (-0.0070) | 0.4327 (-0.0227) |
|
| 455 |
+
| 0.5286 | 2600 | 2.0679 | 2.0749 | 0.5111 (-0.0294) | 0.2948 (-0.0302) | 0.4877 (-0.0129) | 0.4312 (-0.0242) |
|
| 456 |
+
| 0.5489 | 2700 | 2.078 | 2.0742 | 0.5502 (+0.0098) | 0.2811 (-0.0439) | 0.4596 (-0.0410) | 0.4303 (-0.0251) |
|
| 457 |
+
| 0.5692 | 2800 | 2.0715 | 2.0740 | 0.5301 (-0.0103) | 0.2918 (-0.0332) | 0.4736 (-0.0271) | 0.4318 (-0.0235) |
|
| 458 |
+
| 0.5896 | 2900 | 2.0773 | 2.0739 | 0.4941 (-0.0464) | 0.3095 (-0.0155) | 0.4572 (-0.0434) | 0.4203 (-0.0351) |
|
| 459 |
+
| 0.6099 | 3000 | 2.074 | 2.0741 | 0.5228 (-0.0177) | 0.3135 (-0.0115) | 0.5534 (+0.0528) | 0.4632 (+0.0079) |
|
| 460 |
+
| 0.6302 | 3100 | 2.0782 | 2.0744 | 0.5411 (+0.0007) | 0.3301 (+0.0050) | 0.4187 (-0.0820) | 0.4300 (-0.0254) |
|
| 461 |
+
| 0.6505 | 3200 | 2.0695 | 2.0743 | 0.5349 (-0.0056) | 0.3122 (-0.0129) | 0.4977 (-0.0029) | 0.4483 (-0.0071) |
|
| 462 |
+
| 0.6709 | 3300 | 2.0696 | 2.0740 | 0.5406 (+0.0002) | 0.3174 (-0.0076) | 0.5236 (+0.0229) | 0.4605 (+0.0052) |
|
| 463 |
+
| 0.6912 | 3400 | 2.0802 | 2.0735 | 0.5588 (+0.0183) | 0.3232 (-0.0018) | 0.4908 (-0.0099) | 0.4576 (+0.0022) |
|
| 464 |
+
| 0.7115 | 3500 | 2.069 | 2.0735 | 0.5023 (-0.0381) | 0.3388 (+0.0138) | 0.5123 (+0.0117) | 0.4511 (-0.0042) |
|
| 465 |
+
| 0.7319 | 3600 | 2.0745 | 2.0736 | 0.5291 (-0.0113) | 0.3195 (-0.0056) | 0.5064 (+0.0057) | 0.4517 (-0.0037) |
|
| 466 |
+
| 0.7522 | 3700 | 2.073 | 2.0734 | 0.5346 (-0.0059) | 0.3209 (-0.0042) | 0.5047 (+0.0041) | 0.4534 (-0.0020) |
|
| 467 |
+
| 0.7725 | 3800 | 2.0747 | 2.0737 | 0.5076 (-0.0328) | 0.3164 (-0.0086) | 0.5414 (+0.0407) | 0.4551 (-0.0002) |
|
| 468 |
+
| 0.7928 | 3900 | 2.076 | 2.0733 | 0.4968 (-0.0437) | 0.3312 (+0.0061) | 0.4606 (-0.0400) | 0.4295 (-0.0259) |
|
| 469 |
+
| 0.8132 | 4000 | 2.0817 | 2.0735 | 0.4961 (-0.0443) | 0.3446 (+0.0196) | 0.4805 (-0.0202) | 0.4404 (-0.0150) |
|
| 470 |
+
| 0.8335 | 4100 | 2.0713 | 2.0733 | 0.5315 (-0.0089) | 0.3370 (+0.0120) | 0.5393 (+0.0386) | 0.4693 (+0.0139) |
|
| 471 |
+
| 0.8538 | 4200 | 2.0718 | 2.0735 | 0.4998 (-0.0406) | 0.3226 (-0.0025) | 0.5128 (+0.0121) | 0.4451 (-0.0103) |
|
| 472 |
+
| 0.8742 | 4300 | 2.0626 | 2.0730 | 0.5063 (-0.0341) | 0.3376 (+0.0126) | 0.5289 (+0.0282) | 0.4576 (+0.0022) |
|
| 473 |
+
| 0.8945 | 4400 | 2.0721 | 2.0731 | 0.4800 (-0.0604) | 0.3110 (-0.0141) | 0.5302 (+0.0295) | 0.4404 (-0.0150) |
|
| 474 |
+
| 0.9148 | 4500 | 2.0751 | 2.0731 | 0.4781 (-0.0624) | 0.3241 (-0.0010) | 0.5010 (+0.0003) | 0.4344 (-0.0210) |
|
| 475 |
+
| 0.9351 | 4600 | 2.0716 | 2.0727 | 0.4995 (-0.0409) | 0.3046 (-0.0205) | 0.5238 (+0.0231) | 0.4426 (-0.0127) |
|
| 476 |
+
| 0.9555 | 4700 | 2.0658 | 2.0729 | 0.4740 (-0.0664) | 0.3319 (+0.0069) | 0.5070 (+0.0063) | 0.4376 (-0.0177) |
|
| 477 |
+
| 0.9758 | 4800 | 2.0814 | 2.0731 | 0.5199 (-0.0205) | 0.3372 (+0.0121) | 0.4906 (-0.0101) | 0.4492 (-0.0061) |
|
| 478 |
+
| 0.9961 | 4900 | 2.0752 | 2.0728 | 0.5175 (-0.0230) | 0.3347 (+0.0097) | 0.5366 (+0.0359) | 0.4629 (+0.0075) |
|
| 479 |
+
| 1.0165 | 5000 | 2.07 | 2.0732 | 0.5194 (-0.0210) | 0.3356 (+0.0105) | 0.5315 (+0.0308) | 0.4622 (+0.0068) |
|
| 480 |
+
| 1.0368 | 5100 | 2.067 | 2.0730 | 0.4784 (-0.0620) | 0.3256 (+0.0005) | 0.5260 (+0.0254) | 0.4433 (-0.0120) |
|
| 481 |
+
| 1.0571 | 5200 | 2.0633 | 2.0729 | 0.4808 (-0.0597) | 0.3371 (+0.0120) | 0.4634 (-0.0372) | 0.4271 (-0.0283) |
|
| 482 |
+
| 1.0775 | 5300 | 2.0699 | 2.0728 | 0.4909 (-0.0495) | 0.3280 (+0.0029) | 0.5489 (+0.0483) | 0.4559 (+0.0006) |
|
| 483 |
+
| 1.0978 | 5400 | 2.0734 | 2.0733 | 0.4490 (-0.0914) | 0.3351 (+0.0100) | 0.5282 (+0.0276) | 0.4374 (-0.0179) |
|
| 484 |
+
| 1.1181 | 5500 | 2.0673 | 2.0737 | 0.4821 (-0.0583) | 0.3540 (+0.0289) | 0.5481 (+0.0475) | 0.4614 (+0.0060) |
|
| 485 |
+
| 1.1384 | 5600 | 2.0729 | 2.0738 | 0.5275 (-0.0129) | 0.3228 (-0.0023) | 0.5187 (+0.0180) | 0.4563 (+0.0009) |
|
| 486 |
+
| 1.1588 | 5700 | 2.0699 | 2.0742 | 0.5283 (-0.0121) | 0.3190 (-0.0060) | 0.5340 (+0.0334) | 0.4604 (+0.0051) |
|
| 487 |
+
| 1.1791 | 5800 | 2.0637 | 2.0727 | 0.5147 (-0.0257) | 0.3491 (+0.0240) | 0.5184 (+0.0177) | 0.4607 (+0.0054) |
|
| 488 |
+
| 1.1994 | 5900 | 2.0752 | 2.0734 | 0.5300 (-0.0104) | 0.2904 (-0.0347) | 0.5503 (+0.0496) | 0.4569 (+0.0015) |
|
| 489 |
+
| 1.2198 | 6000 | 2.0731 | 2.0727 | 0.5173 (-0.0231) | 0.2854 (-0.0396) | 0.5343 (+0.0336) | 0.4457 (-0.0097) |
|
| 490 |
+
| 1.2401 | 6100 | 2.0713 | 2.0732 | 0.4748 (-0.0656) | 0.2927 (-0.0324) | 0.5062 (+0.0055) | 0.4246 (-0.0308) |
|
| 491 |
+
| 1.2604 | 6200 | 2.0716 | 2.0723 | 0.4342 (-0.1063) | 0.3092 (-0.0159) | 0.5108 (+0.0101) | 0.4180 (-0.0373) |
|
| 492 |
+
| 1.2807 | 6300 | 2.0649 | 2.0729 | 0.5001 (-0.0403) | 0.3297 (+0.0046) | 0.5548 (+0.0541) | 0.4615 (+0.0062) |
|
| 493 |
+
| 1.3011 | 6400 | 2.079 | 2.0731 | 0.4775 (-0.0629) | 0.3333 (+0.0082) | 0.5325 (+0.0319) | 0.4478 (-0.0076) |
|
| 494 |
+
| 1.3214 | 6500 | 2.0719 | 2.0731 | 0.4518 (-0.0887) | 0.3119 (-0.0131) | 0.4969 (-0.0037) | 0.4202 (-0.0352) |
|
| 495 |
+
| 1.3417 | 6600 | 2.0667 | 2.0744 | 0.4899 (-0.0505) | 0.3422 (+0.0172) | 0.5327 (+0.0320) | 0.4549 (-0.0004) |
|
| 496 |
+
| 1.3621 | 6700 | 2.0592 | 2.0735 | 0.4711 (-0.0693) | 0.3202 (-0.0049) | 0.5357 (+0.0350) | 0.4423 (-0.0131) |
|
| 497 |
+
| 1.3824 | 6800 | 2.0716 | 2.0735 | 0.4858 (-0.0546) | 0.3707 (+0.0457) | 0.5436 (+0.0429) | 0.4667 (+0.0113) |
|
| 498 |
+
| 1.4027 | 6900 | 2.0745 | 2.0735 | 0.4789 (-0.0615) | 0.3401 (+0.0151) | 0.4967 (-0.0040) | 0.4386 (-0.0168) |
|
| 499 |
+
| 1.4231 | 7000 | 2.0752 | 2.0737 | 0.4825 (-0.0579) | 0.3319 (+0.0069) | 0.5618 (+0.0611) | 0.4588 (+0.0034) |
|
| 500 |
+
| 1.4434 | 7100 | 2.0693 | 2.0734 | 0.4729 (-0.0675) | 0.3591 (+0.0341) | 0.4829 (-0.0178) | 0.4383 (-0.0171) |
|
| 501 |
+
| 1.4637 | 7200 | 2.0618 | 2.0741 | 0.4753 (-0.0651) | 0.3582 (+0.0332) | 0.4849 (-0.0158) | 0.4395 (-0.0159) |
|
| 502 |
+
| 1.4840 | 7300 | 2.0713 | 2.0746 | 0.4826 (-0.0578) | 0.3443 (+0.0193) | 0.5181 (+0.0175) | 0.4483 (-0.0070) |
|
| 503 |
+
| 1.5044 | 7400 | 2.0617 | 2.0751 | 0.5096 (-0.0309) | 0.3586 (+0.0335) | 0.5143 (+0.0136) | 0.4608 (+0.0054) |
|
| 504 |
+
| 1.5247 | 7500 | 2.0721 | 2.0733 | 0.4808 (-0.0597) | 0.3472 (+0.0221) | 0.4730 (-0.0277) | 0.4336 (-0.0217) |
|
| 505 |
+
| 1.5450 | 7600 | 2.0693 | 2.0733 | 0.4759 (-0.0645) | 0.3290 (+0.0039) | 0.4984 (-0.0022) | 0.4344 (-0.0209) |
|
| 506 |
+
| 1.5654 | 7700 | 2.0702 | 2.0732 | 0.4611 (-0.0793) | 0.3515 (+0.0264) | 0.4963 (-0.0043) | 0.4363 (-0.0191) |
|
| 507 |
+
| 1.5857 | 7800 | 2.0649 | 2.0728 | 0.4951 (-0.0453) | 0.3599 (+0.0348) | 0.5251 (+0.0245) | 0.4600 (+0.0047) |
|
| 508 |
+
| 1.6060 | 7900 | 2.0647 | 2.0731 | 0.4860 (-0.0544) | 0.3500 (+0.0250) | 0.5253 (+0.0246) | 0.4538 (-0.0016) |
|
| 509 |
+
| **1.6263** | **8000** | **2.0681** | **2.0731** | **0.5348 (-0.0056)** | **0.3376 (+0.0126)** | **0.5534 (+0.0528)** | **0.4753 (+0.0199)** |
|
| 510 |
+
| 1.6467 | 8100 | 2.0681 | 2.0730 | 0.4773 (-0.0632) | 0.3389 (+0.0139) | 0.4954 (-0.0052) | 0.4372 (-0.0182) |
|
| 511 |
+
| 1.6670 | 8200 | 2.0557 | 2.0728 | 0.4691 (-0.0713) | 0.3126 (-0.0125) | 0.5073 (+0.0067) | 0.4297 (-0.0257) |
|
| 512 |
+
| 1.6873 | 8300 | 2.0599 | 2.0735 | 0.4843 (-0.0561) | 0.3188 (-0.0062) | 0.5367 (+0.0361) | 0.4466 (-0.0087) |
|
| 513 |
+
| 1.7077 | 8400 | 2.0629 | 2.0730 | 0.4820 (-0.0584) | 0.3386 (+0.0135) | 0.4855 (-0.0151) | 0.4354 (-0.0200) |
|
| 514 |
+
| 1.7280 | 8500 | 2.0563 | 2.0732 | 0.4585 (-0.0819) | 0.3454 (+0.0203) | 0.5089 (+0.0083) | 0.4376 (-0.0178) |
|
| 515 |
+
| 1.7483 | 8600 | 2.0647 | 2.0731 | 0.4738 (-0.0666) | 0.3337 (+0.0087) | 0.5027 (+0.0020) | 0.4367 (-0.0186) |
|
| 516 |
+
| 1.7687 | 8700 | 2.0631 | 2.0733 | 0.4690 (-0.0714) | 0.3416 (+0.0165) | 0.5090 (+0.0083) | 0.4398 (-0.0155) |
|
| 517 |
+
| 1.7890 | 8800 | 2.0682 | 2.0733 | 0.4720 (-0.0685) | 0.3311 (+0.0061) | 0.5388 (+0.0382) | 0.4473 (-0.0081) |
|
| 518 |
+
| 1.8093 | 8900 | 2.0547 | 2.0728 | 0.4871 (-0.0533) | 0.3332 (+0.0082) | 0.5293 (+0.0286) | 0.4499 (-0.0055) |
|
| 519 |
+
| 1.8296 | 9000 | 2.0667 | 2.0731 | 0.5003 (-0.0401) | 0.3305 (+0.0054) | 0.4953 (-0.0053) | 0.4420 (-0.0133) |
|
| 520 |
+
| 1.8500 | 9100 | 2.0588 | 2.0731 | 0.5349 (-0.0055) | 0.3523 (+0.0272) | 0.5115 (+0.0108) | 0.4662 (+0.0109) |
|
| 521 |
+
| 1.8703 | 9200 | 2.0591 | 2.0736 | 0.4988 (-0.0417) | 0.3434 (+0.0183) | 0.5140 (+0.0134) | 0.4521 (-0.0033) |
|
| 522 |
+
| 1.8906 | 9300 | 2.0666 | 2.0741 | 0.4989 (-0.0416) | 0.3505 (+0.0255) | 0.5558 (+0.0552) | 0.4684 (+0.0130) |
|
| 523 |
+
| 1.9110 | 9400 | 2.0684 | 2.0737 | 0.4941 (-0.0463) | 0.3548 (+0.0298) | 0.5593 (+0.0587) | 0.4694 (+0.0140) |
|
| 524 |
+
| 1.9313 | 9500 | 2.0706 | 2.0732 | 0.5112 (-0.0292) | 0.3536 (+0.0286) | 0.5367 (+0.0360) | 0.4672 (+0.0118) |
|
| 525 |
+
| 1.9516 | 9600 | 2.076 | 2.0731 | 0.4762 (-0.0642) | 0.3367 (+0.0117) | 0.5014 (+0.0008) | 0.4381 (-0.0172) |
|
| 526 |
+
| 1.9719 | 9700 | 2.0686 | 2.0735 | 0.4828 (-0.0577) | 0.3201 (-0.0050) | 0.5317 (+0.0310) | 0.4448 (-0.0105) |
|
| 527 |
+
| 1.9923 | 9800 | 2.0685 | 2.0731 | 0.4866 (-0.0538) | 0.3322 (+0.0072) | 0.5527 (+0.0520) | 0.4572 (+0.0018) |
|
| 528 |
+
| 2.0126 | 9900 | 2.0659 | 2.0744 | 0.4655 (-0.0749) | 0.3583 (+0.0333) | 0.5306 (+0.0299) | 0.4515 (-0.0039) |
|
| 529 |
+
| 2.0329 | 10000 | 2.057 | 2.0743 | 0.4423 (-0.0981) | 0.3313 (+0.0063) | 0.5204 (+0.0198) | 0.4314 (-0.0240) |
|
| 530 |
+
| 2.0533 | 10100 | 2.0551 | 2.0751 | 0.4247 (-0.1157) | 0.3349 (+0.0099) | 0.5259 (+0.0252) | 0.4285 (-0.0269) |
|
| 531 |
+
| 2.0736 | 10200 | 2.0609 | 2.0763 | 0.4395 (-0.1009) | 0.3424 (+0.0174) | 0.5479 (+0.0473) | 0.4433 (-0.0121) |
|
| 532 |
+
| 2.0939 | 10300 | 2.0467 | 2.0758 | 0.4182 (-0.1222) | 0.3349 (+0.0099) | 0.5230 (+0.0224) | 0.4254 (-0.0300) |
|
| 533 |
+
| 2.1143 | 10400 | 2.0578 | 2.0760 | 0.4218 (-0.1187) | 0.3301 (+0.0051) | 0.5107 (+0.0101) | 0.4209 (-0.0345) |
|
| 534 |
+
| 2.1346 | 10500 | 2.0549 | 2.0755 | 0.4439 (-0.0966) | 0.3400 (+0.0150) | 0.5085 (+0.0078) | 0.4308 (-0.0246) |
|
| 535 |
+
| 2.1549 | 10600 | 2.0538 | 2.0761 | 0.4437 (-0.0967) | 0.3527 (+0.0277) | 0.5051 (+0.0045) | 0.4338 (-0.0215) |
|
| 536 |
+
| 2.1752 | 10700 | 2.0626 | 2.0755 | 0.4528 (-0.0876) | 0.3517 (+0.0266) | 0.5538 (+0.0532) | 0.4528 (-0.0026) |
|
| 537 |
+
| 2.1956 | 10800 | 2.0543 | 2.0754 | 0.4374 (-0.1030) | 0.3586 (+0.0336) | 0.5347 (+0.0341) | 0.4436 (-0.0118) |
|
| 538 |
+
| 2.2159 | 10900 | 2.0636 | 2.0762 | 0.4296 (-0.1109) | 0.3473 (+0.0222) | 0.5240 (+0.0234) | 0.4336 (-0.0218) |
|
| 539 |
+
| 2.2362 | 11000 | 2.0584 | 2.0762 | 0.4410 (-0.0995) | 0.3428 (+0.0177) | 0.5112 (+0.0105) | 0.4316 (-0.0237) |
|
| 540 |
+
| 2.2566 | 11100 | 2.0562 | 2.0756 | 0.4393 (-0.1011) | 0.3392 (+0.0142) | 0.5114 (+0.0108) | 0.4300 (-0.0254) |
|
| 541 |
+
| 2.2769 | 11200 | 2.0563 | 2.0776 | 0.4329 (-0.1075) | 0.3552 (+0.0301) | 0.5034 (+0.0027) | 0.4305 (-0.0249) |
|
| 542 |
+
| 2.2972 | 11300 | 2.0534 | 2.0762 | 0.4060 (-0.1345) | 0.3490 (+0.0240) | 0.5336 (+0.0330) | 0.4295 (-0.0258) |
|
| 543 |
+
| 2.3175 | 11400 | 2.0626 | 2.0769 | 0.4111 (-0.1293) | 0.3346 (+0.0095) | 0.5408 (+0.0402) | 0.4288 (-0.0266) |
|
| 544 |
+
| 2.3379 | 11500 | 2.0621 | 2.0764 | 0.4310 (-0.1094) | 0.3317 (+0.0067) | 0.5368 (+0.0361) | 0.4332 (-0.0222) |
|
| 545 |
+
| 2.3582 | 11600 | 2.0647 | 2.0765 | 0.4479 (-0.0925) | 0.3271 (+0.0021) | 0.5405 (+0.0398) | 0.4385 (-0.0169) |
|
| 546 |
+
| 2.3785 | 11700 | 2.0625 | 2.0767 | 0.4282 (-0.1122) | 0.3414 (+0.0164) | 0.5072 (+0.0066) | 0.4256 (-0.0298) |
|
| 547 |
+
| 2.3989 | 11800 | 2.054 | 2.0778 | 0.4244 (-0.1160) | 0.3420 (+0.0170) | 0.5139 (+0.0133) | 0.4268 (-0.0286) |
|
| 548 |
+
| 2.4192 | 11900 | 2.0559 | 2.0756 | 0.4251 (-0.1153) | 0.3303 (+0.0053) | 0.5061 (+0.0055) | 0.4205 (-0.0349) |
|
| 549 |
+
| 2.4395 | 12000 | 2.0612 | 2.0758 | 0.4456 (-0.0948) | 0.3406 (+0.0155) | 0.5353 (+0.0347) | 0.4405 (-0.0149) |
|
| 550 |
+
| 2.4598 | 12100 | 2.0574 | 2.0770 | 0.4485 (-0.0919) | 0.3359 (+0.0109) | 0.5220 (+0.0214) | 0.4355 (-0.0199) |
|
| 551 |
+
| 2.4802 | 12200 | 2.0598 | 2.0751 | 0.4582 (-0.0823) | 0.3468 (+0.0218) | 0.5405 (+0.0398) | 0.4485 (-0.0069) |
|
| 552 |
+
| 2.5005 | 12300 | 2.0578 | 2.0764 | 0.4600 (-0.0804) | 0.3446 (+0.0195) | 0.5276 (+0.0270) | 0.4441 (-0.0113) |
|
| 553 |
+
| 2.5208 | 12400 | 2.0493 | 2.0759 | 0.4580 (-0.0824) | 0.3220 (-0.0030) | 0.5147 (+0.0140) | 0.4316 (-0.0238) |
|
| 554 |
+
| 2.5412 | 12500 | 2.0574 | 2.0767 | 0.4741 (-0.0663) | 0.3381 (+0.0131) | 0.5110 (+0.0104) | 0.4411 (-0.0143) |
|
| 555 |
+
| 2.5615 | 12600 | 2.0598 | 2.0768 | 0.4541 (-0.0863) | 0.3234 (-0.0016) | 0.5136 (+0.0129) | 0.4304 (-0.0250) |
|
| 556 |
+
| 2.5818 | 12700 | 2.0715 | 2.0763 | 0.4693 (-0.0711) | 0.3467 (+0.0217) | 0.5193 (+0.0187) | 0.4451 (-0.0103) |
|
| 557 |
+
| 2.6022 | 12800 | 2.0566 | 2.0763 | 0.4747 (-0.0657) | 0.3480 (+0.0230) | 0.5208 (+0.0202) | 0.4478 (-0.0075) |
|
| 558 |
+
| 2.6225 | 12900 | 2.0607 | 2.0761 | 0.4511 (-0.0894) | 0.3319 (+0.0069) | 0.4955 (-0.0051) | 0.4262 (-0.0292) |
|
| 559 |
+
| 2.6428 | 13000 | 2.0574 | 2.0752 | 0.4574 (-0.0831) | 0.3564 (+0.0313) | 0.4903 (-0.0104) | 0.4347 (-0.0207) |
|
| 560 |
+
| 2.6631 | 13100 | 2.0625 | 2.0772 | 0.4609 (-0.0796) | 0.3451 (+0.0201) | 0.5293 (+0.0287) | 0.4451 (-0.0103) |
|
| 561 |
+
| 2.6835 | 13200 | 2.054 | 2.0770 | 0.4526 (-0.0878) | 0.3378 (+0.0128) | 0.5307 (+0.0300) | 0.4404 (-0.0150) |
|
| 562 |
+
| 2.7038 | 13300 | 2.0601 | 2.0769 | 0.4493 (-0.0912) | 0.3382 (+0.0132) | 0.5327 (+0.0320) | 0.4400 (-0.0153) |
|
| 563 |
+
| 2.7241 | 13400 | 2.0557 | 2.0758 | 0.4387 (-0.1018) | 0.3422 (+0.0171) | 0.5319 (+0.0312) | 0.4376 (-0.0178) |
|
| 564 |
+
| 2.7445 | 13500 | 2.0509 | 2.0770 | 0.4426 (-0.0978) | 0.3303 (+0.0053) | 0.4979 (-0.0028) | 0.4236 (-0.0318) |
|
| 565 |
+
| 2.7648 | 13600 | 2.0502 | 2.0769 | 0.4530 (-0.0874) | 0.3406 (+0.0156) | 0.5257 (+0.0250) | 0.4398 (-0.0156) |
|
| 566 |
+
| 2.7851 | 13700 | 2.0585 | 2.0760 | 0.4333 (-0.1072) | 0.3447 (+0.0196) | 0.5122 (+0.0116) | 0.4300 (-0.0253) |
|
| 567 |
+
| 2.8054 | 13800 | 2.0602 | 2.0761 | 0.4400 (-0.1004) | 0.3388 (+0.0138) | 0.5119 (+0.0113) | 0.4303 (-0.0251) |
|
| 568 |
+
| 2.8258 | 13900 | 2.0523 | 2.0782 | 0.4115 (-0.1289) | 0.3512 (+0.0261) | 0.5118 (+0.0111) | 0.4248 (-0.0305) |
|
| 569 |
+
| 2.8461 | 14000 | 2.0546 | 2.0771 | 0.4530 (-0.0874) | 0.3560 (+0.0309) | 0.5273 (+0.0267) | 0.4454 (-0.0099) |
|
| 570 |
+
| 2.8664 | 14100 | 2.0515 | 2.0764 | 0.4159 (-0.1245) | 0.3488 (+0.0238) | 0.5244 (+0.0238) | 0.4297 (-0.0257) |
|
| 571 |
+
| 2.8868 | 14200 | 2.0548 | 2.0757 | 0.4290 (-0.1115) | 0.3576 (+0.0326) | 0.5564 (+0.0558) | 0.4477 (-0.0077) |
|
| 572 |
+
| 2.9071 | 14300 | 2.0531 | 2.0759 | 0.4341 (-0.1063) | 0.3574 (+0.0323) | 0.5224 (+0.0218) | 0.4380 (-0.0174) |
|
| 573 |
+
| 2.9274 | 14400 | 2.0682 | 2.0766 | 0.4335 (-0.1069) | 0.3535 (+0.0285) | 0.5039 (+0.0032) | 0.4303 (-0.0251) |
|
| 574 |
+
| 2.9478 | 14500 | 2.0547 | 2.0769 | 0.4308 (-0.1096) | 0.3521 (+0.0271) | 0.4970 (-0.0037) | 0.4266 (-0.0287) |
|
| 575 |
+
| 2.9681 | 14600 | 2.0563 | 2.0751 | 0.4190 (-0.1214) | 0.3396 (+0.0145) | 0.4882 (-0.0124) | 0.4156 (-0.0398) |
|
| 576 |
+
| 2.9884 | 14700 | 2.0611 | 2.0767 | 0.4186 (-0.1219) | 0.3486 (+0.0236) | 0.5440 (+0.0434) | 0.4371 (-0.0183) |
|
| 577 |
+
| 3.0087 | 14800 | 2.0523 | 2.0781 | 0.4008 (-0.1396) | 0.3537 (+0.0286) | 0.4994 (-0.0013) | 0.4179 (-0.0374) |
|
| 578 |
+
| 3.0291 | 14900 | 2.0472 | 2.0788 | 0.3772 (-0.1633) | 0.3552 (+0.0301) | 0.5034 (+0.0028) | 0.4119 (-0.0434) |
|
| 579 |
+
| 3.0494 | 15000 | 2.0432 | 2.0790 | 0.4011 (-0.1393) | 0.3429 (+0.0178) | 0.4759 (-0.0247) | 0.4066 (-0.0487) |
|
| 580 |
+
| 3.0697 | 15100 | 2.041 | 2.0807 | 0.3915 (-0.1489) | 0.3361 (+0.0111) | 0.4776 (-0.0230) | 0.4018 (-0.0536) |
|
| 581 |
+
| 3.0901 | 15200 | 2.0505 | 2.0820 | 0.3796 (-0.1609) | 0.3365 (+0.0115) | 0.5330 (+0.0323) | 0.4163 (-0.0390) |
|
| 582 |
+
| 3.1104 | 15300 | 2.0542 | 2.0816 | 0.3552 (-0.1852) | 0.3331 (+0.0081) | 0.4838 (-0.0168) | 0.3907 (-0.0646) |
|
| 583 |
+
| 3.1307 | 15400 | 2.0346 | 2.0796 | 0.3655 (-0.1749) | 0.3405 (+0.0154) | 0.4532 (-0.0474) | 0.3864 (-0.0690) |
|
| 584 |
+
| 3.1510 | 15500 | 2.0384 | 2.0810 | 0.3707 (-0.1697) | 0.3496 (+0.0246) | 0.4960 (-0.0046) | 0.4054 (-0.0499) |
|
| 585 |
+
| 3.1714 | 15600 | 2.0481 | 2.0800 | 0.3712 (-0.1692) | 0.3249 (-0.0002) | 0.4778 (-0.0229) | 0.3913 (-0.0641) |
|
| 586 |
+
| 3.1917 | 15700 | 2.0423 | 2.0804 | 0.4017 (-0.1387) | 0.3351 (+0.0100) | 0.4836 (-0.0171) | 0.4068 (-0.0486) |
|
| 587 |
+
| 3.2120 | 15800 | 2.0428 | 2.0811 | 0.3726 (-0.1678) | 0.3299 (+0.0049) | 0.4817 (-0.0190) | 0.3948 (-0.0606) |
|
| 588 |
+
| 3.2324 | 15900 | 2.04 | 2.0792 | 0.3549 (-0.1855) | 0.3298 (+0.0048) | 0.4805 (-0.0201) | 0.3884 (-0.0670) |
|
| 589 |
+
| 3.2527 | 16000 | 2.0401 | 2.0810 | 0.3865 (-0.1539) | 0.3375 (+0.0125) | 0.4841 (-0.0165) | 0.4027 (-0.0526) |
|
| 590 |
+
| 3.2730 | 16100 | 2.049 | 2.0807 | 0.3778 (-0.1626) | 0.3410 (+0.0159) | 0.4833 (-0.0173) | 0.4007 (-0.0547) |
|
| 591 |
+
| 3.2934 | 16200 | 2.0434 | 2.0801 | 0.3910 (-0.1494) | 0.3415 (+0.0165) | 0.4817 (-0.0189) | 0.4048 (-0.0506) |
|
| 592 |
+
| 3.3137 | 16300 | 2.0478 | 2.0795 | 0.3813 (-0.1592) | 0.3448 (+0.0198) | 0.4504 (-0.0503) | 0.3921 (-0.0632) |
|
| 593 |
+
| 3.3340 | 16400 | 2.0405 | 2.0803 | 0.3730 (-0.1674) | 0.3376 (+0.0126) | 0.4523 (-0.0484) | 0.3876 (-0.0677) |
|
| 594 |
+
| 3.3543 | 16500 | 2.0416 | 2.0799 | 0.3515 (-0.1889) | 0.3478 (+0.0228) | 0.4594 (-0.0412) | 0.3863 (-0.0691) |
|
| 595 |
+
| 3.3747 | 16600 | 2.0511 | 2.0814 | 0.3559 (-0.1846) | 0.3309 (+0.0059) | 0.4547 (-0.0460) | 0.3805 (-0.0749) |
|
| 596 |
+
| 3.3950 | 16700 | 2.0449 | 2.0808 | 0.3683 (-0.1721) | 0.3321 (+0.0070) | 0.4841 (-0.0166) | 0.3948 (-0.0606) |
|
| 597 |
+
| 3.4153 | 16800 | 2.0442 | 2.0817 | 0.3908 (-0.1497) | 0.3318 (+0.0067) | 0.4735 (-0.0272) | 0.3987 (-0.0567) |
|
| 598 |
+
| 3.4357 | 16900 | 2.0436 | 2.0806 | 0.4016 (-0.1388) | 0.3215 (-0.0035) | 0.4638 (-0.0368) | 0.3957 (-0.0597) |
|
| 599 |
+
| 3.4560 | 17000 | 2.0542 | 2.0803 | 0.3913 (-0.1491) | 0.3333 (+0.0082) | 0.4499 (-0.0507) | 0.3915 (-0.0639) |
|
| 600 |
+
| 3.4763 | 17100 | 2.0527 | 2.0802 | 0.3967 (-0.1437) | 0.3430 (+0.0179) | 0.4714 (-0.0292) | 0.4037 (-0.0517) |
|
| 601 |
+
| 3.4966 | 17200 | 2.0435 | 2.0801 | 0.3727 (-0.1678) | 0.3320 (+0.0069) | 0.4553 (-0.0454) | 0.3866 (-0.0687) |
|
| 602 |
+
| 3.5170 | 17300 | 2.0449 | 2.0798 | 0.3930 (-0.1474) | 0.3325 (+0.0075) | 0.4417 (-0.0590) | 0.3891 (-0.0663) |
|
| 603 |
+
| 3.5373 | 17400 | 2.0527 | 2.0796 | 0.3849 (-0.1555) | 0.3438 (+0.0188) | 0.4356 (-0.0651) | 0.3881 (-0.0673) |
|
| 604 |
+
| 3.5576 | 17500 | 2.0492 | 2.0814 | 0.4077 (-0.1327) | 0.3351 (+0.0100) | 0.4579 (-0.0427) | 0.4002 (-0.0551) |
|
| 605 |
+
| 3.5780 | 17600 | 2.0472 | 2.0803 | 0.4048 (-0.1356) | 0.3323 (+0.0073) | 0.4294 (-0.0713) | 0.3888 (-0.0665) |
|
| 606 |
+
| 3.5983 | 17700 | 2.0484 | 2.0801 | 0.3831 (-0.1573) | 0.3271 (+0.0020) | 0.4581 (-0.0426) | 0.3894 (-0.0660) |
|
| 607 |
+
| 3.6186 | 17800 | 2.0529 | 2.0794 | 0.3879 (-0.1525) | 0.3381 (+0.0130) | 0.4335 (-0.0672) | 0.3865 (-0.0689) |
|
| 608 |
+
| 3.6390 | 17900 | 2.0402 | 2.0810 | 0.3675 (-0.1729) | 0.3263 (+0.0013) | 0.4520 (-0.0486) | 0.3819 (-0.0734) |
|
| 609 |
+
| 3.6593 | 18000 | 2.0498 | 2.0788 | 0.3902 (-0.1503) | 0.3411 (+0.0161) | 0.3960 (-0.1046) | 0.3758 (-0.0796) |
|
| 610 |
+
| 3.6796 | 18100 | 2.048 | 2.0807 | 0.3882 (-0.1523) | 0.3259 (+0.0008) | 0.4099 (-0.0907) | 0.3746 (-0.0807) |
|
| 611 |
+
| 3.6999 | 18200 | 2.0464 | 2.0797 | 0.3804 (-0.1601) | 0.3259 (+0.0009) | 0.4418 (-0.0589) | 0.3827 (-0.0727) |
|
| 612 |
+
| 3.7203 | 18300 | 2.0503 | 2.0816 | 0.3630 (-0.1774) | 0.3278 (+0.0028) | 0.4371 (-0.0635) | 0.3760 (-0.0794) |
|
| 613 |
+
| 3.7406 | 18400 | 2.0401 | 2.0805 | 0.3580 (-0.1824) | 0.3216 (-0.0035) | 0.4336 (-0.0670) | 0.3711 (-0.0843) |
|
| 614 |
+
| 3.7609 | 18500 | 2.0383 | 2.0820 | 0.3589 (-0.1816) | 0.3269 (+0.0019) | 0.4634 (-0.0373) | 0.3831 (-0.0723) |
|
| 615 |
+
| 3.7813 | 18600 | 2.0454 | 2.0801 | 0.3806 (-0.1598) | 0.3215 (-0.0036) | 0.4354 (-0.0652) | 0.3792 (-0.0762) |
|
| 616 |
+
| 3.8016 | 18700 | 2.0457 | 2.0811 | 0.3678 (-0.1727) | 0.3135 (-0.0116) | 0.4221 (-0.0785) | 0.3678 (-0.0876) |
|
| 617 |
+
| 3.8219 | 18800 | 2.0449 | 2.0813 | 0.3762 (-0.1642) | 0.3113 (-0.0137) | 0.4399 (-0.0608) | 0.3758 (-0.0796) |
|
| 618 |
+
| 3.8422 | 18900 | 2.0451 | 2.0797 | 0.3705 (-0.1699) | 0.3155 (-0.0095) | 0.4345 (-0.0662) | 0.3735 (-0.0819) |
|
| 619 |
+
| 3.8626 | 19000 | 2.045 | 2.0802 | 0.3937 (-0.1467) | 0.3086 (-0.0164) | 0.4491 (-0.0516) | 0.3838 (-0.0716) |
|
| 620 |
+
| 3.8829 | 19100 | 2.0551 | 2.0801 | 0.3814 (-0.1590) | 0.3164 (-0.0087) | 0.4643 (-0.0363) | 0.3874 (-0.0680) |
|
| 621 |
+
| 3.9032 | 19200 | 2.0368 | 2.0795 | 0.3833 (-0.1571) | 0.3175 (-0.0076) | 0.4464 (-0.0542) | 0.3824 (-0.0730) |
|
| 622 |
+
| 3.9236 | 19300 | 2.046 | 2.0798 | 0.3885 (-0.1519) | 0.3074 (-0.0177) | 0.4416 (-0.0590) | 0.3792 (-0.0762) |
|
| 623 |
+
| 3.9439 | 19400 | 2.0507 | 2.0802 | 0.3913 (-0.1491) | 0.3150 (-0.0100) | 0.4341 (-0.0665) | 0.3801 (-0.0752) |
|
| 624 |
+
| 3.9642 | 19500 | 2.0555 | 2.0811 | 0.3892 (-0.1513) | 0.3141 (-0.0109) | 0.4466 (-0.0540) | 0.3833 (-0.0721) |
|
| 625 |
+
| 3.9845 | 19600 | 2.0433 | 2.0810 | 0.3827 (-0.1577) | 0.3124 (-0.0126) | 0.4482 (-0.0525) | 0.3811 (-0.0743) |
|
| 626 |
+
| 4.0049 | 19700 | 2.0403 | 2.0836 | 0.4009 (-0.1396) | 0.3096 (-0.0154) | 0.4474 (-0.0532) | 0.3860 (-0.0694) |
|
| 627 |
+
| 4.0252 | 19800 | 2.0413 | 2.0828 | 0.3905 (-0.1499) | 0.3221 (-0.0030) | 0.4009 (-0.0998) | 0.3712 (-0.0842) |
|
| 628 |
+
| 4.0455 | 19900 | 2.0321 | 2.0827 | 0.3910 (-0.1494) | 0.3286 (+0.0035) | 0.4228 (-0.0778) | 0.3808 (-0.0746) |
|
| 629 |
+
| 4.0659 | 20000 | 2.0293 | 2.0832 | 0.3854 (-0.1550) | 0.3256 (+0.0006) | 0.4328 (-0.0679) | 0.3813 (-0.0741) |
|
| 630 |
+
| 4.0862 | 20100 | 2.0347 | 2.0832 | 0.3677 (-0.1727) | 0.3254 (+0.0003) | 0.4106 (-0.0900) | 0.3679 (-0.0875) |
|
| 631 |
+
| 4.1065 | 20200 | 2.0377 | 2.0843 | 0.3814 (-0.1590) | 0.3097 (-0.0154) | 0.4060 (-0.0947) | 0.3657 (-0.0897) |
|
| 632 |
+
| 4.1269 | 20300 | 2.0335 | 2.0832 | 0.3756 (-0.1648) | 0.3149 (-0.0101) | 0.3943 (-0.1063) | 0.3616 (-0.0937) |
|
| 633 |
+
| 4.1472 | 20400 | 2.0417 | 2.0828 | 0.3596 (-0.1808) | 0.3171 (-0.0080) | 0.3837 (-0.1170) | 0.3535 (-0.1019) |
|
| 634 |
+
| 4.1675 | 20500 | 2.0369 | 2.0829 | 0.3665 (-0.1739) | 0.3166 (-0.0084) | 0.3854 (-0.1152) | 0.3562 (-0.0992) |
|
| 635 |
+
| 4.1878 | 20600 | 2.0362 | 2.0849 | 0.3731 (-0.1673) | 0.3201 (-0.0049) | 0.4080 (-0.0927) | 0.3671 (-0.0883) |
|
| 636 |
+
| 4.2082 | 20700 | 2.0323 | 2.0836 | 0.3580 (-0.1824) | 0.3278 (+0.0027) | 0.3857 (-0.1149) | 0.3572 (-0.0982) |
|
| 637 |
+
| 4.2285 | 20800 | 2.0383 | 2.0850 | 0.3620 (-0.1784) | 0.3230 (-0.0021) | 0.4202 (-0.0804) | 0.3684 (-0.0870) |
|
| 638 |
+
| 4.2488 | 20900 | 2.0286 | 2.0844 | 0.3646 (-0.1759) | 0.3286 (+0.0036) | 0.4114 (-0.0893) | 0.3682 (-0.0872) |
|
| 639 |
+
| 4.2692 | 21000 | 2.0305 | 2.0839 | 0.3681 (-0.1723) | 0.3274 (+0.0023) | 0.4230 (-0.0776) | 0.3729 (-0.0825) |
|
| 640 |
+
| 4.2895 | 21100 | 2.0381 | 2.0835 | 0.3720 (-0.1684) | 0.3228 (-0.0023) | 0.4009 (-0.0998) | 0.3652 (-0.0902) |
|
| 641 |
+
| 4.3098 | 21200 | 2.036 | 2.0837 | 0.3654 (-0.1750) | 0.3177 (-0.0074) | 0.4037 (-0.0969) | 0.3622 (-0.0931) |
|
| 642 |
+
| 4.3301 | 21300 | 2.0409 | 2.0843 | 0.3596 (-0.1808) | 0.3207 (-0.0044) | 0.4013 (-0.0993) | 0.3605 (-0.0948) |
|
| 643 |
+
| 4.3505 | 21400 | 2.0454 | 2.0842 | 0.3745 (-0.1659) | 0.3199 (-0.0051) | 0.4009 (-0.0997) | 0.3651 (-0.0902) |
|
| 644 |
+
| 4.3708 | 21500 | 2.0349 | 2.0839 | 0.3685 (-0.1720) | 0.3237 (-0.0013) | 0.3973 (-0.1033) | 0.3632 (-0.0922) |
|
| 645 |
+
| 4.3911 | 21600 | 2.0437 | 2.0832 | 0.3719 (-0.1685) | 0.3228 (-0.0022) | 0.4007 (-0.1000) | 0.3651 (-0.0902) |
|
| 646 |
+
| 4.4115 | 21700 | 2.034 | 2.0838 | 0.3768 (-0.1636) | 0.3313 (+0.0063) | 0.4150 (-0.0857) | 0.3744 (-0.0810) |
|
| 647 |
+
| 4.4318 | 21800 | 2.0308 | 2.0837 | 0.3719 (-0.1685) | 0.3264 (+0.0014) | 0.4148 (-0.0858) | 0.3711 (-0.0843) |
|
| 648 |
+
| 4.4521 | 21900 | 2.0427 | 2.0847 | 0.3695 (-0.1709) | 0.3210 (-0.0040) | 0.4038 (-0.0968) | 0.3648 (-0.0906) |
|
| 649 |
+
| 4.4725 | 22000 | 2.034 | 2.0845 | 0.3745 (-0.1659) | 0.3207 (-0.0043) | 0.3993 (-0.1013) | 0.3648 (-0.0905) |
|
| 650 |
+
| 4.4928 | 22100 | 2.0393 | 2.0827 | 0.3685 (-0.1719) | 0.3208 (-0.0042) | 0.3833 (-0.1173) | 0.3576 (-0.0978) |
|
| 651 |
+
| 4.5131 | 22200 | 2.0426 | 2.0834 | 0.3743 (-0.1661) | 0.3167 (-0.0084) | 0.4092 (-0.0914) | 0.3667 (-0.0887) |
|
| 652 |
+
| 4.5334 | 22300 | 2.0323 | 2.0839 | 0.3595 (-0.1809) | 0.3165 (-0.0086) | 0.3945 (-0.1061) | 0.3568 (-0.0985) |
|
| 653 |
+
| 4.5538 | 22400 | 2.0361 | 2.0834 | 0.3695 (-0.1709) | 0.3190 (-0.0061) | 0.4070 (-0.0937) | 0.3652 (-0.0902) |
|
| 654 |
+
| 4.5741 | 22500 | 2.0424 | 2.0838 | 0.3448 (-0.1957) | 0.3169 (-0.0081) | 0.3964 (-0.1043) | 0.3527 (-0.1027) |
|
| 655 |
+
| 4.5944 | 22600 | 2.038 | 2.0833 | 0.3617 (-0.1787) | 0.3170 (-0.0081) | 0.3965 (-0.1041) | 0.3584 (-0.0970) |
|
| 656 |
+
| 4.6148 | 22700 | 2.0406 | 2.0833 | 0.3707 (-0.1697) | 0.3188 (-0.0063) | 0.4003 (-0.1003) | 0.3633 (-0.0921) |
|
| 657 |
+
| 4.6351 | 22800 | 2.0396 | 2.0831 | 0.3710 (-0.1694) | 0.3206 (-0.0044) | 0.4028 (-0.0978) | 0.3648 (-0.0905) |
|
| 658 |
+
| 4.6554 | 22900 | 2.0362 | 2.0839 | 0.3639 (-0.1766) | 0.3210 (-0.0040) | 0.3956 (-0.1050) | 0.3602 (-0.0952) |
|
| 659 |
+
| 4.6757 | 23000 | 2.0508 | 2.0838 | 0.3545 (-0.1859) | 0.3205 (-0.0045) | 0.4059 (-0.0948) | 0.3603 (-0.0951) |
|
| 660 |
+
| 4.6961 | 23100 | 2.0443 | 2.0840 | 0.3719 (-0.1685) | 0.3198 (-0.0052) | 0.3937 (-0.1069) | 0.3618 (-0.0936) |
|
| 661 |
+
| 4.7164 | 23200 | 2.0316 | 2.0837 | 0.3773 (-0.1631) | 0.3199 (-0.0051) | 0.3951 (-0.1056) | 0.3641 (-0.0913) |
|
| 662 |
+
| 4.7367 | 23300 | 2.0444 | 2.0844 | 0.3749 (-0.1655) | 0.3179 (-0.0072) | 0.4029 (-0.0977) | 0.3652 (-0.0901) |
|
| 663 |
+
| 4.7571 | 23400 | 2.0464 | 2.0838 | 0.3596 (-0.1808) | 0.3167 (-0.0083) | 0.4037 (-0.0969) | 0.3600 (-0.0953) |
|
| 664 |
+
| 4.7774 | 23500 | 2.0297 | 2.0841 | 0.3694 (-0.1710) | 0.3165 (-0.0086) | 0.4087 (-0.0919) | 0.3649 (-0.0905) |
|
| 665 |
+
| 4.7977 | 23600 | 2.0305 | 2.0838 | 0.3628 (-0.1777) | 0.3192 (-0.0058) | 0.4024 (-0.0983) | 0.3615 (-0.0939) |
|
| 666 |
+
| 4.8181 | 23700 | 2.0421 | 2.0839 | 0.3559 (-0.1845) | 0.3185 (-0.0065) | 0.3963 (-0.1043) | 0.3569 (-0.0984) |
|
| 667 |
+
| 4.8384 | 23800 | 2.0331 | 2.0843 | 0.3645 (-0.1759) | 0.3176 (-0.0074) | 0.4027 (-0.0979) | 0.3616 (-0.0937) |
|
| 668 |
+
| 4.8587 | 23900 | 2.0331 | 2.0840 | 0.3628 (-0.1776) | 0.3184 (-0.0067) | 0.4042 (-0.0964) | 0.3618 (-0.0936) |
|
| 669 |
+
| 4.8790 | 24000 | 2.0361 | 2.0838 | 0.3623 (-0.1782) | 0.3171 (-0.0080) | 0.4024 (-0.0982) | 0.3606 (-0.0948) |
|
| 670 |
+
| 4.8994 | 24100 | 2.029 | 2.0842 | 0.3628 (-0.1777) | 0.3192 (-0.0058) | 0.4028 (-0.0979) | 0.3616 (-0.0938) |
|
| 671 |
+
| 4.9197 | 24200 | 2.0257 | 2.0844 | 0.3642 (-0.1763) | 0.3224 (-0.0027) | 0.3956 (-0.1050) | 0.3607 (-0.0947) |
|
| 672 |
+
| 4.9400 | 24300 | 2.0298 | 2.0842 | 0.3685 (-0.1719) | 0.3193 (-0.0057) | 0.3964 (-0.1043) | 0.3614 (-0.0939) |
|
| 673 |
+
| 4.9604 | 24400 | 2.0412 | 2.0841 | 0.3663 (-0.1742) | 0.3161 (-0.0090) | 0.4009 (-0.0997) | 0.3611 (-0.0943) |
|
| 674 |
+
| 4.9807 | 24500 | 2.0376 | 2.0840 | 0.3636 (-0.1768) | 0.3156 (-0.0095) | 0.4008 (-0.0998) | 0.3600 (-0.0954) |
|
| 675 |
+
| -1 | -1 | - | - | 0.5348 (-0.0056) | 0.3376 (+0.0126) | 0.5534 (+0.0528) | 0.4753 (+0.0199) |

* The bold row denotes the saved checkpoint.
</details>

### Framework Versions
- Python: 3.10.18
- Sentence Transformers: 5.0.0
- Transformers: 4.56.0.dev0
- PyTorch: 2.7.1+cu126
- Accelerate: 1.9.0
- Datasets: 4.0.0
- Tokenizers: 0.21.4

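As a quick orientation for these versions, here is a minimal usage sketch with the sentence-transformers `CrossEncoder` API; the repository id below is a placeholder and the example pair is illustrative only.

```python
# Minimal usage sketch (assumes the versions listed above are installed).
# "path/to/this-checkpoint" is a placeholder, not the published repository id.
from sentence_transformers import CrossEncoder

model = CrossEncoder("path/to/this-checkpoint")

# Score (query, passage) pairs; higher scores mean the passage is judged more relevant.
pairs = [
    ("how many calories in an egg",
     "There are on average between 55 and 80 calories in an egg."),
    ("how many calories in an egg",
     "Eggs are laid by female animals of many different species."),
]
print(model.predict(pairs))
```
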
## Citation

### BibTeX

#### Sentence Transformers
```bibtex
@inproceedings{reimers-2019-sentence-bert,
    title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
    author = "Reimers, Nils and Gurevych, Iryna",
    booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
    month = "11",
    year = "2019",
    publisher = "Association for Computational Linguistics",
    url = "https://arxiv.org/abs/1908.10084",
}
```

#### ListNetLoss
```bibtex
@inproceedings{cao2007learning,
    title={Learning to Rank: From Pairwise Approach to Listwise Approach},
    author={Cao, Zhe and Qin, Tao and Liu, Tie-Yan and Tsai, Ming-Feng and Li, Hang},
    booktitle={Proceedings of the 24th international conference on Machine learning},
    pages={129--136},
    year={2007}
}
```
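
ListNet, cited above as the training loss, minimizes the cross-entropy between the top-one probability distribution induced by the target relevance scores and the one induced by the predicted scores over each list of candidates. The following is a minimal PyTorch sketch of that objective, not the exact sentence-transformers implementation.

```python
import torch
import torch.nn.functional as F

def listnet_loss(pred_scores: torch.Tensor, true_scores: torch.Tensor) -> torch.Tensor:
    """Top-one ListNet loss.

    pred_scores, true_scores: shape (batch_size, list_size), one row per query list.
    """
    true_dist = F.softmax(true_scores, dim=-1)          # target top-one probabilities
    log_pred_dist = F.log_softmax(pred_scores, dim=-1)  # predicted log-probabilities
    return -(true_dist * log_pred_dist).sum(dim=-1).mean()

# Example: one query with four candidate passages.
pred = torch.tensor([[2.1, 0.3, -0.5, 1.2]])
true = torch.tensor([[1.0, 0.0, 0.0, 0.5]])
print(listnet_loss(pred, true))
```

Here the predicted scores would correspond to the cross-encoder logits for the documents in one list.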

<!--
## Glossary

*Clearly define terms in order to be accessible across audiences.*
-->

<!--
## Model Card Authors

*Lists the people who create the model card, providing recognition and accountability for the detailed work that goes into its construction.*
-->

<!--
## Model Card Contact

*Provides a way for people who have updates to the Model Card, suggestions, or questions, to contact the Model Card authors.*
-->
config.json
ADDED
|
@@ -0,0 +1,57 @@
{
  "architectures": [
    "ModernBertForSequenceClassification"
  ],
  "attention_bias": false,
  "attention_dropout": 0.0,
  "bos_token_id": 50281,
  "causal_mask": false,
  "classifier_activation": "gelu",
  "classifier_bias": false,
  "classifier_dropout": 0.0,
  "classifier_pooling": "mean",
  "cls_token_id": 50281,
  "decoder_bias": true,
  "deterministic_flash_attn": false,
  "embedding_dropout": 0.0,
  "eos_token_id": 50282,
  "global_attn_every_n_layers": 3,
  "global_rope_theta": 160000.0,
  "gradient_checkpointing": false,
  "hidden_activation": "gelu",
  "hidden_size": 384,
  "id2label": {
    "0": "LABEL_0"
  },
  "initializer_cutoff_factor": 2.0,
  "initializer_range": 0.02,
  "intermediate_size": 576,
  "is_causal": false,
  "label2id": {
    "LABEL_0": 0
  },
  "layer_norm_eps": 1e-05,
  "local_attention": 128,
  "local_rope_theta": 160000.0,
  "max_position_embeddings": 7999,
  "mlp_bias": false,
  "mlp_dropout": 0.0,
  "model_type": "modernbert",
  "norm_bias": false,
  "norm_eps": 1e-05,
  "num_attention_heads": 6,
  "num_hidden_layers": 10,
  "pad_token_id": 50283,
  "position_embedding_type": "sans_pos",
  "repad_logits_with_grad": false,
  "sentence_transformers": {
    "activation_fn": "torch.nn.modules.activation.Sigmoid",
    "version": "5.0.0"
  },
  "sep_token_id": 50282,
  "sparse_pred_ignore_index": -100,
  "sparse_prediction": false,
  "torch_dtype": "float32",
  "transformers_version": "4.56.0.dev0",
  "vocab_size": 50368
}
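
The config above describes a compact ModernBERT encoder (10 layers, hidden size 384, 6 attention heads) with a single-logit sequence-classification head, and the `sentence_transformers.activation_fn` entry records that a sigmoid is applied to that logit at inference. A hedged sketch of loading the same checkpoint with plain transformers follows; the path is a placeholder.

```python
# Sketch of loading the checkpoint without the CrossEncoder wrapper.
# "path/to/this-checkpoint" is a placeholder; the final sigmoid mirrors the
# "activation_fn" recorded in the config above.
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

path = "path/to/this-checkpoint"
tokenizer = AutoTokenizer.from_pretrained(path)
model = AutoModelForSequenceClassification.from_pretrained(path)  # num_labels == 1

features = tokenizer(
    ["how many calories in an egg"],
    ["There are on average between 55 and 80 calories in an egg."],
    padding=True, truncation=True, return_tensors="pt",
)
with torch.no_grad():
    score = torch.sigmoid(model(**features).logits.squeeze(-1))
print(score)
```
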
model.safetensors
ADDED
|
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:0d600127f0eff7d5c18aa8beec7cdb1424f58872cac468640c02dbe4822812ff
size 128132084
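
model.safetensors is stored through Git LFS, so the committed file is only the pointer above (spec version, SHA-256 of the real payload, and its size in bytes). If you fetch the weights manually, a small check like the following sketch can confirm the download matches the pointer; the local filename is an assumption.

```python
# Minimal sketch: verify a downloaded model.safetensors against the LFS pointer above.
import hashlib
import os

EXPECTED_SHA256 = "0d600127f0eff7d5c18aa8beec7cdb1424f58872cac468640c02dbe4822812ff"
EXPECTED_SIZE = 128132084
path = "model.safetensors"  # assumed local download location

assert os.path.getsize(path) == EXPECTED_SIZE, "unexpected file size"

sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

assert sha.hexdigest() == EXPECTED_SHA256, "checksum mismatch"
print("model.safetensors matches the LFS pointer")
```
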
special_tokens_map.json
ADDED
|
@@ -0,0 +1,37 @@
|
{
  "cls_token": {
    "content": "[CLS]",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "mask_token": {
    "content": "[MASK]",
    "lstrip": true,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "[PAD]",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "sep_token": {
    "content": "[SEP]",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "[UNK]",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
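
These entries define the BERT-style special tokens the cross-encoder tokenizer relies on ([CLS], [SEP], [PAD], [MASK], [UNK]), with only [MASK] stripping a leading space. A small sketch for inspecting them and encoding a query/passage pair; the repository id is again a placeholder.

```python
# Minimal sketch: inspect the special tokens and encode a (query, passage) pair.
# "path/to/this-checkpoint" is a placeholder for the actual repository id.
from transformers import AutoTokenizer

tokenizer = AutoTokenizer.from_pretrained("path/to/this-checkpoint")
print(tokenizer.cls_token, tokenizer.sep_token, tokenizer.pad_token)  # [CLS] [SEP] [PAD]

encoded = tokenizer(
    "how many calories in an egg",
    "There are on average between 55 and 80 calories in an egg.",
    truncation=True,
)
print(tokenizer.convert_ids_to_tokens(encoded["input_ids"])[:5])
```
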
tokenizer.json
ADDED
|
The diff for this file is too large to render.
See raw diff
|
|
|
tokenizer_config.json
ADDED
|
@@ -0,0 +1,945 @@
|
| 1 |
+
{
|
| 2 |
+
"added_tokens_decoder": {
|
| 3 |
+
"0": {
|
| 4 |
+
"content": "|||IP_ADDRESS|||",
|
| 5 |
+
"lstrip": false,
|
| 6 |
+
"normalized": true,
|
| 7 |
+
"rstrip": false,
|
| 8 |
+
"single_word": false,
|
| 9 |
+
"special": false
|
| 10 |
+
},
|
| 11 |
+
"1": {
|
| 12 |
+
"content": "<|padding|>",
|
| 13 |
+
"lstrip": false,
|
| 14 |
+
"normalized": false,
|
| 15 |
+
"rstrip": false,
|
| 16 |
+
"single_word": false,
|
| 17 |
+
"special": true
|
| 18 |
+
},
|
| 19 |
+
"50254": {
|
| 20 |
+
"content": " ",
|
| 21 |
+
"lstrip": false,
|
| 22 |
+
"normalized": true,
|
| 23 |
+
"rstrip": false,
|
| 24 |
+
"single_word": false,
|
| 25 |
+
"special": false
|
| 26 |
+
},
|
| 27 |
+
"50255": {
|
| 28 |
+
"content": " ",
|
| 29 |
+
"lstrip": false,
|
| 30 |
+
"normalized": true,
|
| 31 |
+
"rstrip": false,
|
| 32 |
+
"single_word": false,
|
| 33 |
+
"special": false
|
| 34 |
+
},
|
| 35 |
+
"50256": {
|
| 36 |
+
"content": " ",
|
| 37 |
+
"lstrip": false,
|
| 38 |
+
"normalized": true,
|
| 39 |
+
"rstrip": false,
|
| 40 |
+
"single_word": false,
|
| 41 |
+
"special": false
|
| 42 |
+
},
|
| 43 |
+
"50257": {
|
| 44 |
+
"content": " ",
|
| 45 |
+
"lstrip": false,
|
| 46 |
+
"normalized": true,
|
| 47 |
+
"rstrip": false,
|
| 48 |
+
"single_word": false,
|
| 49 |
+
"special": false
|
| 50 |
+
},
|
| 51 |
+
"50258": {
|
| 52 |
+
"content": " ",
|
| 53 |
+
"lstrip": false,
|
| 54 |
+
"normalized": true,
|
| 55 |
+
"rstrip": false,
|
| 56 |
+
"single_word": false,
|
| 57 |
+
"special": false
|
| 58 |
+
},
|
| 59 |
+
"50259": {
|
| 60 |
+
"content": " ",
|
| 61 |
+
"lstrip": false,
|
| 62 |
+
"normalized": true,
|
| 63 |
+
"rstrip": false,
|
| 64 |
+
"single_word": false,
|
| 65 |
+
"special": false
|
| 66 |
+
},
|
| 67 |
+
"50260": {
|
| 68 |
+
"content": " ",
|
| 69 |
+
"lstrip": false,
|
| 70 |
+
"normalized": true,
|
| 71 |
+
"rstrip": false,
|
| 72 |
+
"single_word": false,
|
| 73 |
+
"special": false
|
| 74 |
+
},
|
| 75 |
+
"50261": {
|
| 76 |
+
"content": " ",
|
| 77 |
+
"lstrip": false,
|
| 78 |
+
"normalized": true,
|
| 79 |
+
"rstrip": false,
|
| 80 |
+
"single_word": false,
|
| 81 |
+
"special": false
|
| 82 |
+
},
|
| 83 |
+
"50262": {
|
| 84 |
+
"content": " ",
|
| 85 |
+
"lstrip": false,
|
| 86 |
+
"normalized": true,
|
| 87 |
+
"rstrip": false,
|
| 88 |
+
"single_word": false,
|
| 89 |
+
"special": false
|
| 90 |
+
},
|
| 91 |
+
"50263": {
|
| 92 |
+
"content": " ",
|
| 93 |
+
"lstrip": false,
|
| 94 |
+
"normalized": true,
|
| 95 |
+
"rstrip": false,
|
| 96 |
+
"single_word": false,
|
| 97 |
+
"special": false
|
| 98 |
+
},
|
| 99 |
+
"50264": {
|
| 100 |
+
"content": " ",
|
| 101 |
+
"lstrip": false,
|
| 102 |
+
"normalized": true,
|
| 103 |
+
"rstrip": false,
|
| 104 |
+
"single_word": false,
|
| 105 |
+
"special": false
|
| 106 |
+
},
|
| 107 |
+
"50265": {
|
| 108 |
+
"content": " ",
|
| 109 |
+
"lstrip": false,
|
| 110 |
+
"normalized": true,
|
| 111 |
+
"rstrip": false,
|
| 112 |
+
"single_word": false,
|
| 113 |
+
"special": false
|
| 114 |
+
},
|
| 115 |
+
"50266": {
|
| 116 |
+
"content": " ",
|
| 117 |
+
"lstrip": false,
|
| 118 |
+
"normalized": true,
|
| 119 |
+
"rstrip": false,
|
| 120 |
+
"single_word": false,
|
| 121 |
+
"special": false
|
| 122 |
+
},
|
| 123 |
+
"50267": {
|
| 124 |
+
"content": " ",
|
| 125 |
+
"lstrip": false,
|
| 126 |
+
"normalized": true,
|
| 127 |
+
"rstrip": false,
|
| 128 |
+
"single_word": false,
|
| 129 |
+
"special": false
|
| 130 |
+
},
|
| 131 |
+
"50268": {
|
| 132 |
+
"content": " ",
|
| 133 |
+
"lstrip": false,
|
| 134 |
+
"normalized": true,
|
| 135 |
+
"rstrip": false,
|
| 136 |
+
"single_word": false,
|
| 137 |
+
"special": false
|
| 138 |
+
},
|
| 139 |
+
"50269": {
|
| 140 |
+
"content": " ",
|
| 141 |
+
"lstrip": false,
|
| 142 |
+
"normalized": true,
|
| 143 |
+
"rstrip": false,
|
| 144 |
+
"single_word": false,
|
| 145 |
+
"special": false
|
| 146 |
+
},
|
| 147 |
+
"50270": {
|
| 148 |
+
"content": " ",
|
| 149 |
+
"lstrip": false,
|
| 150 |
+
"normalized": true,
|
| 151 |
+
"rstrip": false,
|
| 152 |
+
"single_word": false,
|
| 153 |
+
"special": false
|
| 154 |
+
},
|
| 155 |
+
"50271": {
|
| 156 |
+
"content": " ",
|
| 157 |
+
"lstrip": false,
|
| 158 |
+
"normalized": true,
|
| 159 |
+
"rstrip": false,
|
| 160 |
+
"single_word": false,
|
| 161 |
+
"special": false
|
| 162 |
+
},
|
| 163 |
+
"50272": {
|
| 164 |
+
"content": " ",
|
| 165 |
+
"lstrip": false,
|
| 166 |
+
"normalized": true,
|
| 167 |
+
"rstrip": false,
|
| 168 |
+
"single_word": false,
|
| 169 |
+
"special": false
|
| 170 |
+
},
|
| 171 |
+
"50273": {
|
| 172 |
+
"content": " ",
|
| 173 |
+
"lstrip": false,
|
| 174 |
+
"normalized": true,
|
| 175 |
+
"rstrip": false,
|
| 176 |
+
"single_word": false,
|
| 177 |
+
"special": false
|
| 178 |
+
},
|
| 179 |
+
"50274": {
|
| 180 |
+
"content": " ",
|
| 181 |
+
"lstrip": false,
|
| 182 |
+
"normalized": true,
|
| 183 |
+
"rstrip": false,
|
| 184 |
+
"single_word": false,
|
| 185 |
+
"special": false
|
| 186 |
+
},
|
| 187 |
+
"50275": {
|
| 188 |
+
"content": " ",
|
| 189 |
+
"lstrip": false,
|
| 190 |
+
"normalized": true,
|
| 191 |
+
"rstrip": false,
|
| 192 |
+
"single_word": false,
|
| 193 |
+
"special": false
|
| 194 |
+
},
|
| 195 |
+
"50276": {
|
| 196 |
+
"content": " ",
|
| 197 |
+
"lstrip": false,
|
| 198 |
+
"normalized": true,
|
| 199 |
+
"rstrip": false,
|
| 200 |
+
"single_word": false,
|
| 201 |
+
"special": false
|
| 202 |
+
},
|
| 203 |
+
"50277": {
|
| 204 |
+
"content": "|||EMAIL_ADDRESS|||",
|
| 205 |
+
"lstrip": false,
|
| 206 |
+
"normalized": true,
|
| 207 |
+
"rstrip": false,
|
| 208 |
+
"single_word": false,
|
| 209 |
+
"special": false
|
| 210 |
+
},
|
| 211 |
+
"50278": {
|
| 212 |
+
"content": "|||PHONE_NUMBER|||",
|
| 213 |
+
"lstrip": false,
|
| 214 |
+
"normalized": true,
|
| 215 |
+
"rstrip": false,
|
| 216 |
+
"single_word": false,
|
| 217 |
+
"special": false
|
| 218 |
+
},
|
| 219 |
+
"50279": {
|
| 220 |
+
"content": "<|endoftext|>",
|
| 221 |
+
"lstrip": false,
|
| 222 |
+
"normalized": false,
|
| 223 |
+
"rstrip": false,
|
| 224 |
+
"single_word": false,
|
| 225 |
+
"special": true
|
| 226 |
+
},
|
| 227 |
+
"50280": {
|
| 228 |
+
"content": "[UNK]",
|
| 229 |
+
"lstrip": false,
|
| 230 |
+
"normalized": false,
|
| 231 |
+
"rstrip": false,
|
| 232 |
+
"single_word": false,
|
| 233 |
+
"special": true
|
| 234 |
+
},
|
| 235 |
+
"50281": {
|
| 236 |
+
"content": "[CLS]",
|
| 237 |
+
"lstrip": false,
|
| 238 |
+
"normalized": false,
|
| 239 |
+
"rstrip": false,
|
| 240 |
+
"single_word": false,
|
| 241 |
+
"special": true
|
| 242 |
+
},
|
| 243 |
+
"50282": {
|
| 244 |
+
"content": "[SEP]",
|
| 245 |
+
"lstrip": false,
|
| 246 |
+
"normalized": false,
|
| 247 |
+
"rstrip": false,
|
| 248 |
+
"single_word": false,
|
| 249 |
+
"special": true
|
| 250 |
+
},
|
| 251 |
+
"50283": {
|
| 252 |
+
"content": "[PAD]",
|
| 253 |
+
"lstrip": false,
|
| 254 |
+
"normalized": false,
|
| 255 |
+
"rstrip": false,
|
| 256 |
+
"single_word": false,
|
| 257 |
+
"special": true
|
| 258 |
+
},
|
| 259 |
+
"50284": {
|
| 260 |
+
"content": "[MASK]",
|
| 261 |
+
"lstrip": true,
|
| 262 |
+
"normalized": false,
|
| 263 |
+
"rstrip": false,
|
| 264 |
+
"single_word": false,
|
| 265 |
+
"special": true
|
| 266 |
+
},
|
| 267 |
+
"50285": {
|
| 268 |
+
"content": "[unused0]",
|
| 269 |
+
"lstrip": false,
|
| 270 |
+
"normalized": true,
|
| 271 |
+
"rstrip": false,
|
| 272 |
+
"single_word": false,
|
| 273 |
+
"special": false
|
| 274 |
+
},
|
| 275 |
+
"50286": {
|
| 276 |
+
"content": "[unused1]",
|
| 277 |
+
"lstrip": false,
|
| 278 |
+
"normalized": true,
|
| 279 |
+
"rstrip": false,
|
| 280 |
+
"single_word": false,
|
| 281 |
+
"special": false
|
| 282 |
+
},
|
| 283 |
+
"50287": {
|
| 284 |
+
"content": "[unused2]",
|
| 285 |
+
"lstrip": false,
|
| 286 |
+
"normalized": true,
|
| 287 |
+
"rstrip": false,
|
| 288 |
+
"single_word": false,
|
| 289 |
+
"special": false
|
| 290 |
+
},
|
| 291 |
+
"50288": {
|
| 292 |
+
"content": "[unused3]",
|
| 293 |
+
"lstrip": false,
|
| 294 |
+
"normalized": true,
|
| 295 |
+
"rstrip": false,
|
| 296 |
+
"single_word": false,
|
| 297 |
+
"special": false
|
| 298 |
+
},
|
| 299 |
+
"50289": {
|
| 300 |
+
"content": "[unused4]",
|
| 301 |
+
"lstrip": false,
|
| 302 |
+
"normalized": true,
|
| 303 |
+
"rstrip": false,
|
| 304 |
+
"single_word": false,
|
| 305 |
+
"special": false
|
| 306 |
+
},
|
| 307 |
+
"50290": {
|
| 308 |
+
"content": "[unused5]",
|
| 309 |
+
"lstrip": false,
|
| 310 |
+
"normalized": true,
|
| 311 |
+
"rstrip": false,
|
| 312 |
+
"single_word": false,
|
| 313 |
+
"special": false
|
| 314 |
+
},
|
| 315 |
+
"50291": {
|
| 316 |
+
"content": "[unused6]",
|
| 317 |
+
"lstrip": false,
|
| 318 |
+
"normalized": true,
|
| 319 |
+
"rstrip": false,
|
| 320 |
+
"single_word": false,
|
| 321 |
+
"special": false
|
| 322 |
+
},
|
| 323 |
+
"50292": {
|
| 324 |
+
"content": "[unused7]",
|
| 325 |
+
"lstrip": false,
|
| 326 |
+
"normalized": true,
|
| 327 |
+
"rstrip": false,
|
| 328 |
+
"single_word": false,
|
| 329 |
+
"special": false
|
| 330 |
+
},
|
| 331 |
+
"50293": {
|
| 332 |
+
"content": "[unused8]",
|
| 333 |
+
"lstrip": false,
|
| 334 |
+
"normalized": true,
|
| 335 |
+
"rstrip": false,
|
| 336 |
+
"single_word": false,
|
| 337 |
+
"special": false
|
| 338 |
+
},
|
| 339 |
+
"50294": {
|
| 340 |
+
"content": "[unused9]",
|
| 341 |
+
"lstrip": false,
|
| 342 |
+
"normalized": true,
|
| 343 |
+
"rstrip": false,
|
| 344 |
+
"single_word": false,
|
| 345 |
+
"special": false
|
| 346 |
+
},
|
| 347 |
+
"50295": {
|
| 348 |
+
"content": "[unused10]",
|
| 349 |
+
"lstrip": false,
|
| 350 |
+
"normalized": true,
|
| 351 |
+
"rstrip": false,
|
| 352 |
+
"single_word": false,
|
| 353 |
+
"special": false
|
| 354 |
+
},
|
| 355 |
+
"50296": {
|
| 356 |
+
"content": "[unused11]",
|
| 357 |
+
"lstrip": false,
|
| 358 |
+
"normalized": true,
|
| 359 |
+
"rstrip": false,
|
| 360 |
+
"single_word": false,
|
| 361 |
+
"special": false
|
| 362 |
+
},
|
| 363 |
+
"50297": {
|
| 364 |
+
"content": "[unused12]",
|
| 365 |
+
"lstrip": false,
|
| 366 |
+
"normalized": true,
|
| 367 |
+
"rstrip": false,
|
| 368 |
+
"single_word": false,
|
| 369 |
+
"special": false
|
| 370 |
+
},
|
| 371 |
+
"50298": {
|
| 372 |
+
"content": "[unused13]",
|
| 373 |
+
"lstrip": false,
|
| 374 |
+
"normalized": true,
|
| 375 |
+
"rstrip": false,
|
| 376 |
+
"single_word": false,
|
| 377 |
+
"special": false
|
| 378 |
+
},
|
| 379 |
+
"50299": {
|
| 380 |
+
"content": "[unused14]",
|
| 381 |
+
"lstrip": false,
|
| 382 |
+
"normalized": true,
|
| 383 |
+
"rstrip": false,
|
| 384 |
+
"single_word": false,
|
| 385 |
+
"special": false
|
| 386 |
+
},
|
| 387 |
+
"50300": {
|
| 388 |
+
"content": "[unused15]",
|
| 389 |
+
"lstrip": false,
|
| 390 |
+
"normalized": true,
|
| 391 |
+
"rstrip": false,
|
| 392 |
+
"single_word": false,
|
| 393 |
+
"special": false
|
| 394 |
+
},
|
| 395 |
+
"50301": {
|
| 396 |
+
"content": "[unused16]",
|
| 397 |
+
"lstrip": false,
|
| 398 |
+
"normalized": true,
|
| 399 |
+
"rstrip": false,
|
| 400 |
+
"single_word": false,
|
| 401 |
+
"special": false
|
| 402 |
+
},
|
| 403 |
+
"50302": {
|
| 404 |
+
"content": "[unused17]",
|
| 405 |
+
"lstrip": false,
|
| 406 |
+
"normalized": true,
|
| 407 |
+
"rstrip": false,
|
| 408 |
+
"single_word": false,
|
| 409 |
+
"special": false
|
| 410 |
+
},
|
| 411 |
+
"50303": {
|
| 412 |
+
"content": "[unused18]",
|
| 413 |
+
"lstrip": false,
|
| 414 |
+
"normalized": true,
|
| 415 |
+
"rstrip": false,
|
| 416 |
+
"single_word": false,
|
| 417 |
+
"special": false
|
| 418 |
+
},
|
| 419 |
+
"50304": {
|
| 420 |
+
"content": "[unused19]",
|
| 421 |
+
"lstrip": false,
|
| 422 |
+
"normalized": true,
|
| 423 |
+
"rstrip": false,
|
| 424 |
+
"single_word": false,
|
| 425 |
+
"special": false
|
| 426 |
+
},
|
| 427 |
+
"50305": {
|
| 428 |
+
"content": "[unused20]",
|
| 429 |
+
"lstrip": false,
|
| 430 |
+
"normalized": true,
|
| 431 |
+
"rstrip": false,
|
| 432 |
+
"single_word": false,
|
| 433 |
+
"special": false
|
| 434 |
+
},
|
| 435 |
+
"50306": {
|
| 436 |
+
"content": "[unused21]",
|
| 437 |
+
"lstrip": false,
|
| 438 |
+
"normalized": true,
|
| 439 |
+
"rstrip": false,
|
| 440 |
+
"single_word": false,
|
| 441 |
+
"special": false
|
| 442 |
+
},
|
| 443 |
+
"50307": {
|
| 444 |
+
"content": "[unused22]",
|
| 445 |
+
"lstrip": false,
|
| 446 |
+
"normalized": true,
|
| 447 |
+
"rstrip": false,
|
| 448 |
+
"single_word": false,
|
| 449 |
+
"special": false
|
| 450 |
+
},
|
| 451 |
+
"50308": {
|
| 452 |
+
"content": "[unused23]",
|
| 453 |
+
"lstrip": false,
|
| 454 |
+
"normalized": true,
|
| 455 |
+
"rstrip": false,
|
| 456 |
+
"single_word": false,
|
| 457 |
+
"special": false
|
| 458 |
+
},
|
| 459 |
+
"50309": {
|
| 460 |
+
"content": "[unused24]",
|
| 461 |
+
"lstrip": false,
|
| 462 |
+
"normalized": true,
|
| 463 |
+
"rstrip": false,
|
| 464 |
+
"single_word": false,
|
| 465 |
+
"special": false
|
| 466 |
+
},
|
| 467 |
+
"50310": {
|
| 468 |
+
"content": "[unused25]",
|
| 469 |
+
"lstrip": false,
|
| 470 |
+
"normalized": true,
|
| 471 |
+
"rstrip": false,
|
| 472 |
+
"single_word": false,
|
| 473 |
+
"special": false
|
| 474 |
+
},
|
| 475 |
+
"50311": {
|
| 476 |
+
"content": "[unused26]",
|
| 477 |
+
"lstrip": false,
|
| 478 |
+
"normalized": true,
|
| 479 |
+
"rstrip": false,
|
| 480 |
+
"single_word": false,
|
| 481 |
+
"special": false
|
| 482 |
+
},
|
| 483 |
+
"50312": {
|
| 484 |
+
"content": "[unused27]",
|
| 485 |
+
"lstrip": false,
|
| 486 |
+
"normalized": true,
|
| 487 |
+
"rstrip": false,
|
| 488 |
+
"single_word": false,
|
| 489 |
+
"special": false
|
| 490 |
+
},
|
| 491 |
+
"50313": {
|
| 492 |
+
"content": "[unused28]",
|
| 493 |
+
"lstrip": false,
|
| 494 |
+
"normalized": true,
|
| 495 |
+
"rstrip": false,
|
| 496 |
+
"single_word": false,
|
| 497 |
+
"special": false
|
| 498 |
+
},
|
| 499 |
+
"50314": {
|
| 500 |
+
"content": "[unused29]",
|
| 501 |
+
"lstrip": false,
|
| 502 |
+
"normalized": true,
|
| 503 |
+
"rstrip": false,
|
| 504 |
+
"single_word": false,
|
| 505 |
+
"special": false
|
| 506 |
+
},
|
| 507 |
+
"50315": {
|
| 508 |
+
"content": "[unused30]",
|
| 509 |
+
"lstrip": false,
|
| 510 |
+
"normalized": true,
|
| 511 |
+
"rstrip": false,
|
| 512 |
+
"single_word": false,
|
| 513 |
+
"special": false
|
| 514 |
+
},
|
| 515 |
+
"50316": {
|
| 516 |
+
"content": "[unused31]",
|
| 517 |
+
"lstrip": false,
|
| 518 |
+
"normalized": true,
|
| 519 |
+
"rstrip": false,
|
| 520 |
+
"single_word": false,
|
| 521 |
+
"special": false
|
| 522 |
+
},
|
| 523 |
+
"50317": {
|
| 524 |
+
"content": "[unused32]",
|
| 525 |
+
"lstrip": false,
|
| 526 |
+
"normalized": true,
|
| 527 |
+
"rstrip": false,
|
| 528 |
+
"single_word": false,
|
| 529 |
+
"special": false
|
| 530 |
+
},
|
| 531 |
+
"50318": {
|
| 532 |
+
"content": "[unused33]",
|
| 533 |
+
"lstrip": false,
|
| 534 |
+
"normalized": true,
|
| 535 |
+
"rstrip": false,
|
| 536 |
+
"single_word": false,
|
| 537 |
+
"special": false
|
| 538 |
+
},
|
| 539 |
+
"50319": {
|
| 540 |
+
"content": "[unused34]",
|
| 541 |
+
"lstrip": false,
|
| 542 |
+
"normalized": true,
|
| 543 |
+
"rstrip": false,
|
| 544 |
+
"single_word": false,
|
| 545 |
+
"special": false
|
| 546 |
+
},
|
| 547 |
+
"50320": {
|
| 548 |
+
"content": "[unused35]",
|
| 549 |
+
"lstrip": false,
|
| 550 |
+
"normalized": true,
|
| 551 |
+
"rstrip": false,
|
| 552 |
+
"single_word": false,
|
| 553 |
+
"special": false
|
| 554 |
+
},
|
| 555 |
+
"50321": {
|
| 556 |
+
"content": "[unused36]",
|
| 557 |
+
"lstrip": false,
|
| 558 |
+
"normalized": true,
|
| 559 |
+
"rstrip": false,
|
| 560 |
+
"single_word": false,
|
| 561 |
+
"special": false
|
| 562 |
+
},
|
| 563 |
+
"50322": {
|
| 564 |
+
"content": "[unused37]",
|
| 565 |
+
"lstrip": false,
|
| 566 |
+
"normalized": true,
|
| 567 |
+
"rstrip": false,
|
| 568 |
+
"single_word": false,
|
| 569 |
+
"special": false
|
| 570 |
+
},
|
| 571 |
+
"50323": {
|
| 572 |
+
"content": "[unused38]",
|
| 573 |
+
"lstrip": false,
|
| 574 |
+
"normalized": true,
|
| 575 |
+
"rstrip": false,
|
| 576 |
+
"single_word": false,
|
| 577 |
+
"special": false
|
| 578 |
+
},
|
| 579 |
+
"50324": {
|
| 580 |
+
"content": "[unused39]",
|
| 581 |
+
"lstrip": false,
|
| 582 |
+
"normalized": true,
|
| 583 |
+
"rstrip": false,
|
| 584 |
+
"single_word": false,
|
| 585 |
+
"special": false
|
| 586 |
+
},
|
| 587 |
+
"50325": {
|
| 588 |
+
"content": "[unused40]",
|
| 589 |
+
"lstrip": false,
|
| 590 |
+
"normalized": true,
|
| 591 |
+
"rstrip": false,
|
| 592 |
+
"single_word": false,
|
| 593 |
+
"special": false
|
| 594 |
+
},
|
| 595 |
+
"50326": {
|
| 596 |
+
"content": "[unused41]",
|
| 597 |
+
"lstrip": false,
|
| 598 |
+
"normalized": true,
|
| 599 |
+
"rstrip": false,
|
| 600 |
+
"single_word": false,
|
| 601 |
+
"special": false
|
| 602 |
+
},
|
| 603 |
+
"50327": {
|
| 604 |
+
"content": "[unused42]",
|
| 605 |
+
"lstrip": false,
|
| 606 |
+
"normalized": true,
|
| 607 |
+
"rstrip": false,
|
| 608 |
+
"single_word": false,
|
| 609 |
+
"special": false
|
| 610 |
+
},
|
| 611 |
+
"50328": {
|
| 612 |
+
"content": "[unused43]",
|
| 613 |
+
"lstrip": false,
|
| 614 |
+
"normalized": true,
|
| 615 |
+
"rstrip": false,
|
| 616 |
+
"single_word": false,
|
| 617 |
+
"special": false
|
| 618 |
+
},
|
| 619 |
+
"50329": {
|
| 620 |
+
"content": "[unused44]",
|
| 621 |
+
"lstrip": false,
|
| 622 |
+
"normalized": true,
|
| 623 |
+
"rstrip": false,
|
| 624 |
+
"single_word": false,
|
| 625 |
+
"special": false
|
| 626 |
+
},
|
| 627 |
+
"50330": {
|
| 628 |
+
"content": "[unused45]",
|
| 629 |
+
"lstrip": false,
|
| 630 |
+
"normalized": true,
|
| 631 |
+
"rstrip": false,
|
| 632 |
+
"single_word": false,
|
| 633 |
+
"special": false
|
| 634 |
+
},
|
| 635 |
+
"50331": {
|
| 636 |
+
"content": "[unused46]",
|
| 637 |
+
"lstrip": false,
|
| 638 |
+
"normalized": true,
|
| 639 |
+
"rstrip": false,
|
| 640 |
+
"single_word": false,
|
| 641 |
+
"special": false
|
| 642 |
+
},
|
| 643 |
+
"50332": {
|
| 644 |
+
"content": "[unused47]",
|
| 645 |
+
"lstrip": false,
|
| 646 |
+
"normalized": true,
|
| 647 |
+
"rstrip": false,
|
| 648 |
+
"single_word": false,
|
| 649 |
+
"special": false
|
| 650 |
+
},
|
| 651 |
+
"50333": {
|
| 652 |
+
"content": "[unused48]",
|
| 653 |
+
"lstrip": false,
|
| 654 |
+
"normalized": true,
|
| 655 |
+
"rstrip": false,
|
| 656 |
+
"single_word": false,
|
| 657 |
+
"special": false
|
| 658 |
+
},
|
| 659 |
+
"50334": {
|
| 660 |
+
"content": "[unused49]",
|
| 661 |
+
"lstrip": false,
|
| 662 |
+
"normalized": true,
|
| 663 |
+
"rstrip": false,
|
| 664 |
+
"single_word": false,
|
| 665 |
+
"special": false
|
| 666 |
+
},
|
| 667 |
+
"50335": {
|
| 668 |
+
"content": "[unused50]",
|
| 669 |
+
"lstrip": false,
|
| 670 |
+
"normalized": true,
|
| 671 |
+
"rstrip": false,
|
| 672 |
+
"single_word": false,
|
| 673 |
+
"special": false
|
| 674 |
+
},
|
| 675 |
+
"50336": {
|
| 676 |
+
"content": "[unused51]",
|
| 677 |
+
"lstrip": false,
|
| 678 |
+
"normalized": true,
|
| 679 |
+
"rstrip": false,
|
| 680 |
+
"single_word": false,
|
| 681 |
+
"special": false
|
| 682 |
+
},
|
| 683 |
+
"50337": {
|
| 684 |
+
"content": "[unused52]",
|
| 685 |
+
"lstrip": false,
|
| 686 |
+
"normalized": true,
|
| 687 |
+
"rstrip": false,
|
| 688 |
+
"single_word": false,
|
| 689 |
+
"special": false
|
| 690 |
+
},
|
| 691 |
+
"50338": {
|
| 692 |
+
"content": "[unused53]",
|
| 693 |
+
"lstrip": false,
|
| 694 |
+
"normalized": true,
|
| 695 |
+
"rstrip": false,
|
| 696 |
+
"single_word": false,
|
| 697 |
+
"special": false
|
| 698 |
+
},
|
| 699 |
+
"50339": {
|
| 700 |
+
"content": "[unused54]",
|
| 701 |
+
"lstrip": false,
|
| 702 |
+
"normalized": true,
|
| 703 |
+
"rstrip": false,
|
| 704 |
+
"single_word": false,
|
| 705 |
+
"special": false
|
| 706 |
+
},
|
| 707 |
+
"50340": {
|
| 708 |
+
"content": "[unused55]",
|
| 709 |
+
"lstrip": false,
|
| 710 |
+
"normalized": true,
|
| 711 |
+
"rstrip": false,
|
| 712 |
+
"single_word": false,
|
| 713 |
+
"special": false
|
| 714 |
+
},
|
| 715 |
+
"50341": {
|
| 716 |
+
"content": "[unused56]",
|
| 717 |
+
"lstrip": false,
|
| 718 |
+
"normalized": true,
|
| 719 |
+
"rstrip": false,
|
| 720 |
+
"single_word": false,
|
| 721 |
+
"special": false
|
| 722 |
+
},
|
| 723 |
+
"50342": {
|
| 724 |
+
"content": "[unused57]",
|
| 725 |
+
"lstrip": false,
|
| 726 |
+
"normalized": true,
|
| 727 |
+
"rstrip": false,
|
| 728 |
+
"single_word": false,
|
| 729 |
+
"special": false
|
| 730 |
+
},
|
| 731 |
+
"50343": {
|
| 732 |
+
"content": "[unused58]",
|
| 733 |
+
"lstrip": false,
|
| 734 |
+
"normalized": true,
|
| 735 |
+
"rstrip": false,
|
| 736 |
+
"single_word": false,
|
| 737 |
+
"special": false
|
| 738 |
+
},
|
| 739 |
+
"50344": {
|
| 740 |
+
"content": "[unused59]",
|
| 741 |
+
"lstrip": false,
|
| 742 |
+
"normalized": true,
|
| 743 |
+
"rstrip": false,
|
| 744 |
+
"single_word": false,
|
| 745 |
+
"special": false
|
| 746 |
+
},
|
| 747 |
+
"50345": {
|
| 748 |
+
"content": "[unused60]",
|
| 749 |
+
"lstrip": false,
|
| 750 |
+
"normalized": true,
|
| 751 |
+
"rstrip": false,
|
| 752 |
+
"single_word": false,
|
| 753 |
+
"special": false
|
| 754 |
+
},
|
| 755 |
+
"50346": {
|
| 756 |
+
"content": "[unused61]",
|
| 757 |
+
"lstrip": false,
|
| 758 |
+
"normalized": true,
|
| 759 |
+
"rstrip": false,
|
| 760 |
+
"single_word": false,
|
| 761 |
+
"special": false
|
| 762 |
+
},
|
| 763 |
+
"50347": {
|
| 764 |
+
"content": "[unused62]",
|
| 765 |
+
"lstrip": false,
|
| 766 |
+
"normalized": true,
|
| 767 |
+
"rstrip": false,
|
| 768 |
+
"single_word": false,
|
| 769 |
+
"special": false
|
| 770 |
+
},
|
| 771 |
+
"50348": {
|
| 772 |
+
"content": "[unused63]",
|
| 773 |
+
"lstrip": false,
|
| 774 |
+
"normalized": true,
|
| 775 |
+
"rstrip": false,
|
| 776 |
+
"single_word": false,
|
| 777 |
+
"special": false
|
| 778 |
+
},
|
| 779 |
+
"50349": {
|
| 780 |
+
"content": "[unused64]",
|
| 781 |
+
"lstrip": false,
|
| 782 |
+
"normalized": true,
|
| 783 |
+
"rstrip": false,
|
| 784 |
+
"single_word": false,
|
| 785 |
+
"special": false
|
| 786 |
+
},
|
| 787 |
+
"50350": {
|
| 788 |
+
"content": "[unused65]",
|
| 789 |
+
"lstrip": false,
|
| 790 |
+
"normalized": true,
|
| 791 |
+
"rstrip": false,
|
| 792 |
+
"single_word": false,
|
| 793 |
+
"special": false
|
| 794 |
+
},
|
| 795 |
+
"50351": {
|
| 796 |
+
"content": "[unused66]",
|
| 797 |
+
"lstrip": false,
|
| 798 |
+
"normalized": true,
|
| 799 |
+
"rstrip": false,
|
| 800 |
+
"single_word": false,
|
| 801 |
+
"special": false
|
| 802 |
+
},
|
| 803 |
+
"50352": {
|
| 804 |
+
"content": "[unused67]",
|
| 805 |
+
"lstrip": false,
|
| 806 |
+
"normalized": true,
|
| 807 |
+
"rstrip": false,
|
| 808 |
+
"single_word": false,
|
| 809 |
+
"special": false
|
| 810 |
+
},
|
| 811 |
+
"50353": {
|
| 812 |
+
"content": "[unused68]",
|
| 813 |
+
"lstrip": false,
|
| 814 |
+
"normalized": true,
|
| 815 |
+
"rstrip": false,
|
| 816 |
+
"single_word": false,
|
| 817 |
+
"special": false
|
| 818 |
+
},
|
| 819 |
+
"50354": {
|
| 820 |
+
"content": "[unused69]",
|
| 821 |
+
"lstrip": false,
|
| 822 |
+
"normalized": true,
|
| 823 |
+
"rstrip": false,
|
| 824 |
+
"single_word": false,
|
| 825 |
+
"special": false
|
| 826 |
+
},
|
| 827 |
+
"50355": {
|
| 828 |
+
"content": "[unused70]",
|
| 829 |
+
"lstrip": false,
|
| 830 |
+
"normalized": true,
|
| 831 |
+
"rstrip": false,
|
| 832 |
+
"single_word": false,
|
| 833 |
+
"special": false
|
| 834 |
+
},
|
| 835 |
+
"50356": {
|
| 836 |
+
"content": "[unused71]",
|
| 837 |
+
"lstrip": false,
|
| 838 |
+
"normalized": true,
|
| 839 |
+
"rstrip": false,
|
| 840 |
+
"single_word": false,
|
| 841 |
+
"special": false
|
| 842 |
+
},
|
| 843 |
+
"50357": {
|
| 844 |
+
"content": "[unused72]",
|
| 845 |
+
"lstrip": false,
|
| 846 |
+
"normalized": true,
|
| 847 |
+
"rstrip": false,
|
| 848 |
+
"single_word": false,
|
| 849 |
+
"special": false
|
| 850 |
+
},
|
| 851 |
+
"50358": {
|
| 852 |
+
"content": "[unused73]",
|
| 853 |
+
"lstrip": false,
|
| 854 |
+
"normalized": true,
|
| 855 |
+
"rstrip": false,
|
| 856 |
+
"single_word": false,
|
| 857 |
+
"special": false
|
| 858 |
+
},
|
| 859 |
+
"50359": {
|
| 860 |
+
"content": "[unused74]",
|
| 861 |
+
"lstrip": false,
|
| 862 |
+
"normalized": true,
|
| 863 |
+
"rstrip": false,
|
| 864 |
+
"single_word": false,
|
| 865 |
+
"special": false
|
| 866 |
+
},
|
| 867 |
+
"50360": {
|
| 868 |
+
"content": "[unused75]",
|
| 869 |
+
"lstrip": false,
|
| 870 |
+
"normalized": true,
|
| 871 |
+
"rstrip": false,
|
| 872 |
+
"single_word": false,
|
| 873 |
+
"special": false
|
| 874 |
+
},
|
| 875 |
+
"50361": {
|
| 876 |
+
"content": "[unused76]",
|
| 877 |
+
"lstrip": false,
|
| 878 |
+
"normalized": true,
|
| 879 |
+
"rstrip": false,
|
| 880 |
+
"single_word": false,
|
| 881 |
+
"special": false
|
| 882 |
+
},
|
| 883 |
+
"50362": {
|
| 884 |
+
"content": "[unused77]",
|
| 885 |
+
"lstrip": false,
|
| 886 |
+
"normalized": true,
|
| 887 |
+
"rstrip": false,
|
| 888 |
+
"single_word": false,
|
| 889 |
+
"special": false
|
| 890 |
+
},
|
| 891 |
+
"50363": {
|
| 892 |
+
"content": "[unused78]",
|
| 893 |
+
"lstrip": false,
|
| 894 |
+
"normalized": true,
|
| 895 |
+
"rstrip": false,
|
| 896 |
+
"single_word": false,
|
| 897 |
+
"special": false
|
| 898 |
+
},
|
| 899 |
+
"50364": {
|
| 900 |
+
"content": "[unused79]",
|
| 901 |
+
"lstrip": false,
|
| 902 |
+
"normalized": true,
|
| 903 |
+
"rstrip": false,
|
| 904 |
+
"single_word": false,
|
| 905 |
+
"special": false
|
| 906 |
+
},
|
| 907 |
+
"50365": {
|
| 908 |
+
"content": "[unused80]",
|
| 909 |
+
"lstrip": false,
|
| 910 |
+
"normalized": true,
|
| 911 |
+
"rstrip": false,
|
| 912 |
+
"single_word": false,
|
| 913 |
+
"special": false
|
| 914 |
+
},
|
| 915 |
+
"50366": {
|
| 916 |
+
"content": "[unused81]",
|
| 917 |
+
"lstrip": false,
|
| 918 |
+
"normalized": true,
|
| 919 |
+
"rstrip": false,
|
| 920 |
+
"single_word": false,
|
| 921 |
+
"special": false
|
| 922 |
+
},
|
| 923 |
+
"50367": {
|
| 924 |
+
"content": "[unused82]",
|
| 925 |
+
"lstrip": false,
|
| 926 |
+
"normalized": true,
|
| 927 |
+
"rstrip": false,
|
| 928 |
+
"single_word": false,
|
| 929 |
+
"special": false
|
| 930 |
+
}
|
| 931 |
+
},
|
| 932 |
+
"clean_up_tokenization_spaces": true,
|
| 933 |
+
"cls_token": "[CLS]",
|
| 934 |
+
"extra_special_tokens": {},
|
| 935 |
+
"mask_token": "[MASK]",
|
| 936 |
+
"model_input_names": [
|
| 937 |
+
"input_ids",
|
| 938 |
+
"attention_mask"
|
| 939 |
+
],
|
| 940 |
+
"model_max_length": 7999,
|
| 941 |
+
"pad_token": "[PAD]",
|
| 942 |
+
"sep_token": "[SEP]",
|
| 943 |
+
"tokenizer_class": "PreTrainedTokenizerFast",
|
| 944 |
+
"unk_token": "[UNK]"
|
| 945 |
+
}
|