diff --git a/ICM-LLaVA-v1.5-13B/config.json b/ICM-LLaVA-v1.5-13B/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..0c87cd07db6c3e77d85ed420520d37470a9ffc04
--- /dev/null
+++ b/ICM-LLaVA-v1.5-13B/config.json
@@ -0,0 +1,49 @@
+{
+ "_name_or_path": "liuhaotian/llava-v1.5-13b",
+ "architectures": [
+ "LlavaLlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "freeze_mm_mlp_adapter": false,
+ "freeze_mm_vision_resampler": false,
+ "hidden_act": "silu",
+ "hidden_size": 5120,
+ "image_aspect_ratio": "pad",
+ "initializer_range": 0.02,
+ "intermediate_size": 13824,
+ "max_length": 4096,
+ "max_position_embeddings": 4096,
+ "mm_hidden_size": 1024,
+ "mm_patch_merge_type": "flat",
+ "mm_projector_lr": null,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_resampler_type": null,
+ "mm_use_im_patch_token": false,
+ "mm_use_im_start_end": false,
+ "mm_vision_select_feature": "patch",
+ "mm_vision_select_layer": -2,
+ "mm_vision_tower": "openai/clip-vit-large-patch14-336",
+ "model_type": "llava_llama",
+ "num_attention_heads": 40,
+ "num_hidden_layers": 40,
+ "num_key_value_heads": 40,
+ "pad_token_id": 0,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "tokenizer_model_max_length": 2048,
+ "tokenizer_padding_side": "right",
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.37.2",
+ "tune_mm_mlp_adapter": false,
+ "tune_mm_vision_resampler": false,
+ "unfreeze_mm_vision_tower": false,
+ "use_cache": true,
+ "use_mm_proj": true,
+ "vocab_size": 32000
+}
diff --git a/ICM-LLaVA-v1.5-13B/generation_config.json b/ICM-LLaVA-v1.5-13B/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..70ffa4e13b28eca9f452207a778bb73c036f3d03
--- /dev/null
+++ b/ICM-LLaVA-v1.5-13B/generation_config.json
@@ -0,0 +1,8 @@
+{
+ "_from_model_config": true,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "max_length": 4096,
+ "pad_token_id": 0,
+ "transformers_version": "4.37.2"
+}
diff --git a/ICM-LLaVA-v1.5-13B/model-00001-of-00006.safetensors b/ICM-LLaVA-v1.5-13B/model-00001-of-00006.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..fa4885fdb7760f74ac54b870709f7d788b0934ec
--- /dev/null
+++ b/ICM-LLaVA-v1.5-13B/model-00001-of-00006.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8396ef416922f96f6da9be9d1b94e5af28dbb7571117eb837a223e8e48693b13
+size 4978265800
diff --git a/ICM-LLaVA-v1.5-13B/model-00002-of-00006.safetensors b/ICM-LLaVA-v1.5-13B/model-00002-of-00006.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..5d5e3d668642ca6e5bf577baff197dcbf2546a76
--- /dev/null
+++ b/ICM-LLaVA-v1.5-13B/model-00002-of-00006.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:0066a4b0c76cd970c3bb4f3af68a87396b605d91fa50bc465def9096fe7134fa
+size 4970422232
diff --git a/ICM-LLaVA-v1.5-13B/model-00003-of-00006.safetensors b/ICM-LLaVA-v1.5-13B/model-00003-of-00006.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..7cf520b7f71935232aee36bb87c702091f44caf8
--- /dev/null
+++ b/ICM-LLaVA-v1.5-13B/model-00003-of-00006.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:f45086082307521a1933bdc39235bc1e6b8e3fc728e544ab2a3d28d65ba2d4a6
+size 4970422256
diff --git a/ICM-LLaVA-v1.5-13B/model-00004-of-00006.safetensors b/ICM-LLaVA-v1.5-13B/model-00004-of-00006.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..f465ad0a8c59e81c447c4ef0adeab61045f4fcd5
--- /dev/null
+++ b/ICM-LLaVA-v1.5-13B/model-00004-of-00006.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:edf84ccec24497f314e75f4e2cf4be8055d84bfbe6e009aa1123e67503f2f856
+size 4933701504
diff --git a/ICM-LLaVA-v1.5-13B/model-00005-of-00006.safetensors b/ICM-LLaVA-v1.5-13B/model-00005-of-00006.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..eedc4f6a019de351d276557d91533c797d68c0dd
--- /dev/null
+++ b/ICM-LLaVA-v1.5-13B/model-00005-of-00006.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3f6a55388462fe91c91ec39a15a63244b23f1066684b814210c662e8f681bedf
+size 4933722216
diff --git a/ICM-LLaVA-v1.5-13B/model-00006-of-00006.safetensors b/ICM-LLaVA-v1.5-13B/model-00006-of-00006.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..8e3b470d52715392386bbef3617c40ddf0972eda
--- /dev/null
+++ b/ICM-LLaVA-v1.5-13B/model-00006-of-00006.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:96aabac3044199a158ab971949bc344fd6412ad693015faf74c64432e1c38dcb
+size 1915248664
diff --git a/ICM-LLaVA-v1.5-13B/model.safetensors.index.json b/ICM-LLaVA-v1.5-13B/model.safetensors.index.json
new file mode 100644
index 0000000000000000000000000000000000000000..0b710ea4003a96fb2d14b8f7775e12bcfcfb7c2a
--- /dev/null
+++ b/ICM-LLaVA-v1.5-13B/model.safetensors.index.json
@@ -0,0 +1,765 @@
+{
+ "metadata": {
+ "total_size": 26701678592
+ },
+ "weight_map": {
+ "lm_head.weight": "model-00006-of-00006.safetensors",
+ "model.embed_tokens.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.28.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.28.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.28.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.28.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.28.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.28.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.28.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.28.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.28.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.29.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.29.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.29.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.29.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.29.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.29.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.29.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.29.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.29.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.30.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.30.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.30.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.30.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.30.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.30.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.30.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.30.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.30.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.31.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.35.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.35.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.35.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.35.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.35.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.35.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.35.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.35.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.35.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.36.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.36.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.36.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.36.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.36.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.36.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.36.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.36.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.36.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.37.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.37.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.37.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.37.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.37.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.37.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.37.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.37.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.37.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.38.input_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.38.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.38.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.38.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.38.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.38.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.38.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.38.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.38.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.39.input_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.mm_projector.0.bias": "model-00006-of-00006.safetensors",
+ "model.mm_projector.0.weight": "model-00006-of-00006.safetensors",
+ "model.mm_projector.2.bias": "model-00006-of-00006.safetensors",
+ "model.mm_projector.2.weight": "model-00006-of-00006.safetensors",
+ "model.norm.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.embeddings.class_embedding": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.embeddings.patch_embedding.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.embeddings.position_embedding.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.post_layernorm.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.post_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.pre_layrnorm.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.pre_layrnorm.weight": "model-00006-of-00006.safetensors"
+ }
+}
diff --git a/ICM-LLaVA-v1.5-13B/runs/Jan19_23-20-31_hk01dgx042/events.out.tfevents.1737300335.hk01dgx042.3157865.0 b/ICM-LLaVA-v1.5-13B/runs/Jan19_23-20-31_hk01dgx042/events.out.tfevents.1737300335.hk01dgx042.3157865.0
new file mode 100644
index 0000000000000000000000000000000000000000..9ac0bd0db037178c3bb903cb9a02b9e705b8fd20
--- /dev/null
+++ b/ICM-LLaVA-v1.5-13B/runs/Jan19_23-20-31_hk01dgx042/events.out.tfevents.1737300335.hk01dgx042.3157865.0
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:087b35a071950bc177c7669646a9a3618f8c708ff4a25b12a25ec20426aa20cf
+size 398828
diff --git a/ICM-LLaVA-v1.5-13B/runs/Jan20_08-43-10_hk01dgx042/events.out.tfevents.1737333831.hk01dgx042.3834051.0 b/ICM-LLaVA-v1.5-13B/runs/Jan20_08-43-10_hk01dgx042/events.out.tfevents.1737333831.hk01dgx042.3834051.0
new file mode 100644
index 0000000000000000000000000000000000000000..a597eed76e7525dbbb6407cf524c68837452b0b5
--- /dev/null
+++ b/ICM-LLaVA-v1.5-13B/runs/Jan20_08-43-10_hk01dgx042/events.out.tfevents.1737333831.hk01dgx042.3834051.0
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1e585f9955a8966bb2c5989655b910f979d785cf0d357595424cce631303500c
+size 37060
diff --git a/ICM-LLaVA-v1.5-13B/special_tokens_map.json b/ICM-LLaVA-v1.5-13B/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..14761dcf1466dc232bd41de9c21d4c617b15755e
--- /dev/null
+++ b/ICM-LLaVA-v1.5-13B/special_tokens_map.json
@@ -0,0 +1,24 @@
+{
+ "bos_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": "",
+ "unk_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/ICM-LLaVA-v1.5-13B/tokenizer.model b/ICM-LLaVA-v1.5-13B/tokenizer.model
new file mode 100644
index 0000000000000000000000000000000000000000..6c00c742ce03c627d6cd5b795984876fa49fa899
--- /dev/null
+++ b/ICM-LLaVA-v1.5-13B/tokenizer.model
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
diff --git a/ICM-LLaVA-v1.5-13B/tokenizer_config.json b/ICM-LLaVA-v1.5-13B/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..2d53c0f8edb049fa98763ee75652fafa68bf7f42
--- /dev/null
+++ b/ICM-LLaVA-v1.5-13B/tokenizer_config.json
@@ -0,0 +1,42 @@
+{
+ "add_bos_token": true,
+ "add_eos_token": false,
+ "added_tokens_decoder": {
+ "0": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "",
+ "legacy": false,
+ "model_max_length": 2048,
+ "pad_token": "",
+ "padding_side": "right",
+ "sp_model_kwargs": {},
+ "spaces_between_special_tokens": false,
+ "tokenizer_class": "LlamaTokenizer",
+ "unk_token": "",
+ "use_default_system_prompt": false
+}
diff --git a/ICM-LLaVA-v1.5-13B/trainer_state.json b/ICM-LLaVA-v1.5-13B/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..2b4fb833fd667a9fdf589a721720a8f910623d90
--- /dev/null
+++ b/ICM-LLaVA-v1.5-13B/trainer_state.json
@@ -0,0 +1,15060 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 1.0,
+ "eval_steps": 500,
+ "global_step": 2505,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.6315789473684213e-07,
+ "loss": 1.3667,
+ "step": 1
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 5.263157894736843e-07,
+ "loss": 1.4669,
+ "step": 2
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 7.894736842105263e-07,
+ "loss": 1.4032,
+ "step": 3
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.0526315789473685e-06,
+ "loss": 1.2917,
+ "step": 4
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.3157894736842106e-06,
+ "loss": 1.395,
+ "step": 5
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.5789473684210526e-06,
+ "loss": 1.3943,
+ "step": 6
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.8421052631578948e-06,
+ "loss": 1.3845,
+ "step": 7
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.105263157894737e-06,
+ "loss": 1.2892,
+ "step": 8
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.368421052631579e-06,
+ "loss": 1.2771,
+ "step": 9
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.631578947368421e-06,
+ "loss": 1.2156,
+ "step": 10
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.8947368421052634e-06,
+ "loss": 1.215,
+ "step": 11
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 3.157894736842105e-06,
+ "loss": 1.1859,
+ "step": 12
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.421052631578948e-06,
+ "loss": 1.0249,
+ "step": 13
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.6842105263157896e-06,
+ "loss": 1.0523,
+ "step": 14
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.947368421052632e-06,
+ "loss": 0.9712,
+ "step": 15
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.210526315789474e-06,
+ "loss": 1.042,
+ "step": 16
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.473684210526316e-06,
+ "loss": 1.022,
+ "step": 17
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.736842105263158e-06,
+ "loss": 0.8822,
+ "step": 18
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5e-06,
+ "loss": 0.9929,
+ "step": 19
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.263157894736842e-06,
+ "loss": 1.0309,
+ "step": 20
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.526315789473685e-06,
+ "loss": 0.9417,
+ "step": 21
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.789473684210527e-06,
+ "loss": 0.9038,
+ "step": 22
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.0526315789473685e-06,
+ "loss": 0.97,
+ "step": 23
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.31578947368421e-06,
+ "loss": 0.8542,
+ "step": 24
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.578947368421054e-06,
+ "loss": 0.851,
+ "step": 25
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.842105263157896e-06,
+ "loss": 0.9293,
+ "step": 26
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.1052631578947375e-06,
+ "loss": 0.9345,
+ "step": 27
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.368421052631579e-06,
+ "loss": 0.8899,
+ "step": 28
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.631578947368423e-06,
+ "loss": 0.9293,
+ "step": 29
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.894736842105265e-06,
+ "loss": 0.9103,
+ "step": 30
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.157894736842106e-06,
+ "loss": 0.9227,
+ "step": 31
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.421052631578948e-06,
+ "loss": 0.8916,
+ "step": 32
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.68421052631579e-06,
+ "loss": 0.8897,
+ "step": 33
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.947368421052632e-06,
+ "loss": 0.8868,
+ "step": 34
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.210526315789474e-06,
+ "loss": 0.8503,
+ "step": 35
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.473684210526315e-06,
+ "loss": 0.8485,
+ "step": 36
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.736842105263159e-06,
+ "loss": 0.6967,
+ "step": 37
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1e-05,
+ "loss": 0.8237,
+ "step": 38
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0263157894736844e-05,
+ "loss": 0.8865,
+ "step": 39
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0526315789473684e-05,
+ "loss": 0.8603,
+ "step": 40
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0789473684210528e-05,
+ "loss": 0.7898,
+ "step": 41
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.105263157894737e-05,
+ "loss": 0.8426,
+ "step": 42
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1315789473684212e-05,
+ "loss": 0.8137,
+ "step": 43
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1578947368421053e-05,
+ "loss": 0.7421,
+ "step": 44
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1842105263157895e-05,
+ "loss": 0.7519,
+ "step": 45
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2105263157894737e-05,
+ "loss": 0.8385,
+ "step": 46
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.236842105263158e-05,
+ "loss": 0.8534,
+ "step": 47
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.263157894736842e-05,
+ "loss": 0.8638,
+ "step": 48
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2894736842105264e-05,
+ "loss": 0.8566,
+ "step": 49
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3157894736842108e-05,
+ "loss": 0.8702,
+ "step": 50
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3421052631578948e-05,
+ "loss": 0.843,
+ "step": 51
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3684210526315791e-05,
+ "loss": 0.7185,
+ "step": 52
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3947368421052631e-05,
+ "loss": 0.8169,
+ "step": 53
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4210526315789475e-05,
+ "loss": 0.8247,
+ "step": 54
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4473684210526317e-05,
+ "loss": 0.7503,
+ "step": 55
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4736842105263159e-05,
+ "loss": 0.7951,
+ "step": 56
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5000000000000002e-05,
+ "loss": 0.8168,
+ "step": 57
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5263157894736846e-05,
+ "loss": 0.8103,
+ "step": 58
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5526315789473686e-05,
+ "loss": 0.8028,
+ "step": 59
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.578947368421053e-05,
+ "loss": 0.8101,
+ "step": 60
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.605263157894737e-05,
+ "loss": 0.8169,
+ "step": 61
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.6315789473684213e-05,
+ "loss": 0.8336,
+ "step": 62
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.6578947368421053e-05,
+ "loss": 0.7442,
+ "step": 63
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.6842105263157896e-05,
+ "loss": 0.7358,
+ "step": 64
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7105263157894737e-05,
+ "loss": 0.8073,
+ "step": 65
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.736842105263158e-05,
+ "loss": 0.8087,
+ "step": 66
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.763157894736842e-05,
+ "loss": 0.7804,
+ "step": 67
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7894736842105264e-05,
+ "loss": 0.8268,
+ "step": 68
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8157894736842107e-05,
+ "loss": 0.8519,
+ "step": 69
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8421052631578947e-05,
+ "loss": 0.7764,
+ "step": 70
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.868421052631579e-05,
+ "loss": 0.8059,
+ "step": 71
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.894736842105263e-05,
+ "loss": 0.8386,
+ "step": 72
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9210526315789474e-05,
+ "loss": 0.7393,
+ "step": 73
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9473684210526318e-05,
+ "loss": 0.7383,
+ "step": 74
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9736842105263158e-05,
+ "loss": 0.8369,
+ "step": 75
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 2e-05,
+ "loss": 0.8179,
+ "step": 76
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999991635987763e-05,
+ "loss": 0.7915,
+ "step": 77
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999966543965042e-05,
+ "loss": 0.8061,
+ "step": 78
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999924723973812e-05,
+ "loss": 0.7748,
+ "step": 79
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999866176084026e-05,
+ "loss": 0.7575,
+ "step": 80
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999790900393628e-05,
+ "loss": 0.7751,
+ "step": 81
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999698897028537e-05,
+ "loss": 0.7951,
+ "step": 82
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999590166142656e-05,
+ "loss": 0.7897,
+ "step": 83
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999946470791787e-05,
+ "loss": 0.8516,
+ "step": 84
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999322522564048e-05,
+ "loss": 0.8401,
+ "step": 85
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999163610319035e-05,
+ "loss": 0.7802,
+ "step": 86
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9998987971448664e-05,
+ "loss": 0.7852,
+ "step": 87
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998795606246738e-05,
+ "loss": 0.7662,
+ "step": 88
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998586515035053e-05,
+ "loss": 0.8532,
+ "step": 89
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998360698163375e-05,
+ "loss": 0.7588,
+ "step": 90
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999811815600945e-05,
+ "loss": 0.7607,
+ "step": 91
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9997858888978997e-05,
+ "loss": 0.7544,
+ "step": 92
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999758289750573e-05,
+ "loss": 0.8162,
+ "step": 93
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999729018205132e-05,
+ "loss": 0.7875,
+ "step": 94
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996980743105427e-05,
+ "loss": 0.8886,
+ "step": 95
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999665458118568e-05,
+ "loss": 0.835,
+ "step": 96
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999631169683768e-05,
+ "loss": 0.7994,
+ "step": 97
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9995952090635007e-05,
+ "loss": 0.8543,
+ "step": 98
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9995575763179213e-05,
+ "loss": 0.777,
+ "step": 99
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999518271509982e-05,
+ "loss": 0.7948,
+ "step": 100
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999477294705431e-05,
+ "loss": 0.8141,
+ "step": 101
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999434645972816e-05,
+ "loss": 0.7906,
+ "step": 102
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999390325383479e-05,
+ "loss": 0.8265,
+ "step": 103
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9993443330115592e-05,
+ "loss": 0.8766,
+ "step": 104
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9992966689339936e-05,
+ "loss": 0.7439,
+ "step": 105
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9992473332305145e-05,
+ "loss": 0.8201,
+ "step": 106
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9991963259836504e-05,
+ "loss": 0.7336,
+ "step": 107
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9991436472787267e-05,
+ "loss": 0.8224,
+ "step": 108
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9990892972038646e-05,
+ "loss": 0.7889,
+ "step": 109
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9990332758499805e-05,
+ "loss": 0.8431,
+ "step": 110
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9989755833107875e-05,
+ "loss": 0.7188,
+ "step": 111
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.998916219682794e-05,
+ "loss": 0.8327,
+ "step": 112
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998855185065303e-05,
+ "loss": 0.7889,
+ "step": 113
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998792479560414e-05,
+ "loss": 0.8218,
+ "step": 114
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9987281032730206e-05,
+ "loss": 0.7737,
+ "step": 115
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9986620563108117e-05,
+ "loss": 0.7578,
+ "step": 116
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9985943387842704e-05,
+ "loss": 0.8361,
+ "step": 117
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9985249508066754e-05,
+ "loss": 0.8202,
+ "step": 118
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9984538924940987e-05,
+ "loss": 0.7849,
+ "step": 119
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998381163965407e-05,
+ "loss": 0.8505,
+ "step": 120
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9983067653422603e-05,
+ "loss": 0.8631,
+ "step": 121
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9982306967491136e-05,
+ "loss": 0.7784,
+ "step": 122
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998152958313214e-05,
+ "loss": 0.7732,
+ "step": 123
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9980735501646026e-05,
+ "loss": 0.7643,
+ "step": 124
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997992472436114e-05,
+ "loss": 0.8322,
+ "step": 125
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9979097252633748e-05,
+ "loss": 0.779,
+ "step": 126
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9978253087848046e-05,
+ "loss": 0.8101,
+ "step": 127
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9977392231416158e-05,
+ "loss": 0.809,
+ "step": 128
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9976514684778124e-05,
+ "loss": 0.8142,
+ "step": 129
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9975620449401913e-05,
+ "loss": 0.7599,
+ "step": 130
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997470952678339e-05,
+ "loss": 0.7742,
+ "step": 131
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9973781918446363e-05,
+ "loss": 0.8018,
+ "step": 132
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9972837625942533e-05,
+ "loss": 0.7887,
+ "step": 133
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997187665085151e-05,
+ "loss": 0.7791,
+ "step": 134
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997089899478082e-05,
+ "loss": 0.7995,
+ "step": 135
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9969904659365887e-05,
+ "loss": 0.7661,
+ "step": 136
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9968893646270037e-05,
+ "loss": 0.8295,
+ "step": 137
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.99678659571845e-05,
+ "loss": 0.8073,
+ "step": 138
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9966821593828393e-05,
+ "loss": 0.8292,
+ "step": 139
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996576055794873e-05,
+ "loss": 0.7534,
+ "step": 140
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996468285132041e-05,
+ "loss": 0.7811,
+ "step": 141
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9963588475746233e-05,
+ "loss": 0.8424,
+ "step": 142
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9962477433056864e-05,
+ "loss": 0.7954,
+ "step": 143
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996134972511086e-05,
+ "loss": 0.7963,
+ "step": 144
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996020535379466e-05,
+ "loss": 0.7631,
+ "step": 145
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9959044321022563e-05,
+ "loss": 0.8193,
+ "step": 146
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9957866628736756e-05,
+ "loss": 0.7911,
+ "step": 147
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9956672278907273e-05,
+ "loss": 0.8448,
+ "step": 148
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9955461273532037e-05,
+ "loss": 0.7566,
+ "step": 149
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9954233614636817e-05,
+ "loss": 0.8123,
+ "step": 150
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.995298930427524e-05,
+ "loss": 0.7874,
+ "step": 151
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.99517283445288e-05,
+ "loss": 0.768,
+ "step": 152
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9950450737506825e-05,
+ "loss": 0.7528,
+ "step": 153
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9949156485346502e-05,
+ "loss": 0.8067,
+ "step": 154
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994784559021286e-05,
+ "loss": 0.8375,
+ "step": 155
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9946518054298768e-05,
+ "loss": 0.8129,
+ "step": 156
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994517387982493e-05,
+ "loss": 0.7765,
+ "step": 157
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9943813069039883e-05,
+ "loss": 0.8144,
+ "step": 158
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994243562422e-05,
+ "loss": 0.7728,
+ "step": 159
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9941041547669467e-05,
+ "loss": 0.8061,
+ "step": 160
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9939630841720302e-05,
+ "loss": 0.7803,
+ "step": 161
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.993820350873234e-05,
+ "loss": 0.7117,
+ "step": 162
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.993675955109322e-05,
+ "loss": 0.7394,
+ "step": 163
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.993529897121841e-05,
+ "loss": 0.7994,
+ "step": 164
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.993382177155116e-05,
+ "loss": 0.7255,
+ "step": 165
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.993232795456254e-05,
+ "loss": 0.7427,
+ "step": 166
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9930817522751403e-05,
+ "loss": 0.7559,
+ "step": 167
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9929290478644412e-05,
+ "loss": 0.7892,
+ "step": 168
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9927746824796003e-05,
+ "loss": 0.7877,
+ "step": 169
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992618656378841e-05,
+ "loss": 0.826,
+ "step": 170
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9924609698231637e-05,
+ "loss": 0.7785,
+ "step": 171
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992301623076347e-05,
+ "loss": 0.7688,
+ "step": 172
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9921406164049467e-05,
+ "loss": 0.7849,
+ "step": 173
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991977950078295e-05,
+ "loss": 0.7868,
+ "step": 174
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9918136243685003e-05,
+ "loss": 0.7601,
+ "step": 175
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9916476395504473e-05,
+ "loss": 0.757,
+ "step": 176
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991479995901796e-05,
+ "loss": 0.7602,
+ "step": 177
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991310693702981e-05,
+ "loss": 0.8142,
+ "step": 178
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991139733237211e-05,
+ "loss": 0.7908,
+ "step": 179
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9909671147904693e-05,
+ "loss": 0.7681,
+ "step": 180
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9907928386515126e-05,
+ "loss": 0.8557,
+ "step": 181
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.990616905111871e-05,
+ "loss": 0.8016,
+ "step": 182
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9904393144658455e-05,
+ "loss": 0.7459,
+ "step": 183
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9902600670105107e-05,
+ "loss": 0.7695,
+ "step": 184
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9900791630457122e-05,
+ "loss": 0.7501,
+ "step": 185
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9898966028740662e-05,
+ "loss": 0.8202,
+ "step": 186
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9897123868009606e-05,
+ "loss": 0.7029,
+ "step": 187
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9895265151345516e-05,
+ "loss": 0.8524,
+ "step": 188
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9893389881857665e-05,
+ "loss": 0.8094,
+ "step": 189
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9891498062683005e-05,
+ "loss": 0.768,
+ "step": 190
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9889589696986173e-05,
+ "loss": 0.8333,
+ "step": 191
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9887664787959495e-05,
+ "loss": 0.7184,
+ "step": 192
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.988572333882296e-05,
+ "loss": 0.7137,
+ "step": 193
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9883765352824224e-05,
+ "loss": 0.7936,
+ "step": 194
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9881790833238615e-05,
+ "loss": 0.7713,
+ "step": 195
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9879799783369117e-05,
+ "loss": 0.7762,
+ "step": 196
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987779220654636e-05,
+ "loss": 0.7531,
+ "step": 197
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9875768106128627e-05,
+ "loss": 0.7284,
+ "step": 198
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987372748550183e-05,
+ "loss": 0.7663,
+ "step": 199
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987167034807953e-05,
+ "loss": 0.7214,
+ "step": 200
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9869596697302912e-05,
+ "loss": 0.7447,
+ "step": 201
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9867506536640782e-05,
+ "loss": 0.7819,
+ "step": 202
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9865399869589565e-05,
+ "loss": 0.685,
+ "step": 203
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9863276699673305e-05,
+ "loss": 0.7767,
+ "step": 204
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9861137030443638e-05,
+ "loss": 0.7879,
+ "step": 205
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985898086547981e-05,
+ "loss": 0.7345,
+ "step": 206
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9856808208388663e-05,
+ "loss": 0.8273,
+ "step": 207
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985461906280462e-05,
+ "loss": 0.7524,
+ "step": 208
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9852413432389685e-05,
+ "loss": 0.759,
+ "step": 209
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985019132083345e-05,
+ "loss": 0.793,
+ "step": 210
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9847952731853067e-05,
+ "loss": 0.8179,
+ "step": 211
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9845697669193248e-05,
+ "loss": 0.7947,
+ "step": 212
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.984342613662627e-05,
+ "loss": 0.7698,
+ "step": 213
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.984113813795196e-05,
+ "loss": 0.7553,
+ "step": 214
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9838833676997687e-05,
+ "loss": 0.811,
+ "step": 215
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9836512757618355e-05,
+ "loss": 0.738,
+ "step": 216
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.983417538369641e-05,
+ "loss": 0.738,
+ "step": 217
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9831821559141817e-05,
+ "loss": 0.8366,
+ "step": 218
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9829451287892048e-05,
+ "loss": 0.7353,
+ "step": 219
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9827064573912113e-05,
+ "loss": 0.833,
+ "step": 220
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9824661421194506e-05,
+ "loss": 0.7176,
+ "step": 221
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9822241833759226e-05,
+ "loss": 0.763,
+ "step": 222
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9819805815653768e-05,
+ "loss": 0.7626,
+ "step": 223
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9817353370953105e-05,
+ "loss": 0.8586,
+ "step": 224
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9814884503759698e-05,
+ "loss": 0.8328,
+ "step": 225
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9812399218203467e-05,
+ "loss": 0.8083,
+ "step": 226
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9809897518441813e-05,
+ "loss": 0.7878,
+ "step": 227
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.980737940865958e-05,
+ "loss": 0.7842,
+ "step": 228
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9804844893069063e-05,
+ "loss": 0.831,
+ "step": 229
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9802293975910016e-05,
+ "loss": 0.7448,
+ "step": 230
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.979972666144961e-05,
+ "loss": 0.8183,
+ "step": 231
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9797142953982463e-05,
+ "loss": 0.755,
+ "step": 232
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9794542857830605e-05,
+ "loss": 0.7549,
+ "step": 233
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.979192637734348e-05,
+ "loss": 0.7699,
+ "step": 234
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9789293516897947e-05,
+ "loss": 0.7959,
+ "step": 235
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9786644280898258e-05,
+ "loss": 0.788,
+ "step": 236
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9783978673776067e-05,
+ "loss": 0.7227,
+ "step": 237
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9781296699990398e-05,
+ "loss": 0.7571,
+ "step": 238
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.977859836402767e-05,
+ "loss": 0.7468,
+ "step": 239
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9775883670401667e-05,
+ "loss": 0.7511,
+ "step": 240
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9773152623653535e-05,
+ "loss": 0.7805,
+ "step": 241
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.977040522835177e-05,
+ "loss": 0.8516,
+ "step": 242
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9767641489092232e-05,
+ "loss": 0.7001,
+ "step": 243
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.97648614104981e-05,
+ "loss": 0.7487,
+ "step": 244
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.97620649972199e-05,
+ "loss": 0.7447,
+ "step": 245
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9759252253935485e-05,
+ "loss": 0.7712,
+ "step": 246
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.975642318535001e-05,
+ "loss": 0.7241,
+ "step": 247
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.975357779619595e-05,
+ "loss": 0.8578,
+ "step": 248
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9750716091233083e-05,
+ "loss": 0.7485,
+ "step": 249
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.974783807524847e-05,
+ "loss": 0.8312,
+ "step": 250
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.974494375305647e-05,
+ "loss": 0.7677,
+ "step": 251
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.974203312949871e-05,
+ "loss": 0.7512,
+ "step": 252
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9739106209444087e-05,
+ "loss": 0.7521,
+ "step": 253
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9736162997788757e-05,
+ "loss": 0.7333,
+ "step": 254
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9733203499456136e-05,
+ "loss": 0.7389,
+ "step": 255
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9730227719396883e-05,
+ "loss": 0.7762,
+ "step": 256
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9727235662588882e-05,
+ "loss": 0.7512,
+ "step": 257
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9724227334037255e-05,
+ "loss": 0.7057,
+ "step": 258
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9721202738774346e-05,
+ "loss": 0.7984,
+ "step": 259
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9718161881859703e-05,
+ "loss": 0.822,
+ "step": 260
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9715104768380078e-05,
+ "loss": 0.762,
+ "step": 261
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.971203140344942e-05,
+ "loss": 0.7968,
+ "step": 262
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9708941792208857e-05,
+ "loss": 0.7697,
+ "step": 263
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9705835939826704e-05,
+ "loss": 0.7772,
+ "step": 264
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9702713851498436e-05,
+ "loss": 0.7581,
+ "step": 265
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9699575532446688e-05,
+ "loss": 0.7487,
+ "step": 266
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.969642098792125e-05,
+ "loss": 0.6632,
+ "step": 267
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9693250223199053e-05,
+ "loss": 0.7478,
+ "step": 268
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9690063243584153e-05,
+ "loss": 0.8053,
+ "step": 269
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9686860054407745e-05,
+ "loss": 0.7582,
+ "step": 270
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.968364066102813e-05,
+ "loss": 0.752,
+ "step": 271
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9680405068830716e-05,
+ "loss": 0.8279,
+ "step": 272
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.967715328322801e-05,
+ "loss": 0.7099,
+ "step": 273
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.967388530965961e-05,
+ "loss": 0.7939,
+ "step": 274
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.967060115359218e-05,
+ "loss": 0.8033,
+ "step": 275
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9667300820519478e-05,
+ "loss": 0.8189,
+ "step": 276
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.96639843159623e-05,
+ "loss": 0.7836,
+ "step": 277
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9660651645468504e-05,
+ "loss": 0.7983,
+ "step": 278
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.965730281461299e-05,
+ "loss": 0.7193,
+ "step": 279
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9653937828997694e-05,
+ "loss": 0.7305,
+ "step": 280
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.965055669425157e-05,
+ "loss": 0.782,
+ "step": 281
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9647159416030588e-05,
+ "loss": 0.8005,
+ "step": 282
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9643746000017722e-05,
+ "loss": 0.7896,
+ "step": 283
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9640316451922943e-05,
+ "loss": 0.7741,
+ "step": 284
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9636870777483207e-05,
+ "loss": 0.7602,
+ "step": 285
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9633408982462453e-05,
+ "loss": 0.7443,
+ "step": 286
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9629931072651572e-05,
+ "loss": 0.7595,
+ "step": 287
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9626437053868423e-05,
+ "loss": 0.788,
+ "step": 288
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.962292693195781e-05,
+ "loss": 0.753,
+ "step": 289
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9619400712791473e-05,
+ "loss": 0.7471,
+ "step": 290
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.961585840226808e-05,
+ "loss": 0.7739,
+ "step": 291
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9612300006313216e-05,
+ "loss": 0.7573,
+ "step": 292
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9608725530879376e-05,
+ "loss": 0.7548,
+ "step": 293
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.960513498194595e-05,
+ "loss": 0.7171,
+ "step": 294
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9601528365519218e-05,
+ "loss": 0.7282,
+ "step": 295
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9597905687632336e-05,
+ "loss": 0.7544,
+ "step": 296
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.959426695434533e-05,
+ "loss": 0.7981,
+ "step": 297
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9590612171745076e-05,
+ "loss": 0.8163,
+ "step": 298
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.958694134594531e-05,
+ "loss": 0.7772,
+ "step": 299
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.95832544830866e-05,
+ "loss": 0.7725,
+ "step": 300
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9579551589336333e-05,
+ "loss": 0.7704,
+ "step": 301
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9575832670888723e-05,
+ "loss": 0.7451,
+ "step": 302
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.957209773396478e-05,
+ "loss": 0.8181,
+ "step": 303
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.956834678481232e-05,
+ "loss": 0.7521,
+ "step": 304
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9564579829705945e-05,
+ "loss": 0.8028,
+ "step": 305
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9560796874947016e-05,
+ "loss": 0.6878,
+ "step": 306
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9556997926863676e-05,
+ "loss": 0.7599,
+ "step": 307
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9553182991810812e-05,
+ "loss": 0.6992,
+ "step": 308
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.954935207617006e-05,
+ "loss": 0.7747,
+ "step": 309
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.954550518634978e-05,
+ "loss": 0.7928,
+ "step": 310
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9541642328785065e-05,
+ "loss": 0.7757,
+ "step": 311
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9537763509937707e-05,
+ "loss": 0.7845,
+ "step": 312
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9533868736296205e-05,
+ "loss": 0.7239,
+ "step": 313
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9529958014375748e-05,
+ "loss": 0.7212,
+ "step": 314
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.95260313507182e-05,
+ "loss": 0.7527,
+ "step": 315
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.952208875189209e-05,
+ "loss": 0.7617,
+ "step": 316
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9518130224492612e-05,
+ "loss": 0.7371,
+ "step": 317
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9514155775141602e-05,
+ "loss": 0.766,
+ "step": 318
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9510165410487524e-05,
+ "loss": 0.7363,
+ "step": 319
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.950615913720547e-05,
+ "loss": 0.7198,
+ "step": 320
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9502136961997144e-05,
+ "loss": 0.7836,
+ "step": 321
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9498098891590855e-05,
+ "loss": 0.7969,
+ "step": 322
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9494044932741488e-05,
+ "loss": 0.7675,
+ "step": 323
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.948997509223052e-05,
+ "loss": 0.7349,
+ "step": 324
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9485889376865998e-05,
+ "loss": 0.6925,
+ "step": 325
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9481787793482503e-05,
+ "loss": 0.7556,
+ "step": 326
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9477670348941182e-05,
+ "loss": 0.7757,
+ "step": 327
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9473537050129704e-05,
+ "loss": 0.8299,
+ "step": 328
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9469387903962263e-05,
+ "loss": 0.7939,
+ "step": 329
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.946522291737956e-05,
+ "loss": 0.6782,
+ "step": 330
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9461042097348796e-05,
+ "loss": 0.7904,
+ "step": 331
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9456845450863654e-05,
+ "loss": 0.7802,
+ "step": 332
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9452632984944295e-05,
+ "loss": 0.7697,
+ "step": 333
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9448404706637344e-05,
+ "loss": 0.7577,
+ "step": 334
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9444160623015877e-05,
+ "loss": 0.7912,
+ "step": 335
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.94399007411794e-05,
+ "loss": 0.835,
+ "step": 336
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9435625068253863e-05,
+ "loss": 0.7906,
+ "step": 337
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9431333611391615e-05,
+ "loss": 0.7534,
+ "step": 338
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.942702637777142e-05,
+ "loss": 0.8155,
+ "step": 339
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9422703374598425e-05,
+ "loss": 0.7933,
+ "step": 340
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9418364609104166e-05,
+ "loss": 0.7518,
+ "step": 341
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9414010088546535e-05,
+ "loss": 0.7631,
+ "step": 342
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.940963982020979e-05,
+ "loss": 0.7871,
+ "step": 343
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9405253811404522e-05,
+ "loss": 0.8125,
+ "step": 344
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9400852069467657e-05,
+ "loss": 0.7913,
+ "step": 345
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9396434601762444e-05,
+ "loss": 0.7237,
+ "step": 346
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.939200141567843e-05,
+ "loss": 0.7541,
+ "step": 347
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.938755251863146e-05,
+ "loss": 0.7267,
+ "step": 348
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9383087918063662e-05,
+ "loss": 0.7884,
+ "step": 349
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.937860762144343e-05,
+ "loss": 0.6844,
+ "step": 350
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9374111636265412e-05,
+ "loss": 0.6843,
+ "step": 351
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9369599970050506e-05,
+ "loss": 0.7885,
+ "step": 352
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9365072630345838e-05,
+ "loss": 0.7332,
+ "step": 353
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9360529624724754e-05,
+ "loss": 0.7237,
+ "step": 354
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9355970960786803e-05,
+ "loss": 0.7703,
+ "step": 355
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.935139664615773e-05,
+ "loss": 0.7239,
+ "step": 356
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.934680668848946e-05,
+ "loss": 0.8067,
+ "step": 357
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9342201095460083e-05,
+ "loss": 0.7498,
+ "step": 358
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.933757987477385e-05,
+ "loss": 0.7512,
+ "step": 359
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.933294303416115e-05,
+ "loss": 0.8152,
+ "step": 360
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9328290581378497e-05,
+ "loss": 0.7584,
+ "step": 361
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.932362252420853e-05,
+ "loss": 0.7165,
+ "step": 362
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9318938870459984e-05,
+ "loss": 0.7321,
+ "step": 363
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9314239627967688e-05,
+ "loss": 0.8046,
+ "step": 364
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9309524804592545e-05,
+ "loss": 0.773,
+ "step": 365
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9304794408221524e-05,
+ "loss": 0.6992,
+ "step": 366
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9300048446767643e-05,
+ "loss": 0.75,
+ "step": 367
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.929528692816996e-05,
+ "loss": 0.7391,
+ "step": 368
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.929050986039355e-05,
+ "loss": 0.7818,
+ "step": 369
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9285717251429507e-05,
+ "loss": 0.7756,
+ "step": 370
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.928090910929492e-05,
+ "loss": 0.7725,
+ "step": 371
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.927608544203286e-05,
+ "loss": 0.7654,
+ "step": 372
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9271246257712368e-05,
+ "loss": 0.7185,
+ "step": 373
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.926639156442845e-05,
+ "loss": 0.7626,
+ "step": 374
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9261521370302035e-05,
+ "loss": 0.7868,
+ "step": 375
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.925663568348001e-05,
+ "loss": 0.7442,
+ "step": 376
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9251734512135155e-05,
+ "loss": 0.7183,
+ "step": 377
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9246817864466166e-05,
+ "loss": 0.8302,
+ "step": 378
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9241885748697618e-05,
+ "loss": 0.7701,
+ "step": 379
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9236938173079974e-05,
+ "loss": 0.7761,
+ "step": 380
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9231975145889543e-05,
+ "loss": 0.8344,
+ "step": 381
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9226996675428497e-05,
+ "loss": 0.8058,
+ "step": 382
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9222002770024827e-05,
+ "loss": 0.7717,
+ "step": 383
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.921699343803235e-05,
+ "loss": 0.7319,
+ "step": 384
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9211968687830693e-05,
+ "loss": 0.6841,
+ "step": 385
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9206928527825268e-05,
+ "loss": 0.7784,
+ "step": 386
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.920187296644727e-05,
+ "loss": 0.749,
+ "step": 387
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.919680201215365e-05,
+ "loss": 0.7448,
+ "step": 388
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9191715673427113e-05,
+ "loss": 0.6588,
+ "step": 389
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.91866139587761e-05,
+ "loss": 0.7127,
+ "step": 390
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.918149687673478e-05,
+ "loss": 0.772,
+ "step": 391
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9176364435863005e-05,
+ "loss": 0.7335,
+ "step": 392
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9171216644746345e-05,
+ "loss": 0.7289,
+ "step": 393
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9166053511996035e-05,
+ "loss": 0.6897,
+ "step": 394
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9160875046248974e-05,
+ "loss": 0.6907,
+ "step": 395
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.915568125616772e-05,
+ "loss": 0.6756,
+ "step": 396
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9150472150440443e-05,
+ "loss": 0.7353,
+ "step": 397
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9145247737780963e-05,
+ "loss": 0.6975,
+ "step": 398
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9140008026928683e-05,
+ "loss": 0.7623,
+ "step": 399
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9134753026648605e-05,
+ "loss": 0.835,
+ "step": 400
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9129482745731308e-05,
+ "loss": 0.8391,
+ "step": 401
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9124197192992927e-05,
+ "loss": 0.7046,
+ "step": 402
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.911889637727515e-05,
+ "loss": 0.788,
+ "step": 403
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9113580307445196e-05,
+ "loss": 0.7555,
+ "step": 404
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9108248992395797e-05,
+ "loss": 0.7918,
+ "step": 405
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.910290244104519e-05,
+ "loss": 0.7241,
+ "step": 406
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9097540662337102e-05,
+ "loss": 0.7386,
+ "step": 407
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9092163665240726e-05,
+ "loss": 0.7517,
+ "step": 408
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9086771458750716e-05,
+ "loss": 0.7325,
+ "step": 409
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9081364051887173e-05,
+ "loss": 0.8032,
+ "step": 410
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9075941453695617e-05,
+ "loss": 0.7595,
+ "step": 411
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9070503673246982e-05,
+ "loss": 0.7593,
+ "step": 412
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9065050719637604e-05,
+ "loss": 0.7957,
+ "step": 413
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9059582601989196e-05,
+ "loss": 0.7264,
+ "step": 414
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9054099329448835e-05,
+ "loss": 0.7653,
+ "step": 415
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.904860091118896e-05,
+ "loss": 0.737,
+ "step": 416
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.904308735640733e-05,
+ "loss": 0.7475,
+ "step": 417
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9037558674327036e-05,
+ "loss": 0.7614,
+ "step": 418
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9032014874196476e-05,
+ "loss": 0.6836,
+ "step": 419
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9026455965289326e-05,
+ "loss": 0.7826,
+ "step": 420
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9020881956904543e-05,
+ "loss": 0.7697,
+ "step": 421
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.901529285836635e-05,
+ "loss": 0.6963,
+ "step": 422
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.900968867902419e-05,
+ "loss": 0.7663,
+ "step": 423
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9004069428252765e-05,
+ "loss": 0.7662,
+ "step": 424
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.899843511545196e-05,
+ "loss": 0.7335,
+ "step": 425
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8992785750046866e-05,
+ "loss": 0.8018,
+ "step": 426
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.898712134148776e-05,
+ "loss": 0.7281,
+ "step": 427
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8981441899250082e-05,
+ "loss": 0.8408,
+ "step": 428
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.897574743283441e-05,
+ "loss": 0.7273,
+ "step": 429
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8970037951766468e-05,
+ "loss": 0.7767,
+ "step": 430
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.896431346559708e-05,
+ "loss": 0.7087,
+ "step": 431
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.895857398390219e-05,
+ "loss": 0.7702,
+ "step": 432
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.895281951628281e-05,
+ "loss": 0.8321,
+ "step": 433
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8947050072365033e-05,
+ "loss": 0.7266,
+ "step": 434
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.89412656618e-05,
+ "loss": 0.768,
+ "step": 435
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8935466294263876e-05,
+ "loss": 0.7135,
+ "step": 436
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8929651979457874e-05,
+ "loss": 0.7761,
+ "step": 437
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.892382272710818e-05,
+ "loss": 0.804,
+ "step": 438
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.891797854696599e-05,
+ "loss": 0.7114,
+ "step": 439
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.891211944880746e-05,
+ "loss": 0.701,
+ "step": 440
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.89062454424337e-05,
+ "loss": 0.7672,
+ "step": 441
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8900356537670768e-05,
+ "loss": 0.7597,
+ "step": 442
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8894452744369632e-05,
+ "loss": 0.7473,
+ "step": 443
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8888534072406182e-05,
+ "loss": 0.6375,
+ "step": 444
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.888260053168118e-05,
+ "loss": 0.764,
+ "step": 445
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8876652132120267e-05,
+ "loss": 0.778,
+ "step": 446
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.887068888367394e-05,
+ "loss": 0.7661,
+ "step": 447
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8864710796317537e-05,
+ "loss": 0.7281,
+ "step": 448
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.885871788005122e-05,
+ "loss": 0.738,
+ "step": 449
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8852710144899948e-05,
+ "loss": 0.7972,
+ "step": 450
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.884668760091348e-05,
+ "loss": 0.7988,
+ "step": 451
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8840650258166343e-05,
+ "loss": 0.7064,
+ "step": 452
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8834598126757812e-05,
+ "loss": 0.7231,
+ "step": 453
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8828531216811912e-05,
+ "loss": 0.6869,
+ "step": 454
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.882244953847739e-05,
+ "loss": 0.811,
+ "step": 455
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8816353101927686e-05,
+ "loss": 0.7511,
+ "step": 456
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.881024191736093e-05,
+ "loss": 0.7783,
+ "step": 457
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8804115994999933e-05,
+ "loss": 0.7196,
+ "step": 458
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8797975345092153e-05,
+ "loss": 0.7194,
+ "step": 459
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8791819977909682e-05,
+ "loss": 0.778,
+ "step": 460
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8785649903749236e-05,
+ "loss": 0.7429,
+ "step": 461
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8779465132932124e-05,
+ "loss": 0.7562,
+ "step": 462
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.877326567580425e-05,
+ "loss": 0.6943,
+ "step": 463
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8767051542736084e-05,
+ "loss": 0.7429,
+ "step": 464
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8760822744122637e-05,
+ "loss": 0.7951,
+ "step": 465
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8754579290383465e-05,
+ "loss": 0.7621,
+ "step": 466
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8748321191962625e-05,
+ "loss": 0.7538,
+ "step": 467
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8742048459328684e-05,
+ "loss": 0.6886,
+ "step": 468
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8735761102974684e-05,
+ "loss": 0.8096,
+ "step": 469
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.872945913341813e-05,
+ "loss": 0.7334,
+ "step": 470
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8723142561200973e-05,
+ "loss": 0.7219,
+ "step": 471
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.871681139688959e-05,
+ "loss": 0.7755,
+ "step": 472
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8710465651074766e-05,
+ "loss": 0.7438,
+ "step": 473
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8704105334371683e-05,
+ "loss": 0.7262,
+ "step": 474
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8697730457419893e-05,
+ "loss": 0.7319,
+ "step": 475
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8691341030883306e-05,
+ "loss": 0.7693,
+ "step": 476
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.868493706545017e-05,
+ "loss": 0.7069,
+ "step": 477
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8678518571833054e-05,
+ "loss": 0.7373,
+ "step": 478
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.867208556076883e-05,
+ "loss": 0.7819,
+ "step": 479
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.866563804301866e-05,
+ "loss": 0.7415,
+ "step": 480
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8659176029367957e-05,
+ "loss": 0.7775,
+ "step": 481
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8652699530626398e-05,
+ "loss": 0.769,
+ "step": 482
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8646208557627888e-05,
+ "loss": 0.7343,
+ "step": 483
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8639703121230542e-05,
+ "loss": 0.8211,
+ "step": 484
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8633183232316668e-05,
+ "loss": 0.7205,
+ "step": 485
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8626648901792753e-05,
+ "loss": 0.7409,
+ "step": 486
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8620100140589443e-05,
+ "loss": 0.747,
+ "step": 487
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8613536959661518e-05,
+ "loss": 0.7558,
+ "step": 488
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8606959369987885e-05,
+ "loss": 0.7578,
+ "step": 489
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.860036738257155e-05,
+ "loss": 0.7971,
+ "step": 490
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8593761008439614e-05,
+ "loss": 0.7264,
+ "step": 491
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8587140258643225e-05,
+ "loss": 0.7748,
+ "step": 492
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8580505144257595e-05,
+ "loss": 0.7242,
+ "step": 493
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.857385567638196e-05,
+ "loss": 0.7837,
+ "step": 494
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8567191866139564e-05,
+ "loss": 0.7684,
+ "step": 495
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8560513724677644e-05,
+ "loss": 0.7384,
+ "step": 496
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8553821263167417e-05,
+ "loss": 0.7723,
+ "step": 497
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8547114492804042e-05,
+ "loss": 0.7529,
+ "step": 498
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8540393424806627e-05,
+ "loss": 0.7458,
+ "step": 499
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8533658070418186e-05,
+ "loss": 0.7551,
+ "step": 500
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.852690844090564e-05,
+ "loss": 0.7839,
+ "step": 501
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8520144547559783e-05,
+ "loss": 0.8113,
+ "step": 502
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8513366401695277e-05,
+ "loss": 0.7797,
+ "step": 503
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8506574014650613e-05,
+ "loss": 0.6893,
+ "step": 504
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.849976739778812e-05,
+ "loss": 0.7333,
+ "step": 505
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.849294656249392e-05,
+ "loss": 0.8052,
+ "step": 506
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8486111520177925e-05,
+ "loss": 0.7489,
+ "step": 507
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8479262282273805e-05,
+ "loss": 0.7933,
+ "step": 508
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8472398860238988e-05,
+ "loss": 0.8037,
+ "step": 509
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8465521265554623e-05,
+ "loss": 0.6913,
+ "step": 510
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8458629509725565e-05,
+ "loss": 0.8008,
+ "step": 511
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.845172360428036e-05,
+ "loss": 0.7656,
+ "step": 512
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8444803560771222e-05,
+ "loss": 0.7666,
+ "step": 513
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8437869390774022e-05,
+ "loss": 0.722,
+ "step": 514
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8430921105888254e-05,
+ "loss": 0.7097,
+ "step": 515
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.842395871773702e-05,
+ "loss": 0.7689,
+ "step": 516
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.841698223796703e-05,
+ "loss": 0.7445,
+ "step": 517
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8409991678248548e-05,
+ "loss": 0.6917,
+ "step": 518
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8402987050275408e-05,
+ "loss": 0.6971,
+ "step": 519
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.839596836576496e-05,
+ "loss": 0.7321,
+ "step": 520
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.838893563645808e-05,
+ "loss": 0.7042,
+ "step": 521
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8381888874119135e-05,
+ "loss": 0.7571,
+ "step": 522
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.837482809053597e-05,
+ "loss": 0.7878,
+ "step": 523
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8367753297519874e-05,
+ "loss": 0.7135,
+ "step": 524
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8360664506905587e-05,
+ "loss": 0.7129,
+ "step": 525
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8353561730551243e-05,
+ "loss": 0.7638,
+ "step": 526
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8346444980338394e-05,
+ "loss": 0.7766,
+ "step": 527
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8339314268171953e-05,
+ "loss": 0.7465,
+ "step": 528
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8332169605980193e-05,
+ "loss": 0.7516,
+ "step": 529
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8325011005714726e-05,
+ "loss": 0.6397,
+ "step": 530
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8317838479350473e-05,
+ "loss": 0.7531,
+ "step": 531
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.831065203888565e-05,
+ "loss": 0.7786,
+ "step": 532
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.830345169634176e-05,
+ "loss": 0.6881,
+ "step": 533
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8296237463763543e-05,
+ "loss": 0.715,
+ "step": 534
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8289009353218995e-05,
+ "loss": 0.6861,
+ "step": 535
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8281767376799315e-05,
+ "loss": 0.7402,
+ "step": 536
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.82745115466189e-05,
+ "loss": 0.7246,
+ "step": 537
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8267241874815312e-05,
+ "loss": 0.7718,
+ "step": 538
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8259958373549285e-05,
+ "loss": 0.7599,
+ "step": 539
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8252661055004672e-05,
+ "loss": 0.7761,
+ "step": 540
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.824534993138845e-05,
+ "loss": 0.6704,
+ "step": 541
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.823802501493068e-05,
+ "loss": 0.7119,
+ "step": 542
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8230686317884502e-05,
+ "loss": 0.7753,
+ "step": 543
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8223333852526107e-05,
+ "loss": 0.7905,
+ "step": 544
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8215967631154718e-05,
+ "loss": 0.6533,
+ "step": 545
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8208587666092566e-05,
+ "loss": 0.7377,
+ "step": 546
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8201193969684875e-05,
+ "loss": 0.6425,
+ "step": 547
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.819378655429984e-05,
+ "loss": 0.7681,
+ "step": 548
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8186365432328605e-05,
+ "loss": 0.7723,
+ "step": 549
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8178930616185236e-05,
+ "loss": 0.6642,
+ "step": 550
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8171482118306712e-05,
+ "loss": 0.7154,
+ "step": 551
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8164019951152903e-05,
+ "loss": 0.739,
+ "step": 552
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8156544127206536e-05,
+ "loss": 0.6675,
+ "step": 553
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8149054658973194e-05,
+ "loss": 0.7325,
+ "step": 554
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.814155155898127e-05,
+ "loss": 0.7725,
+ "step": 555
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8134034839781972e-05,
+ "loss": 0.7817,
+ "step": 556
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8126504513949287e-05,
+ "loss": 0.7206,
+ "step": 557
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.811896059407996e-05,
+ "loss": 0.7656,
+ "step": 558
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.811140309279348e-05,
+ "loss": 0.7385,
+ "step": 559
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8103832022732056e-05,
+ "loss": 0.7615,
+ "step": 560
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8096247396560585e-05,
+ "loss": 0.7303,
+ "step": 561
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8088649226966658e-05,
+ "loss": 0.7372,
+ "step": 562
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8081037526660505e-05,
+ "loss": 0.7408,
+ "step": 563
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8073412308375e-05,
+ "loss": 0.7297,
+ "step": 564
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8065773584865623e-05,
+ "loss": 0.8023,
+ "step": 565
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.805812136891046e-05,
+ "loss": 0.7318,
+ "step": 566
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8050455673310142e-05,
+ "loss": 0.7345,
+ "step": 567
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.804277651088787e-05,
+ "loss": 0.795,
+ "step": 568
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8035083894489364e-05,
+ "loss": 0.7147,
+ "step": 569
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8027377836982857e-05,
+ "loss": 0.7494,
+ "step": 570
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8019658351259056e-05,
+ "loss": 0.7274,
+ "step": 571
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8011925450231135e-05,
+ "loss": 0.7522,
+ "step": 572
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8004179146834712e-05,
+ "loss": 0.7966,
+ "step": 573
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7996419454027828e-05,
+ "loss": 0.7288,
+ "step": 574
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.79886463847909e-05,
+ "loss": 0.7354,
+ "step": 575
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7980859952126755e-05,
+ "loss": 0.7446,
+ "step": 576
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7973060169060546e-05,
+ "loss": 0.7817,
+ "step": 577
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.796524704863977e-05,
+ "loss": 0.7171,
+ "step": 578
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.795742060393424e-05,
+ "loss": 0.7084,
+ "step": 579
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7949580848036045e-05,
+ "loss": 0.6413,
+ "step": 580
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7941727794059553e-05,
+ "loss": 0.7867,
+ "step": 581
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7933861455141365e-05,
+ "loss": 0.7014,
+ "step": 582
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.792598184444032e-05,
+ "loss": 0.6806,
+ "step": 583
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7918088975137445e-05,
+ "loss": 0.765,
+ "step": 584
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7910182860435954e-05,
+ "loss": 0.7644,
+ "step": 585
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7902263513561214e-05,
+ "loss": 0.7475,
+ "step": 586
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7894330947760728e-05,
+ "loss": 0.8067,
+ "step": 587
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.788638517630411e-05,
+ "loss": 0.7561,
+ "step": 588
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7878426212483066e-05,
+ "loss": 0.7512,
+ "step": 589
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7870454069611375e-05,
+ "loss": 0.8004,
+ "step": 590
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.786246876102485e-05,
+ "loss": 0.6689,
+ "step": 591
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7854470300081338e-05,
+ "loss": 0.7269,
+ "step": 592
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7846458700160685e-05,
+ "loss": 0.6416,
+ "step": 593
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7838433974664714e-05,
+ "loss": 0.7942,
+ "step": 594
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7830396137017202e-05,
+ "loss": 0.7733,
+ "step": 595
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7822345200663874e-05,
+ "loss": 0.7458,
+ "step": 596
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7814281179072345e-05,
+ "loss": 0.7177,
+ "step": 597
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.780620408573213e-05,
+ "loss": 0.6977,
+ "step": 598
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7798113934154623e-05,
+ "loss": 0.7716,
+ "step": 599
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7790010737873035e-05,
+ "loss": 0.7018,
+ "step": 600
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.778189451044242e-05,
+ "loss": 0.7459,
+ "step": 601
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.777376526543962e-05,
+ "loss": 0.7979,
+ "step": 602
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.776562301646326e-05,
+ "loss": 0.7275,
+ "step": 603
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.775746777713371e-05,
+ "loss": 0.7528,
+ "step": 604
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7749299561093076e-05,
+ "loss": 0.7484,
+ "step": 605
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7741118382005172e-05,
+ "loss": 0.7497,
+ "step": 606
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7732924253555487e-05,
+ "loss": 0.6986,
+ "step": 607
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.772471718945119e-05,
+ "loss": 0.698,
+ "step": 608
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7716497203421072e-05,
+ "loss": 0.7101,
+ "step": 609
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7708264309215543e-05,
+ "loss": 0.6875,
+ "step": 610
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.770001852060661e-05,
+ "loss": 0.7388,
+ "step": 611
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7691759851387852e-05,
+ "loss": 0.775,
+ "step": 612
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7683488315374386e-05,
+ "loss": 0.6943,
+ "step": 613
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7675203926402863e-05,
+ "loss": 0.7513,
+ "step": 614
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7666906698331426e-05,
+ "loss": 0.769,
+ "step": 615
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.76585966450397e-05,
+ "loss": 0.6769,
+ "step": 616
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.765027378042876e-05,
+ "loss": 0.6702,
+ "step": 617
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.764193811842112e-05,
+ "loss": 0.7519,
+ "step": 618
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.763358967296069e-05,
+ "loss": 0.7719,
+ "step": 619
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.762522845801277e-05,
+ "loss": 0.7817,
+ "step": 620
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7616854487564024e-05,
+ "loss": 0.7633,
+ "step": 621
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7608467775622448e-05,
+ "loss": 0.6792,
+ "step": 622
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7600068336217355e-05,
+ "loss": 0.7423,
+ "step": 623
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.759165618339935e-05,
+ "loss": 0.7373,
+ "step": 624
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7583231331240297e-05,
+ "loss": 0.7439,
+ "step": 625
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7574793793833312e-05,
+ "loss": 0.7567,
+ "step": 626
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7566343585292732e-05,
+ "loss": 0.7741,
+ "step": 627
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7557880719754082e-05,
+ "loss": 0.6776,
+ "step": 628
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7549405211374072e-05,
+ "loss": 0.7383,
+ "step": 629
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7540917074330543e-05,
+ "loss": 0.7796,
+ "step": 630
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7532416322822473e-05,
+ "loss": 0.7773,
+ "step": 631
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7523902971069944e-05,
+ "loss": 0.6947,
+ "step": 632
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7515377033314113e-05,
+ "loss": 0.7188,
+ "step": 633
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7506838523817183e-05,
+ "loss": 0.8095,
+ "step": 634
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7498287456862398e-05,
+ "loss": 0.7519,
+ "step": 635
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7489723846754e-05,
+ "loss": 0.6933,
+ "step": 636
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7481147707817227e-05,
+ "loss": 0.68,
+ "step": 637
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7472559054398256e-05,
+ "loss": 0.7605,
+ "step": 638
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.746395790086421e-05,
+ "loss": 0.7019,
+ "step": 639
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7455344261603116e-05,
+ "loss": 0.6643,
+ "step": 640
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7446718151023894e-05,
+ "loss": 0.7476,
+ "step": 641
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7438079583556326e-05,
+ "loss": 0.71,
+ "step": 642
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7429428573651023e-05,
+ "loss": 0.6693,
+ "step": 643
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.742076513577942e-05,
+ "loss": 0.7234,
+ "step": 644
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7412089284433734e-05,
+ "loss": 0.7251,
+ "step": 645
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7403401034126954e-05,
+ "loss": 0.7391,
+ "step": 646
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7394700399392804e-05,
+ "loss": 0.7672,
+ "step": 647
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7385987394785723e-05,
+ "loss": 0.7922,
+ "step": 648
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7377262034880854e-05,
+ "loss": 0.7401,
+ "step": 649
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7368524334273998e-05,
+ "loss": 0.7217,
+ "step": 650
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7359774307581602e-05,
+ "loss": 0.726,
+ "step": 651
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7351011969440728e-05,
+ "loss": 0.7817,
+ "step": 652
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.734223733450904e-05,
+ "loss": 0.7589,
+ "step": 653
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.733345041746477e-05,
+ "loss": 0.7265,
+ "step": 654
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.732465123300669e-05,
+ "loss": 0.6829,
+ "step": 655
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7315839795854103e-05,
+ "loss": 0.7399,
+ "step": 656
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.73070161207468e-05,
+ "loss": 0.7467,
+ "step": 657
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7298180222445043e-05,
+ "loss": 0.7532,
+ "step": 658
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7289332115729554e-05,
+ "loss": 0.7153,
+ "step": 659
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7280471815401453e-05,
+ "loss": 0.695,
+ "step": 660
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7271599336282285e-05,
+ "loss": 0.7498,
+ "step": 661
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7262714693213948e-05,
+ "loss": 0.705,
+ "step": 662
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7253817901058698e-05,
+ "loss": 0.7947,
+ "step": 663
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7244908974699112e-05,
+ "loss": 0.6685,
+ "step": 664
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7235987929038056e-05,
+ "loss": 0.7129,
+ "step": 665
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7227054778998682e-05,
+ "loss": 0.7036,
+ "step": 666
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.721810953952439e-05,
+ "loss": 0.7373,
+ "step": 667
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.720915222557879e-05,
+ "loss": 0.7118,
+ "step": 668
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7200182852145705e-05,
+ "loss": 0.6344,
+ "step": 669
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7191201434229118e-05,
+ "loss": 0.7264,
+ "step": 670
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7182207986853176e-05,
+ "loss": 0.7638,
+ "step": 671
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7173202525062133e-05,
+ "loss": 0.7011,
+ "step": 672
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.716418506392035e-05,
+ "loss": 0.7662,
+ "step": 673
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7155155618512255e-05,
+ "loss": 0.7271,
+ "step": 674
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7146114203942332e-05,
+ "loss": 0.7562,
+ "step": 675
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7137060835335076e-05,
+ "loss": 0.7919,
+ "step": 676
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.712799552783499e-05,
+ "loss": 0.7463,
+ "step": 677
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7118918296606536e-05,
+ "loss": 0.7298,
+ "step": 678
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7109829156834133e-05,
+ "loss": 0.6717,
+ "step": 679
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7100728123722115e-05,
+ "loss": 0.7353,
+ "step": 680
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7091615212494714e-05,
+ "loss": 0.7525,
+ "step": 681
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7082490438396027e-05,
+ "loss": 0.7923,
+ "step": 682
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.707335381669e-05,
+ "loss": 0.7602,
+ "step": 683
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7064205362660397e-05,
+ "loss": 0.7548,
+ "step": 684
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7055045091610772e-05,
+ "loss": 0.7965,
+ "step": 685
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7045873018864452e-05,
+ "loss": 0.6743,
+ "step": 686
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.70366891597645e-05,
+ "loss": 0.7396,
+ "step": 687
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.70274935296737e-05,
+ "loss": 0.7722,
+ "step": 688
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.701828614397452e-05,
+ "loss": 0.7542,
+ "step": 689
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.7009067018069106e-05,
+ "loss": 0.6523,
+ "step": 690
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6999836167379228e-05,
+ "loss": 0.7061,
+ "step": 691
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6990593607346276e-05,
+ "loss": 0.7394,
+ "step": 692
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.698133935343123e-05,
+ "loss": 0.7396,
+ "step": 693
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6972073421114626e-05,
+ "loss": 0.7186,
+ "step": 694
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6962795825896537e-05,
+ "loss": 0.7011,
+ "step": 695
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.695350658329655e-05,
+ "loss": 0.7761,
+ "step": 696
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6944205708853735e-05,
+ "loss": 0.7179,
+ "step": 697
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6934893218126607e-05,
+ "loss": 0.7114,
+ "step": 698
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6925569126693135e-05,
+ "loss": 0.7096,
+ "step": 699
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6916233450150675e-05,
+ "loss": 0.7638,
+ "step": 700
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6906886204115974e-05,
+ "loss": 0.7486,
+ "step": 701
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6897527404225128e-05,
+ "loss": 0.7269,
+ "step": 702
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6888157066133555e-05,
+ "loss": 0.789,
+ "step": 703
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6878775205515983e-05,
+ "loss": 0.7078,
+ "step": 704
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6869381838066412e-05,
+ "loss": 0.7141,
+ "step": 705
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.685997697949809e-05,
+ "loss": 0.7488,
+ "step": 706
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6850560645543485e-05,
+ "loss": 0.7192,
+ "step": 707
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.684113285195427e-05,
+ "loss": 0.7769,
+ "step": 708
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6831693614501272e-05,
+ "loss": 0.6811,
+ "step": 709
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6822242948974475e-05,
+ "loss": 0.7344,
+ "step": 710
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6812780871182976e-05,
+ "loss": 0.6152,
+ "step": 711
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6803307396954962e-05,
+ "loss": 0.7213,
+ "step": 712
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6793822542137683e-05,
+ "loss": 0.7073,
+ "step": 713
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6784326322597427e-05,
+ "loss": 0.6996,
+ "step": 714
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6774818754219495e-05,
+ "loss": 0.7209,
+ "step": 715
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.676529985290817e-05,
+ "loss": 0.6318,
+ "step": 716
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.675576963458669e-05,
+ "loss": 0.7636,
+ "step": 717
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.674622811519723e-05,
+ "loss": 0.7564,
+ "step": 718
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.673667531070087e-05,
+ "loss": 0.8221,
+ "step": 719
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.672711123707756e-05,
+ "loss": 0.7961,
+ "step": 720
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6717535910326108e-05,
+ "loss": 0.7032,
+ "step": 721
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6707949346464147e-05,
+ "loss": 0.7394,
+ "step": 722
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.66983515615281e-05,
+ "loss": 0.7069,
+ "step": 723
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6688742571573167e-05,
+ "loss": 0.7781,
+ "step": 724
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6679122392673284e-05,
+ "loss": 0.689,
+ "step": 725
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6669491040921123e-05,
+ "loss": 0.6727,
+ "step": 726
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.665984853242802e-05,
+ "loss": 0.7625,
+ "step": 727
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6650194883323995e-05,
+ "loss": 0.672,
+ "step": 728
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.664053010975769e-05,
+ "loss": 0.8106,
+ "step": 729
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6630854227896364e-05,
+ "loss": 0.7444,
+ "step": 730
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6621167253925857e-05,
+ "loss": 0.7207,
+ "step": 731
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6611469204050566e-05,
+ "loss": 0.735,
+ "step": 732
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6601760094493408e-05,
+ "loss": 0.7443,
+ "step": 733
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6592039941495803e-05,
+ "loss": 0.719,
+ "step": 734
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6582308761317652e-05,
+ "loss": 0.757,
+ "step": 735
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6572566570237293e-05,
+ "loss": 0.697,
+ "step": 736
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.656281338455149e-05,
+ "loss": 0.7055,
+ "step": 737
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6553049220575392e-05,
+ "loss": 0.7871,
+ "step": 738
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6543274094642522e-05,
+ "loss": 0.7265,
+ "step": 739
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6533488023104732e-05,
+ "loss": 0.8002,
+ "step": 740
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6523691022332184e-05,
+ "loss": 0.6925,
+ "step": 741
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6513883108713326e-05,
+ "loss": 0.811,
+ "step": 742
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6504064298654864e-05,
+ "loss": 0.748,
+ "step": 743
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6494234608581722e-05,
+ "loss": 0.7385,
+ "step": 744
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6484394054937033e-05,
+ "loss": 0.6935,
+ "step": 745
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.64745426541821e-05,
+ "loss": 0.7673,
+ "step": 746
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6464680422796367e-05,
+ "loss": 0.6886,
+ "step": 747
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.64548073772774e-05,
+ "loss": 0.7274,
+ "step": 748
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6444923534140856e-05,
+ "loss": 0.6874,
+ "step": 749
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.643502890992045e-05,
+ "loss": 0.8254,
+ "step": 750
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.642512352116793e-05,
+ "loss": 0.7353,
+ "step": 751
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.641520738445306e-05,
+ "loss": 0.7479,
+ "step": 752
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6405280516363576e-05,
+ "loss": 0.7013,
+ "step": 753
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.639534293350517e-05,
+ "loss": 0.7278,
+ "step": 754
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6385394652501444e-05,
+ "loss": 0.713,
+ "step": 755
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.637543568999392e-05,
+ "loss": 0.7211,
+ "step": 756
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6365466062641974e-05,
+ "loss": 0.7149,
+ "step": 757
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6355485787122814e-05,
+ "loss": 0.8077,
+ "step": 758
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6345494880131476e-05,
+ "loss": 0.6856,
+ "step": 759
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.633549335838077e-05,
+ "loss": 0.7151,
+ "step": 760
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.632548123860127e-05,
+ "loss": 0.6993,
+ "step": 761
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6315458537541272e-05,
+ "loss": 0.6942,
+ "step": 762
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6305425271966772e-05,
+ "loss": 0.7544,
+ "step": 763
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6295381458661447e-05,
+ "loss": 0.7041,
+ "step": 764
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6285327114426608e-05,
+ "loss": 0.7259,
+ "step": 765
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.627526225608119e-05,
+ "loss": 0.7307,
+ "step": 766
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6265186900461715e-05,
+ "loss": 0.6805,
+ "step": 767
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6255101064422252e-05,
+ "loss": 0.6151,
+ "step": 768
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6245004764834423e-05,
+ "loss": 0.6578,
+ "step": 769
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6234898018587336e-05,
+ "loss": 0.7607,
+ "step": 770
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6224780842587586e-05,
+ "loss": 0.7222,
+ "step": 771
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6214653253759206e-05,
+ "loss": 0.7214,
+ "step": 772
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6204515269043653e-05,
+ "loss": 0.795,
+ "step": 773
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.619436690539977e-05,
+ "loss": 0.7407,
+ "step": 774
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6184208179803772e-05,
+ "loss": 0.7442,
+ "step": 775
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.617403910924919e-05,
+ "loss": 0.6876,
+ "step": 776
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6163859710746878e-05,
+ "loss": 0.7289,
+ "step": 777
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6153670001324955e-05,
+ "loss": 0.6998,
+ "step": 778
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6143469998028792e-05,
+ "loss": 0.7277,
+ "step": 779
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.613325971792098e-05,
+ "loss": 0.6848,
+ "step": 780
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6123039178081303e-05,
+ "loss": 0.7825,
+ "step": 781
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.61128083956067e-05,
+ "loss": 0.7389,
+ "step": 782
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.610256738761125e-05,
+ "loss": 0.7472,
+ "step": 783
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.609231617122614e-05,
+ "loss": 0.7298,
+ "step": 784
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6082054763599627e-05,
+ "loss": 0.7048,
+ "step": 785
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.607178318189702e-05,
+ "loss": 0.76,
+ "step": 786
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6061501443300644e-05,
+ "loss": 0.7616,
+ "step": 787
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.605120956500982e-05,
+ "loss": 0.6793,
+ "step": 788
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6040907564240823e-05,
+ "loss": 0.7988,
+ "step": 789
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.6030595458226872e-05,
+ "loss": 0.6935,
+ "step": 790
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.6020273264218076e-05,
+ "loss": 0.6739,
+ "step": 791
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.600994099948143e-05,
+ "loss": 0.7007,
+ "step": 792
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5999598681300766e-05,
+ "loss": 0.7535,
+ "step": 793
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5989246326976746e-05,
+ "loss": 0.6999,
+ "step": 794
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5978883953826814e-05,
+ "loss": 0.7164,
+ "step": 795
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.596851157918517e-05,
+ "loss": 0.732,
+ "step": 796
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5958129220402745e-05,
+ "loss": 0.7284,
+ "step": 797
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.594773689484718e-05,
+ "loss": 0.7066,
+ "step": 798
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.593733461990278e-05,
+ "loss": 0.777,
+ "step": 799
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5926922412970495e-05,
+ "loss": 0.6643,
+ "step": 800
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5916500291467892e-05,
+ "loss": 0.7571,
+ "step": 801
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5906068272829122e-05,
+ "loss": 0.7561,
+ "step": 802
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.589562637450489e-05,
+ "loss": 0.6655,
+ "step": 803
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5885174613962427e-05,
+ "loss": 0.8119,
+ "step": 804
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5874713008685464e-05,
+ "loss": 0.765,
+ "step": 805
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5864241576174205e-05,
+ "loss": 0.6736,
+ "step": 806
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5853760333945282e-05,
+ "loss": 0.676,
+ "step": 807
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5843269299531748e-05,
+ "loss": 0.6909,
+ "step": 808
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.583276849048303e-05,
+ "loss": 0.7342,
+ "step": 809
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5822257924364902e-05,
+ "loss": 0.6885,
+ "step": 810
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.581173761875947e-05,
+ "loss": 0.6646,
+ "step": 811
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.580120759126512e-05,
+ "loss": 0.7565,
+ "step": 812
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.579066785949652e-05,
+ "loss": 0.6838,
+ "step": 813
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5780118441084544e-05,
+ "loss": 0.7319,
+ "step": 814
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5769559353676297e-05,
+ "loss": 0.7276,
+ "step": 815
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5758990614935042e-05,
+ "loss": 0.6894,
+ "step": 816
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5748412242540193e-05,
+ "loss": 0.7782,
+ "step": 817
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5737824254187275e-05,
+ "loss": 0.7232,
+ "step": 818
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5727226667587898e-05,
+ "loss": 0.7293,
+ "step": 819
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5716619500469735e-05,
+ "loss": 0.7244,
+ "step": 820
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5706002770576478e-05,
+ "loss": 0.7549,
+ "step": 821
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5695376495667828e-05,
+ "loss": 0.7259,
+ "step": 822
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.568474069351943e-05,
+ "loss": 0.7685,
+ "step": 823
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5674095381922886e-05,
+ "loss": 0.7859,
+ "step": 824
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5663440578685706e-05,
+ "loss": 0.7323,
+ "step": 825
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.565277630163126e-05,
+ "loss": 0.7022,
+ "step": 826
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.564210256859878e-05,
+ "loss": 0.7688,
+ "step": 827
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.563141939744332e-05,
+ "loss": 0.6538,
+ "step": 828
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.56207268060357e-05,
+ "loss": 0.7676,
+ "step": 829
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5610024812262526e-05,
+ "loss": 0.6682,
+ "step": 830
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.559931343402612e-05,
+ "loss": 0.7629,
+ "step": 831
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5588592689244492e-05,
+ "loss": 0.8038,
+ "step": 832
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5577862595851335e-05,
+ "loss": 0.7049,
+ "step": 833
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.556712317179598e-05,
+ "loss": 0.7101,
+ "step": 834
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5556374435043353e-05,
+ "loss": 0.7148,
+ "step": 835
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5545616403573977e-05,
+ "loss": 0.7397,
+ "step": 836
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5534849095383902e-05,
+ "loss": 0.7384,
+ "step": 837
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5524072528484716e-05,
+ "loss": 0.7111,
+ "step": 838
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5513286720903488e-05,
+ "loss": 0.6912,
+ "step": 839
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5502491690682732e-05,
+ "loss": 0.7212,
+ "step": 840
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5491687455880414e-05,
+ "loss": 0.6764,
+ "step": 841
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5480874034569876e-05,
+ "loss": 0.7154,
+ "step": 842
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.547005144483984e-05,
+ "loss": 0.6909,
+ "step": 843
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5459219704794358e-05,
+ "loss": 0.6112,
+ "step": 844
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5448378832552796e-05,
+ "loss": 0.7361,
+ "step": 845
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5437528846249783e-05,
+ "loss": 0.7308,
+ "step": 846
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.542666976403521e-05,
+ "loss": 0.6926,
+ "step": 847
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.541580160407417e-05,
+ "loss": 0.7326,
+ "step": 848
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5404924384546955e-05,
+ "loss": 0.7873,
+ "step": 849
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5394038123649e-05,
+ "loss": 0.649,
+ "step": 850
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5383142839590872e-05,
+ "loss": 0.7764,
+ "step": 851
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5372238550598225e-05,
+ "loss": 0.6581,
+ "step": 852
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.536132527491178e-05,
+ "loss": 0.6894,
+ "step": 853
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.53504030307873e-05,
+ "loss": 0.7471,
+ "step": 854
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5339471836495524e-05,
+ "loss": 0.7491,
+ "step": 855
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5328531710322198e-05,
+ "loss": 0.7239,
+ "step": 856
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5317582670567986e-05,
+ "loss": 0.74,
+ "step": 857
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5306624735548465e-05,
+ "loss": 0.7205,
+ "step": 858
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.52956579235941e-05,
+ "loss": 0.7507,
+ "step": 859
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.52846822530502e-05,
+ "loss": 0.7315,
+ "step": 860
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.527369774227689e-05,
+ "loss": 0.7803,
+ "step": 861
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.526270440964909e-05,
+ "loss": 0.7233,
+ "step": 862
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5251702273556475e-05,
+ "loss": 0.7377,
+ "step": 863
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5240691352403441e-05,
+ "loss": 0.7494,
+ "step": 864
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5229671664609088e-05,
+ "loss": 0.7653,
+ "step": 865
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.521864322860717e-05,
+ "loss": 0.696,
+ "step": 866
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5207606062846092e-05,
+ "loss": 0.7974,
+ "step": 867
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5196560185788842e-05,
+ "loss": 0.7025,
+ "step": 868
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5185505615912998e-05,
+ "loss": 0.7249,
+ "step": 869
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.517444237171067e-05,
+ "loss": 0.7724,
+ "step": 870
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5163370471688471e-05,
+ "loss": 0.7446,
+ "step": 871
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5152289934367516e-05,
+ "loss": 0.6557,
+ "step": 872
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5141200778283345e-05,
+ "loss": 0.6653,
+ "step": 873
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5130103021985929e-05,
+ "loss": 0.7967,
+ "step": 874
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5118996684039621e-05,
+ "loss": 0.7232,
+ "step": 875
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5107881783023138e-05,
+ "loss": 0.686,
+ "step": 876
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.50967583375295e-05,
+ "loss": 0.7378,
+ "step": 877
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5085626366166039e-05,
+ "loss": 0.7622,
+ "step": 878
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5074485887554351e-05,
+ "loss": 0.6785,
+ "step": 879
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5063336920330244e-05,
+ "loss": 0.7237,
+ "step": 880
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5052179483143752e-05,
+ "loss": 0.7566,
+ "step": 881
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5041013594659051e-05,
+ "loss": 0.7706,
+ "step": 882
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5029839273554469e-05,
+ "loss": 0.7093,
+ "step": 883
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5018656538522442e-05,
+ "loss": 0.7402,
+ "step": 884
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5007465408269477e-05,
+ "loss": 0.6275,
+ "step": 885
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4996265901516116e-05,
+ "loss": 0.688,
+ "step": 886
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4985058036996929e-05,
+ "loss": 0.7567,
+ "step": 887
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4973841833460457e-05,
+ "loss": 0.7034,
+ "step": 888
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4962617309669192e-05,
+ "loss": 0.7125,
+ "step": 889
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4951384484399542e-05,
+ "loss": 0.6969,
+ "step": 890
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4940143376441812e-05,
+ "loss": 0.7094,
+ "step": 891
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.492889400460015e-05,
+ "loss": 0.6931,
+ "step": 892
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.491763638769253e-05,
+ "loss": 0.7134,
+ "step": 893
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.490637054455073e-05,
+ "loss": 0.7068,
+ "step": 894
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4895096494020274e-05,
+ "loss": 0.7059,
+ "step": 895
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4883814254960418e-05,
+ "loss": 0.7223,
+ "step": 896
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4872523846244127e-05,
+ "loss": 0.7097,
+ "step": 897
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4861225286758018e-05,
+ "loss": 0.7931,
+ "step": 898
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4849918595402353e-05,
+ "loss": 0.7084,
+ "step": 899
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4838603791090989e-05,
+ "loss": 0.7286,
+ "step": 900
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4827280892751364e-05,
+ "loss": 0.7926,
+ "step": 901
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4815949919324443e-05,
+ "loss": 0.7257,
+ "step": 902
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4804610889764712e-05,
+ "loss": 0.7326,
+ "step": 903
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4793263823040127e-05,
+ "loss": 0.7804,
+ "step": 904
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4781908738132085e-05,
+ "loss": 0.7004,
+ "step": 905
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4770545654035403e-05,
+ "loss": 0.7194,
+ "step": 906
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4759174589758272e-05,
+ "loss": 0.6807,
+ "step": 907
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4747795564322243e-05,
+ "loss": 0.7616,
+ "step": 908
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.473640859676217e-05,
+ "loss": 0.6851,
+ "step": 909
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4725013706126207e-05,
+ "loss": 0.7324,
+ "step": 910
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.471361091147575e-05,
+ "loss": 0.7519,
+ "step": 911
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4702200231885425e-05,
+ "loss": 0.7597,
+ "step": 912
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4690781686443044e-05,
+ "loss": 0.6799,
+ "step": 913
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4679355294249575e-05,
+ "loss": 0.7063,
+ "step": 914
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4667921074419118e-05,
+ "loss": 0.9109,
+ "step": 915
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4656479046078862e-05,
+ "loss": 0.6954,
+ "step": 916
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4645029228369062e-05,
+ "loss": 0.7546,
+ "step": 917
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4633571640442998e-05,
+ "loss": 0.7376,
+ "step": 918
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4622106301466956e-05,
+ "loss": 0.7129,
+ "step": 919
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4610633230620179e-05,
+ "loss": 0.7309,
+ "step": 920
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4599152447094846e-05,
+ "loss": 0.6998,
+ "step": 921
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4587663970096048e-05,
+ "loss": 0.6711,
+ "step": 922
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4576167818841732e-05,
+ "loss": 0.8417,
+ "step": 923
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4564664012562686e-05,
+ "loss": 0.7627,
+ "step": 924
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4553152570502506e-05,
+ "loss": 0.7172,
+ "step": 925
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4541633511917567e-05,
+ "loss": 0.6909,
+ "step": 926
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.453010685607697e-05,
+ "loss": 0.7444,
+ "step": 927
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.451857262226254e-05,
+ "loss": 0.7009,
+ "step": 928
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4507030829768765e-05,
+ "loss": 0.7559,
+ "step": 929
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4495481497902789e-05,
+ "loss": 0.7428,
+ "step": 930
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4483924645984359e-05,
+ "loss": 0.7324,
+ "step": 931
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4472360293345807e-05,
+ "loss": 0.7708,
+ "step": 932
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4460788459332011e-05,
+ "loss": 0.6982,
+ "step": 933
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.444920916330036e-05,
+ "loss": 0.6758,
+ "step": 934
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4437622424620733e-05,
+ "loss": 0.6988,
+ "step": 935
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4426028262675451e-05,
+ "loss": 0.7192,
+ "step": 936
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.441442669685926e-05,
+ "loss": 0.7863,
+ "step": 937
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.440281774657929e-05,
+ "loss": 0.6634,
+ "step": 938
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4391201431255014e-05,
+ "loss": 0.7147,
+ "step": 939
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4379577770318234e-05,
+ "loss": 0.6696,
+ "step": 940
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.436794678321304e-05,
+ "loss": 0.7312,
+ "step": 941
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4356308489395783e-05,
+ "loss": 0.7132,
+ "step": 942
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4344662908335018e-05,
+ "loss": 0.7166,
+ "step": 943
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4333010059511505e-05,
+ "loss": 0.7151,
+ "step": 944
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4321349962418163e-05,
+ "loss": 0.7141,
+ "step": 945
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4309682636560027e-05,
+ "loss": 0.6664,
+ "step": 946
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4298008101454226e-05,
+ "loss": 0.7223,
+ "step": 947
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4286326376629956e-05,
+ "loss": 0.7222,
+ "step": 948
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4274637481628431e-05,
+ "loss": 0.7376,
+ "step": 949
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4262941436002862e-05,
+ "loss": 0.7515,
+ "step": 950
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.425123825931843e-05,
+ "loss": 0.7296,
+ "step": 951
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.423952797115223e-05,
+ "loss": 0.7029,
+ "step": 952
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4227810591093262e-05,
+ "loss": 0.6757,
+ "step": 953
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4216086138742395e-05,
+ "loss": 0.7553,
+ "step": 954
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4204354633712312e-05,
+ "loss": 0.6547,
+ "step": 955
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4192616095627506e-05,
+ "loss": 0.7446,
+ "step": 956
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4180870544124236e-05,
+ "loss": 0.7203,
+ "step": 957
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.416911799885049e-05,
+ "loss": 0.7809,
+ "step": 958
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4157358479465949e-05,
+ "loss": 0.7599,
+ "step": 959
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4145592005641967e-05,
+ "loss": 0.7771,
+ "step": 960
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.413381859706153e-05,
+ "loss": 0.6599,
+ "step": 961
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.412203827341923e-05,
+ "loss": 0.7281,
+ "step": 962
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4110251054421218e-05,
+ "loss": 0.6748,
+ "step": 963
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4098456959785178e-05,
+ "loss": 0.7118,
+ "step": 964
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4086656009240308e-05,
+ "loss": 0.6742,
+ "step": 965
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4074848222527262e-05,
+ "loss": 0.7773,
+ "step": 966
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4063033619398136e-05,
+ "loss": 0.7868,
+ "step": 967
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4051212219616427e-05,
+ "loss": 0.7136,
+ "step": 968
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4039384042957003e-05,
+ "loss": 0.7151,
+ "step": 969
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4027549109206064e-05,
+ "loss": 0.7788,
+ "step": 970
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4015707438161122e-05,
+ "loss": 0.6691,
+ "step": 971
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4003859049630943e-05,
+ "loss": 0.7017,
+ "step": 972
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.399200396343555e-05,
+ "loss": 0.7083,
+ "step": 973
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3980142199406155e-05,
+ "loss": 0.6739,
+ "step": 974
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3968273777385146e-05,
+ "loss": 0.7386,
+ "step": 975
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3956398717226055e-05,
+ "loss": 0.6874,
+ "step": 976
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.39445170387935e-05,
+ "loss": 0.6918,
+ "step": 977
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3932628761963192e-05,
+ "loss": 0.7531,
+ "step": 978
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3920733906621861e-05,
+ "loss": 0.7332,
+ "step": 979
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3908832492667261e-05,
+ "loss": 0.6303,
+ "step": 980
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3896924540008098e-05,
+ "loss": 0.7639,
+ "step": 981
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3885010068564026e-05,
+ "loss": 0.6884,
+ "step": 982
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3873089098265606e-05,
+ "loss": 0.7703,
+ "step": 983
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.386116164905426e-05,
+ "loss": 0.7685,
+ "step": 984
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3849227740882257e-05,
+ "loss": 0.6787,
+ "step": 985
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3837287393712666e-05,
+ "loss": 0.7025,
+ "step": 986
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3825340627519337e-05,
+ "loss": 0.7497,
+ "step": 987
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3813387462286839e-05,
+ "loss": 0.7557,
+ "step": 988
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3801427918010461e-05,
+ "loss": 0.7946,
+ "step": 989
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.378946201469616e-05,
+ "loss": 0.7426,
+ "step": 990
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3777489772360521e-05,
+ "loss": 0.7485,
+ "step": 991
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3765511211030753e-05,
+ "loss": 0.7442,
+ "step": 992
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3753526350744611e-05,
+ "loss": 0.8228,
+ "step": 993
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3741535211550402e-05,
+ "loss": 0.8018,
+ "step": 994
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3729537813506932e-05,
+ "loss": 0.664,
+ "step": 995
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3717534176683485e-05,
+ "loss": 0.6951,
+ "step": 996
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3705524321159764e-05,
+ "loss": 0.7899,
+ "step": 997
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.369350826702589e-05,
+ "loss": 0.6793,
+ "step": 998
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3681486034382347e-05,
+ "loss": 0.7321,
+ "step": 999
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3669457643339956e-05,
+ "loss": 0.6275,
+ "step": 1000
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3657423114019837e-05,
+ "loss": 0.7294,
+ "step": 1001
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3645382466553381e-05,
+ "loss": 0.7588,
+ "step": 1002
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3633335721082215e-05,
+ "loss": 0.6977,
+ "step": 1003
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.362128289775816e-05,
+ "loss": 0.6995,
+ "step": 1004
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3609224016743213e-05,
+ "loss": 0.7721,
+ "step": 1005
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3597159098209493e-05,
+ "loss": 0.7281,
+ "step": 1006
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3585088162339233e-05,
+ "loss": 0.748,
+ "step": 1007
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.357301122932472e-05,
+ "loss": 0.7178,
+ "step": 1008
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3560928319368278e-05,
+ "loss": 0.6825,
+ "step": 1009
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3548839452682225e-05,
+ "loss": 0.6973,
+ "step": 1010
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.353674464948885e-05,
+ "loss": 0.701,
+ "step": 1011
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3524643930020374e-05,
+ "loss": 0.6997,
+ "step": 1012
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.35125373145189e-05,
+ "loss": 0.7767,
+ "step": 1013
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3500424823236413e-05,
+ "loss": 0.6611,
+ "step": 1014
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3488306476434714e-05,
+ "loss": 0.7465,
+ "step": 1015
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3476182294385398e-05,
+ "loss": 0.8132,
+ "step": 1016
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3464052297369838e-05,
+ "loss": 0.7148,
+ "step": 1017
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3451916505679115e-05,
+ "loss": 0.6836,
+ "step": 1018
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.343977493961401e-05,
+ "loss": 0.7502,
+ "step": 1019
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3427627619484968e-05,
+ "loss": 0.7362,
+ "step": 1020
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3415474565612059e-05,
+ "loss": 0.7326,
+ "step": 1021
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3403315798324935e-05,
+ "loss": 0.6254,
+ "step": 1022
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3391151337962813e-05,
+ "loss": 0.6952,
+ "step": 1023
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3378981204874432e-05,
+ "loss": 0.6705,
+ "step": 1024
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3366805419418026e-05,
+ "loss": 0.6478,
+ "step": 1025
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3354624001961267e-05,
+ "loss": 0.6997,
+ "step": 1026
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3342436972881272e-05,
+ "loss": 0.6544,
+ "step": 1027
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3330244352564527e-05,
+ "loss": 0.7257,
+ "step": 1028
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3318046161406876e-05,
+ "loss": 0.6675,
+ "step": 1029
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3305842419813488e-05,
+ "loss": 0.7425,
+ "step": 1030
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3293633148198812e-05,
+ "loss": 0.683,
+ "step": 1031
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3281418366986535e-05,
+ "loss": 0.6478,
+ "step": 1032
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3269198096609592e-05,
+ "loss": 0.7244,
+ "step": 1033
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3256972357510068e-05,
+ "loss": 0.763,
+ "step": 1034
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.324474117013921e-05,
+ "loss": 0.5932,
+ "step": 1035
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3232504554957385e-05,
+ "loss": 0.7454,
+ "step": 1036
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.322026253243403e-05,
+ "loss": 0.7296,
+ "step": 1037
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3208015123047623e-05,
+ "loss": 0.7184,
+ "step": 1038
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.319576234728567e-05,
+ "loss": 0.6558,
+ "step": 1039
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3183504225644643e-05,
+ "loss": 0.7235,
+ "step": 1040
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3171240778629952e-05,
+ "loss": 0.7098,
+ "step": 1041
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3158972026755927e-05,
+ "loss": 0.651,
+ "step": 1042
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3146697990545762e-05,
+ "loss": 0.6754,
+ "step": 1043
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3134418690531498e-05,
+ "loss": 0.7272,
+ "step": 1044
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3122134147253975e-05,
+ "loss": 0.7759,
+ "step": 1045
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3109844381262816e-05,
+ "loss": 0.7124,
+ "step": 1046
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3097549413116358e-05,
+ "loss": 0.7155,
+ "step": 1047
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3085249263381661e-05,
+ "loss": 0.7626,
+ "step": 1048
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3072943952634446e-05,
+ "loss": 0.71,
+ "step": 1049
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3060633501459065e-05,
+ "loss": 0.6449,
+ "step": 1050
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3048317930448472e-05,
+ "loss": 0.7252,
+ "step": 1051
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3035997260204187e-05,
+ "loss": 0.8045,
+ "step": 1052
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3023671511336253e-05,
+ "loss": 0.6678,
+ "step": 1053
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.301134070446321e-05,
+ "loss": 0.729,
+ "step": 1054
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2999004860212071e-05,
+ "loss": 0.6459,
+ "step": 1055
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2986663999218264e-05,
+ "loss": 0.7451,
+ "step": 1056
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2974318142125604e-05,
+ "loss": 0.5985,
+ "step": 1057
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2961967309586279e-05,
+ "loss": 0.7804,
+ "step": 1058
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2949611522260788e-05,
+ "loss": 0.729,
+ "step": 1059
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2937250800817922e-05,
+ "loss": 0.6245,
+ "step": 1060
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2924885165934728e-05,
+ "loss": 0.6134,
+ "step": 1061
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2912514638296472e-05,
+ "loss": 0.7461,
+ "step": 1062
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2900139238596598e-05,
+ "loss": 0.7602,
+ "step": 1063
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.288775898753671e-05,
+ "loss": 0.7697,
+ "step": 1064
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.287537390582652e-05,
+ "loss": 0.6862,
+ "step": 1065
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.286298401418382e-05,
+ "loss": 0.687,
+ "step": 1066
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2850589333334455e-05,
+ "loss": 0.6116,
+ "step": 1067
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2838189884012275e-05,
+ "loss": 0.7144,
+ "step": 1068
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2825785686959113e-05,
+ "loss": 0.7589,
+ "step": 1069
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2813376762924735e-05,
+ "loss": 0.7187,
+ "step": 1070
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2800963132666825e-05,
+ "loss": 0.7129,
+ "step": 1071
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.278854481695093e-05,
+ "loss": 0.6735,
+ "step": 1072
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.277612183655044e-05,
+ "loss": 0.688,
+ "step": 1073
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.276369421224655e-05,
+ "loss": 0.7438,
+ "step": 1074
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2751261964828214e-05,
+ "loss": 0.7739,
+ "step": 1075
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.273882511509213e-05,
+ "loss": 0.5997,
+ "step": 1076
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.272638368384269e-05,
+ "loss": 0.7333,
+ "step": 1077
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2713937691891953e-05,
+ "loss": 0.7239,
+ "step": 1078
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2701487160059601e-05,
+ "loss": 0.7352,
+ "step": 1079
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2689032109172914e-05,
+ "loss": 0.6482,
+ "step": 1080
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2676572560066736e-05,
+ "loss": 0.7916,
+ "step": 1081
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2664108533583428e-05,
+ "loss": 0.6773,
+ "step": 1082
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2651640050572845e-05,
+ "loss": 0.7356,
+ "step": 1083
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2639167131892294e-05,
+ "loss": 0.7084,
+ "step": 1084
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2626689798406506e-05,
+ "loss": 0.6952,
+ "step": 1085
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2614208070987592e-05,
+ "loss": 0.6386,
+ "step": 1086
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2601721970515024e-05,
+ "loss": 0.7488,
+ "step": 1087
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.258923151787557e-05,
+ "loss": 0.6701,
+ "step": 1088
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.25767367339633e-05,
+ "loss": 0.7588,
+ "step": 1089
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2564237639679512e-05,
+ "loss": 0.7211,
+ "step": 1090
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2551734255932728e-05,
+ "loss": 0.6789,
+ "step": 1091
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2539226603638633e-05,
+ "loss": 0.7169,
+ "step": 1092
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.252671470372006e-05,
+ "loss": 0.6923,
+ "step": 1093
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2514198577106948e-05,
+ "loss": 0.703,
+ "step": 1094
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.25016782447363e-05,
+ "loss": 0.7048,
+ "step": 1095
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2489153727552164e-05,
+ "loss": 0.7622,
+ "step": 1096
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.247662504650558e-05,
+ "loss": 0.7554,
+ "step": 1097
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2464092222554554e-05,
+ "loss": 0.6517,
+ "step": 1098
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2451555276664032e-05,
+ "loss": 0.6925,
+ "step": 1099
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2439014229805836e-05,
+ "loss": 0.7254,
+ "step": 1100
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2426469102958671e-05,
+ "loss": 0.7689,
+ "step": 1101
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2413919917108053e-05,
+ "loss": 0.6423,
+ "step": 1102
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2401366693246289e-05,
+ "loss": 0.6574,
+ "step": 1103
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2388809452372442e-05,
+ "loss": 0.6821,
+ "step": 1104
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2376248215492297e-05,
+ "loss": 0.7314,
+ "step": 1105
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2363683003618322e-05,
+ "loss": 0.7361,
+ "step": 1106
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.235111383776963e-05,
+ "loss": 0.6688,
+ "step": 1107
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.233854073897196e-05,
+ "loss": 0.7335,
+ "step": 1108
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2325963728257616e-05,
+ "loss": 0.7129,
+ "step": 1109
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2313382826665456e-05,
+ "loss": 0.615,
+ "step": 1110
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2300798055240836e-05,
+ "loss": 0.6797,
+ "step": 1111
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2288209435035605e-05,
+ "loss": 0.7145,
+ "step": 1112
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2275616987108028e-05,
+ "loss": 0.6863,
+ "step": 1113
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2263020732522786e-05,
+ "loss": 0.7051,
+ "step": 1114
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2250420692350921e-05,
+ "loss": 0.662,
+ "step": 1115
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2237816887669817e-05,
+ "loss": 0.7349,
+ "step": 1116
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2225209339563144e-05,
+ "loss": 0.6257,
+ "step": 1117
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2212598069120845e-05,
+ "loss": 0.694,
+ "step": 1118
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2199983097439079e-05,
+ "loss": 0.625,
+ "step": 1119
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2187364445620203e-05,
+ "loss": 0.6342,
+ "step": 1120
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2174742134772726e-05,
+ "loss": 0.6658,
+ "step": 1121
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2162116186011285e-05,
+ "loss": 0.726,
+ "step": 1122
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2149486620456599e-05,
+ "loss": 0.6691,
+ "step": 1123
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2136853459235427e-05,
+ "loss": 0.6447,
+ "step": 1124
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2124216723480565e-05,
+ "loss": 0.7263,
+ "step": 1125
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2111576434330767e-05,
+ "loss": 0.6962,
+ "step": 1126
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.209893261293074e-05,
+ "loss": 0.6578,
+ "step": 1127
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2086285280431103e-05,
+ "loss": 0.6731,
+ "step": 1128
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2073634457988342e-05,
+ "loss": 0.736,
+ "step": 1129
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2060980166764782e-05,
+ "loss": 0.6777,
+ "step": 1130
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2048322427928556e-05,
+ "loss": 0.7161,
+ "step": 1131
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2035661262653562e-05,
+ "loss": 0.6609,
+ "step": 1132
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2022996692119424e-05,
+ "loss": 0.7662,
+ "step": 1133
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2010328737511466e-05,
+ "loss": 0.6488,
+ "step": 1134
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1997657420020678e-05,
+ "loss": 0.7402,
+ "step": 1135
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1984982760843666e-05,
+ "loss": 0.6878,
+ "step": 1136
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1972304781182636e-05,
+ "loss": 0.7306,
+ "step": 1137
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.195962350224534e-05,
+ "loss": 0.6714,
+ "step": 1138
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1946938945245052e-05,
+ "loss": 0.6727,
+ "step": 1139
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1934251131400532e-05,
+ "loss": 0.678,
+ "step": 1140
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1921560081935986e-05,
+ "loss": 0.6445,
+ "step": 1141
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1908865818081032e-05,
+ "loss": 0.7038,
+ "step": 1142
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1896168361070664e-05,
+ "loss": 0.7862,
+ "step": 1143
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1883467732145224e-05,
+ "loss": 0.6302,
+ "step": 1144
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1870763952550347e-05,
+ "loss": 0.7335,
+ "step": 1145
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1858057043536955e-05,
+ "loss": 0.683,
+ "step": 1146
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1845347026361192e-05,
+ "loss": 0.7796,
+ "step": 1147
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1832633922284407e-05,
+ "loss": 0.7235,
+ "step": 1148
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1819917752573109e-05,
+ "loss": 0.6534,
+ "step": 1149
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1807198538498944e-05,
+ "loss": 0.7211,
+ "step": 1150
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.179447630133864e-05,
+ "loss": 0.6842,
+ "step": 1151
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1781751062373982e-05,
+ "loss": 0.6413,
+ "step": 1152
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1769022842891794e-05,
+ "loss": 0.708,
+ "step": 1153
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1756291664183858e-05,
+ "loss": 0.7201,
+ "step": 1154
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1743557547546928e-05,
+ "loss": 0.6646,
+ "step": 1155
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1730820514282669e-05,
+ "loss": 0.7069,
+ "step": 1156
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1718080585697617e-05,
+ "loss": 0.7486,
+ "step": 1157
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1705337783103155e-05,
+ "loss": 0.7431,
+ "step": 1158
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1692592127815475e-05,
+ "loss": 0.7432,
+ "step": 1159
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1679843641155544e-05,
+ "loss": 0.703,
+ "step": 1160
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1667092344449053e-05,
+ "loss": 0.7146,
+ "step": 1161
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1654338259026413e-05,
+ "loss": 0.6921,
+ "step": 1162
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1641581406222682e-05,
+ "loss": 0.6552,
+ "step": 1163
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1628821807377557e-05,
+ "loss": 0.7622,
+ "step": 1164
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1616059483835325e-05,
+ "loss": 0.6498,
+ "step": 1165
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1603294456944835e-05,
+ "loss": 0.6836,
+ "step": 1166
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1590526748059453e-05,
+ "loss": 0.7107,
+ "step": 1167
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1577756378537033e-05,
+ "loss": 0.6549,
+ "step": 1168
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1564983369739884e-05,
+ "loss": 0.6989,
+ "step": 1169
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.155220774303472e-05,
+ "loss": 0.5995,
+ "step": 1170
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1539429519792647e-05,
+ "loss": 0.6574,
+ "step": 1171
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1526648721389105e-05,
+ "loss": 0.7471,
+ "step": 1172
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1513865369203844e-05,
+ "loss": 0.6522,
+ "step": 1173
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1501079484620887e-05,
+ "loss": 0.7344,
+ "step": 1174
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1488291089028501e-05,
+ "loss": 0.7837,
+ "step": 1175
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1475500203819135e-05,
+ "loss": 0.7136,
+ "step": 1176
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1462706850389413e-05,
+ "loss": 0.7004,
+ "step": 1177
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1449911050140097e-05,
+ "loss": 0.6814,
+ "step": 1178
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1437112824476025e-05,
+ "loss": 0.7304,
+ "step": 1179
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.14243121948061e-05,
+ "loss": 0.7298,
+ "step": 1180
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1411509182543253e-05,
+ "loss": 0.7389,
+ "step": 1181
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1398703809104392e-05,
+ "loss": 0.6989,
+ "step": 1182
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1385896095910374e-05,
+ "loss": 0.7083,
+ "step": 1183
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1373086064385974e-05,
+ "loss": 0.7471,
+ "step": 1184
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1360273735959848e-05,
+ "loss": 0.7214,
+ "step": 1185
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1347459132064485e-05,
+ "loss": 0.6346,
+ "step": 1186
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1334642274136186e-05,
+ "loss": 0.7178,
+ "step": 1187
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1321823183615027e-05,
+ "loss": 0.7022,
+ "step": 1188
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.130900188194481e-05,
+ "loss": 0.7881,
+ "step": 1189
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1296178390573038e-05,
+ "loss": 0.7496,
+ "step": 1190
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1283352730950886e-05,
+ "loss": 0.7071,
+ "step": 1191
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1270524924533146e-05,
+ "loss": 0.6865,
+ "step": 1192
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1257694992778199e-05,
+ "loss": 0.6757,
+ "step": 1193
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1244862957147993e-05,
+ "loss": 0.6983,
+ "step": 1194
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1232028839107985e-05,
+ "loss": 0.6968,
+ "step": 1195
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1219192660127117e-05,
+ "loss": 0.6897,
+ "step": 1196
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1206354441677782e-05,
+ "loss": 0.7326,
+ "step": 1197
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1193514205235789e-05,
+ "loss": 0.6756,
+ "step": 1198
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1180671972280311e-05,
+ "loss": 0.7253,
+ "step": 1199
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1167827764293867e-05,
+ "loss": 0.6906,
+ "step": 1200
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.115498160276228e-05,
+ "loss": 0.7203,
+ "step": 1201
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1142133509174644e-05,
+ "loss": 0.7159,
+ "step": 1202
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1129283505023273e-05,
+ "loss": 0.6763,
+ "step": 1203
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1116431611803696e-05,
+ "loss": 0.6315,
+ "step": 1204
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1103577851014587e-05,
+ "loss": 0.6792,
+ "step": 1205
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.109072224415774e-05,
+ "loss": 0.6316,
+ "step": 1206
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1077864812738061e-05,
+ "loss": 0.6777,
+ "step": 1207
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1065005578263483e-05,
+ "loss": 0.7369,
+ "step": 1208
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1052144562244967e-05,
+ "loss": 0.7302,
+ "step": 1209
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1039281786196453e-05,
+ "loss": 0.6966,
+ "step": 1210
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1026417271634821e-05,
+ "loss": 0.733,
+ "step": 1211
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1013551040079866e-05,
+ "loss": 0.695,
+ "step": 1212
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1000683113054251e-05,
+ "loss": 0.7301,
+ "step": 1213
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.0987813512083476e-05,
+ "loss": 0.7707,
+ "step": 1214
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0974942258695838e-05,
+ "loss": 0.6929,
+ "step": 1215
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0962069374422407e-05,
+ "loss": 0.6828,
+ "step": 1216
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0949194880796967e-05,
+ "loss": 0.7026,
+ "step": 1217
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0936318799356009e-05,
+ "loss": 0.702,
+ "step": 1218
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0923441151638676e-05,
+ "loss": 0.7636,
+ "step": 1219
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.091056195918672e-05,
+ "loss": 0.7483,
+ "step": 1220
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0897681243544492e-05,
+ "loss": 0.7116,
+ "step": 1221
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0884799026258879e-05,
+ "loss": 0.7102,
+ "step": 1222
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0871915328879294e-05,
+ "loss": 0.6821,
+ "step": 1223
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.085903017295761e-05,
+ "loss": 0.706,
+ "step": 1224
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0846143580048152e-05,
+ "loss": 0.6097,
+ "step": 1225
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.083325557170764e-05,
+ "loss": 0.7672,
+ "step": 1226
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0820366169495171e-05,
+ "loss": 0.7437,
+ "step": 1227
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0807475394972164e-05,
+ "loss": 0.7873,
+ "step": 1228
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0794583269702341e-05,
+ "loss": 0.7397,
+ "step": 1229
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0781689815251682e-05,
+ "loss": 0.8086,
+ "step": 1230
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0768795053188379e-05,
+ "loss": 0.6964,
+ "step": 1231
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0755899005082837e-05,
+ "loss": 0.6802,
+ "step": 1232
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0743001692507586e-05,
+ "loss": 0.684,
+ "step": 1233
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0730103137037284e-05,
+ "loss": 0.6481,
+ "step": 1234
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0717203360248669e-05,
+ "loss": 0.5927,
+ "step": 1235
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.070430238372052e-05,
+ "loss": 0.6751,
+ "step": 1236
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0691400229033617e-05,
+ "loss": 0.6285,
+ "step": 1237
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.067849691777072e-05,
+ "loss": 0.7115,
+ "step": 1238
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0665592471516518e-05,
+ "loss": 0.6953,
+ "step": 1239
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0652686911857599e-05,
+ "loss": 0.7741,
+ "step": 1240
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0639780260382417e-05,
+ "loss": 0.6922,
+ "step": 1241
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.062687253868125e-05,
+ "loss": 0.7097,
+ "step": 1242
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0613963768346167e-05,
+ "loss": 0.5847,
+ "step": 1243
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.060105397097099e-05,
+ "loss": 0.7531,
+ "step": 1244
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0588143168151257e-05,
+ "loss": 0.6995,
+ "step": 1245
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0575231381484195e-05,
+ "loss": 0.6386,
+ "step": 1246
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0562318632568667e-05,
+ "loss": 0.7166,
+ "step": 1247
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0549404943005157e-05,
+ "loss": 0.7139,
+ "step": 1248
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0536490334395713e-05,
+ "loss": 0.714,
+ "step": 1249
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0523574828343923e-05,
+ "loss": 0.7358,
+ "step": 1250
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.051065844645488e-05,
+ "loss": 0.6637,
+ "step": 1251
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.049774121033514e-05,
+ "loss": 0.7453,
+ "step": 1252
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0484823141592682e-05,
+ "loss": 0.6844,
+ "step": 1253
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0471904261836888e-05,
+ "loss": 0.6509,
+ "step": 1254
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.045898459267849e-05,
+ "loss": 0.7113,
+ "step": 1255
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0446064155729538e-05,
+ "loss": 0.6989,
+ "step": 1256
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0433142972603378e-05,
+ "loss": 0.6793,
+ "step": 1257
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0420221064914592e-05,
+ "loss": 0.7064,
+ "step": 1258
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0407298454278983e-05,
+ "loss": 0.7381,
+ "step": 1259
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0394375162313519e-05,
+ "loss": 0.6665,
+ "step": 1260
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0381451210636321e-05,
+ "loss": 0.704,
+ "step": 1261
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0368526620866604e-05,
+ "loss": 0.6968,
+ "step": 1262
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0355601414624653e-05,
+ "loss": 0.7186,
+ "step": 1263
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0342675613531785e-05,
+ "loss": 0.6175,
+ "step": 1264
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0329749239210311e-05,
+ "loss": 0.7402,
+ "step": 1265
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0316822313283504e-05,
+ "loss": 0.6249,
+ "step": 1266
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0303894857375557e-05,
+ "loss": 0.7534,
+ "step": 1267
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0290966893111547e-05,
+ "loss": 0.6938,
+ "step": 1268
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0278038442117405e-05,
+ "loss": 0.6359,
+ "step": 1269
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0265109526019878e-05,
+ "loss": 0.7451,
+ "step": 1270
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.025218016644649e-05,
+ "loss": 0.7267,
+ "step": 1271
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0239250385025496e-05,
+ "loss": 0.669,
+ "step": 1272
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0226320203385877e-05,
+ "loss": 0.6472,
+ "step": 1273
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0213389643157268e-05,
+ "loss": 0.7005,
+ "step": 1274
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.020045872596994e-05,
+ "loss": 0.6913,
+ "step": 1275
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0187527473454764e-05,
+ "loss": 0.726,
+ "step": 1276
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0174595907243172e-05,
+ "loss": 0.7062,
+ "step": 1277
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.016166404896712e-05,
+ "loss": 0.5962,
+ "step": 1278
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.014873192025905e-05,
+ "loss": 0.7528,
+ "step": 1279
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0135799542751861e-05,
+ "loss": 0.7011,
+ "step": 1280
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0122866938078863e-05,
+ "loss": 0.6372,
+ "step": 1281
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0109934127873751e-05,
+ "loss": 0.6334,
+ "step": 1282
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0097001133770561e-05,
+ "loss": 0.7536,
+ "step": 1283
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0084067977403637e-05,
+ "loss": 0.7381,
+ "step": 1284
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0071134680407593e-05,
+ "loss": 0.5806,
+ "step": 1285
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0058201264417285e-05,
+ "loss": 0.7093,
+ "step": 1286
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0045267751067758e-05,
+ "loss": 0.6829,
+ "step": 1287
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0032334161994225e-05,
+ "loss": 0.7844,
+ "step": 1288
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0019400518832028e-05,
+ "loss": 0.6881,
+ "step": 1289
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0006466843216593e-05,
+ "loss": 0.6956,
+ "step": 1290
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.993533156783409e-06,
+ "loss": 0.6346,
+ "step": 1291
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.980599481167977e-06,
+ "loss": 0.7057,
+ "step": 1292
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.967665838005777e-06,
+ "loss": 0.7215,
+ "step": 1293
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.954732248932243e-06,
+ "loss": 0.6753,
+ "step": 1294
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.941798735582718e-06,
+ "loss": 0.7567,
+ "step": 1295
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.92886531959241e-06,
+ "loss": 0.6844,
+ "step": 1296
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.915932022596368e-06,
+ "loss": 0.6605,
+ "step": 1297
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.90299886622944e-06,
+ "loss": 0.6211,
+ "step": 1298
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.890065872126252e-06,
+ "loss": 0.715,
+ "step": 1299
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.877133061921138e-06,
+ "loss": 0.6978,
+ "step": 1300
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.864200457248144e-06,
+ "loss": 0.6383,
+ "step": 1301
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.851268079740951e-06,
+ "loss": 0.6712,
+ "step": 1302
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.838335951032883e-06,
+ "loss": 0.696,
+ "step": 1303
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.82540409275683e-06,
+ "loss": 0.623,
+ "step": 1304
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.81247252654524e-06,
+ "loss": 0.657,
+ "step": 1305
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.799541274030065e-06,
+ "loss": 0.6868,
+ "step": 1306
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.786610356842734e-06,
+ "loss": 0.645,
+ "step": 1307
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.773679796614124e-06,
+ "loss": 0.6588,
+ "step": 1308
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.760749614974506e-06,
+ "loss": 0.6743,
+ "step": 1309
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.747819833553516e-06,
+ "loss": 0.6128,
+ "step": 1310
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.734890473980122e-06,
+ "loss": 0.6667,
+ "step": 1311
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.721961557882597e-06,
+ "loss": 0.6599,
+ "step": 1312
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.709033106888456e-06,
+ "loss": 0.6981,
+ "step": 1313
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.696105142624448e-06,
+ "loss": 0.7234,
+ "step": 1314
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.683177686716501e-06,
+ "loss": 0.7089,
+ "step": 1315
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.670250760789692e-06,
+ "loss": 0.6594,
+ "step": 1316
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.657324386468217e-06,
+ "loss": 0.7767,
+ "step": 1317
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.644398585375352e-06,
+ "loss": 0.7641,
+ "step": 1318
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.6314733791334e-06,
+ "loss": 0.6762,
+ "step": 1319
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.61854878936368e-06,
+ "loss": 0.7169,
+ "step": 1320
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.605624837686483e-06,
+ "loss": 0.7482,
+ "step": 1321
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.592701545721022e-06,
+ "loss": 0.6473,
+ "step": 1322
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.57977893508541e-06,
+ "loss": 0.6729,
+ "step": 1323
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.566857027396627e-06,
+ "loss": 0.6265,
+ "step": 1324
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.553935844270464e-06,
+ "loss": 0.7421,
+ "step": 1325
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.541015407321514e-06,
+ "loss": 0.7286,
+ "step": 1326
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.528095738163117e-06,
+ "loss": 0.6587,
+ "step": 1327
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.515176858407323e-06,
+ "loss": 0.6696,
+ "step": 1328
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.502258789664865e-06,
+ "loss": 0.6711,
+ "step": 1329
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.489341553545121e-06,
+ "loss": 0.7447,
+ "step": 1330
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.47642517165608e-06,
+ "loss": 0.7053,
+ "step": 1331
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.46350966560429e-06,
+ "loss": 0.6059,
+ "step": 1332
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.450595056994848e-06,
+ "loss": 0.6334,
+ "step": 1333
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.437681367431335e-06,
+ "loss": 0.6278,
+ "step": 1334
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.424768618515809e-06,
+ "loss": 0.7077,
+ "step": 1335
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.411856831848745e-06,
+ "loss": 0.7211,
+ "step": 1336
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.398946029029015e-06,
+ "loss": 0.7562,
+ "step": 1337
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.386036231653836e-06,
+ "loss": 0.7211,
+ "step": 1338
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.37312746131875e-06,
+ "loss": 0.7262,
+ "step": 1339
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.360219739617585e-06,
+ "loss": 0.6379,
+ "step": 1340
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.347313088142403e-06,
+ "loss": 0.5732,
+ "step": 1341
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.334407528483487e-06,
+ "loss": 0.7226,
+ "step": 1342
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.321503082229281e-06,
+ "loss": 0.6958,
+ "step": 1343
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.308599770966385e-06,
+ "loss": 0.6887,
+ "step": 1344
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.295697616279483e-06,
+ "loss": 0.7044,
+ "step": 1345
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.282796639751333e-06,
+ "loss": 0.699,
+ "step": 1346
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.269896862962718e-06,
+ "loss": 0.7058,
+ "step": 1347
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.256998307492415e-06,
+ "loss": 0.7372,
+ "step": 1348
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.244100994917167e-06,
+ "loss": 0.7239,
+ "step": 1349
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.231204946811624e-06,
+ "loss": 0.5787,
+ "step": 1350
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.218310184748325e-06,
+ "loss": 0.6239,
+ "step": 1351
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.20541673029766e-06,
+ "loss": 0.6161,
+ "step": 1352
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.192524605027838e-06,
+ "loss": 0.6972,
+ "step": 1353
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.179633830504832e-06,
+ "loss": 0.754,
+ "step": 1354
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.166744428292362e-06,
+ "loss": 0.7415,
+ "step": 1355
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.153856419951854e-06,
+ "loss": 0.7791,
+ "step": 1356
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.140969827042392e-06,
+ "loss": 0.7447,
+ "step": 1357
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.128084671120709e-06,
+ "loss": 0.6897,
+ "step": 1358
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.115200973741123e-06,
+ "loss": 0.6918,
+ "step": 1359
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.102318756455513e-06,
+ "loss": 0.7557,
+ "step": 1360
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.089438040813282e-06,
+ "loss": 0.6692,
+ "step": 1361
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.076558848361328e-06,
+ "loss": 0.7041,
+ "step": 1362
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.063681200643993e-06,
+ "loss": 0.6908,
+ "step": 1363
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.050805119203035e-06,
+ "loss": 0.7037,
+ "step": 1364
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.037930625577598e-06,
+ "loss": 0.6786,
+ "step": 1365
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 9.025057741304164e-06,
+ "loss": 0.6348,
+ "step": 1366
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 9.012186487916526e-06,
+ "loss": 0.7015,
+ "step": 1367
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.999316886945752e-06,
+ "loss": 0.7216,
+ "step": 1368
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.986448959920137e-06,
+ "loss": 0.6494,
+ "step": 1369
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.97358272836518e-06,
+ "loss": 0.6428,
+ "step": 1370
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.96071821380355e-06,
+ "loss": 0.7302,
+ "step": 1371
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.947855437755036e-06,
+ "loss": 0.666,
+ "step": 1372
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.934994421736519e-06,
+ "loss": 0.6802,
+ "step": 1373
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.922135187261944e-06,
+ "loss": 0.7163,
+ "step": 1374
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.90927775584226e-06,
+ "loss": 0.6934,
+ "step": 1375
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.896422148985418e-06,
+ "loss": 0.6987,
+ "step": 1376
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.883568388196308e-06,
+ "loss": 0.6894,
+ "step": 1377
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.87071649497673e-06,
+ "loss": 0.664,
+ "step": 1378
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.85786649082536e-06,
+ "loss": 0.687,
+ "step": 1379
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.845018397237722e-06,
+ "loss": 0.7738,
+ "step": 1380
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.832172235706137e-06,
+ "loss": 0.7605,
+ "step": 1381
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.819328027719692e-06,
+ "loss": 0.6862,
+ "step": 1382
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.806485794764215e-06,
+ "loss": 0.7155,
+ "step": 1383
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.793645558322218e-06,
+ "loss": 0.6947,
+ "step": 1384
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.780807339872886e-06,
+ "loss": 0.7652,
+ "step": 1385
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.76797116089202e-06,
+ "loss": 0.7067,
+ "step": 1386
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.755137042852012e-06,
+ "loss": 0.6682,
+ "step": 1387
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.742305007221801e-06,
+ "loss": 0.6264,
+ "step": 1388
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.729475075466856e-06,
+ "loss": 0.738,
+ "step": 1389
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.716647269049115e-06,
+ "loss": 0.7141,
+ "step": 1390
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.703821609426963e-06,
+ "loss": 0.6528,
+ "step": 1391
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.690998118055193e-06,
+ "loss": 0.7106,
+ "step": 1392
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.678176816384975e-06,
+ "loss": 0.7204,
+ "step": 1393
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.665357725863816e-06,
+ "loss": 0.7355,
+ "step": 1394
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.65254086793552e-06,
+ "loss": 0.6963,
+ "step": 1395
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.639726264040159e-06,
+ "loss": 0.7263,
+ "step": 1396
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.626913935614026e-06,
+ "loss": 0.6098,
+ "step": 1397
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.61410390408963e-06,
+ "loss": 0.7582,
+ "step": 1398
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.601296190895611e-06,
+ "loss": 0.6922,
+ "step": 1399
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.588490817456749e-06,
+ "loss": 0.7528,
+ "step": 1400
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.575687805193902e-06,
+ "loss": 0.7138,
+ "step": 1401
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.562887175523977e-06,
+ "loss": 0.6996,
+ "step": 1402
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.550088949859906e-06,
+ "loss": 0.7408,
+ "step": 1403
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.53729314961059e-06,
+ "loss": 0.5894,
+ "step": 1404
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.52449979618087e-06,
+ "loss": 0.6789,
+ "step": 1405
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.511708910971506e-06,
+ "loss": 0.7069,
+ "step": 1406
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.498920515379112e-06,
+ "loss": 0.7547,
+ "step": 1407
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.486134630796157e-06,
+ "loss": 0.6827,
+ "step": 1408
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.4733512786109e-06,
+ "loss": 0.634,
+ "step": 1409
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.460570480207358e-06,
+ "loss": 0.6724,
+ "step": 1410
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.447792256965282e-06,
+ "loss": 0.6559,
+ "step": 1411
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.43501663026012e-06,
+ "loss": 0.6817,
+ "step": 1412
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.42224362146297e-06,
+ "loss": 0.6884,
+ "step": 1413
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.409473251940549e-06,
+ "loss": 0.6585,
+ "step": 1414
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.39670554305517e-06,
+ "loss": 0.6983,
+ "step": 1415
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.383940516164677e-06,
+ "loss": 0.7004,
+ "step": 1416
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.371178192622445e-06,
+ "loss": 0.7176,
+ "step": 1417
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.358418593777322e-06,
+ "loss": 0.6674,
+ "step": 1418
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.345661740973592e-06,
+ "loss": 0.6984,
+ "step": 1419
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.332907655550948e-06,
+ "loss": 0.6485,
+ "step": 1420
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.32015635884446e-06,
+ "loss": 0.6533,
+ "step": 1421
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.307407872184527e-06,
+ "loss": 0.6748,
+ "step": 1422
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.294662216896848e-06,
+ "loss": 0.6556,
+ "step": 1423
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.281919414302388e-06,
+ "loss": 0.6194,
+ "step": 1424
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.269179485717331e-06,
+ "loss": 0.7139,
+ "step": 1425
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.256442452453073e-06,
+ "loss": 0.6401,
+ "step": 1426
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.243708335816145e-06,
+ "loss": 0.7004,
+ "step": 1427
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.230977157108213e-06,
+ "loss": 0.633,
+ "step": 1428
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.218248937626018e-06,
+ "loss": 0.7269,
+ "step": 1429
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.205523698661365e-06,
+ "loss": 0.7099,
+ "step": 1430
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.19280146150106e-06,
+ "loss": 0.6306,
+ "step": 1431
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.180082247426895e-06,
+ "loss": 0.7135,
+ "step": 1432
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.167366077715598e-06,
+ "loss": 0.6903,
+ "step": 1433
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.15465297363881e-06,
+ "loss": 0.7208,
+ "step": 1434
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.141942956463049e-06,
+ "loss": 0.7039,
+ "step": 1435
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.129236047449654e-06,
+ "loss": 0.677,
+ "step": 1436
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.116532267854781e-06,
+ "loss": 0.6221,
+ "step": 1437
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.103831638929337e-06,
+ "loss": 0.7231,
+ "step": 1438
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.091134181918971e-06,
+ "loss": 0.6373,
+ "step": 1439
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.078439918064016e-06,
+ "loss": 0.7476,
+ "step": 1440
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.065748868599471e-06,
+ "loss": 0.7141,
+ "step": 1441
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.053061054754952e-06,
+ "loss": 0.6323,
+ "step": 1442
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.04037649775466e-06,
+ "loss": 0.7174,
+ "step": 1443
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.027695218817366e-06,
+ "loss": 0.6481,
+ "step": 1444
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.015017239156338e-06,
+ "loss": 0.6869,
+ "step": 1445
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.002342579979327e-06,
+ "loss": 0.6772,
+ "step": 1446
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.989671262488534e-06,
+ "loss": 0.6974,
+ "step": 1447
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.977003307880579e-06,
+ "loss": 0.7023,
+ "step": 1448
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.964338737346441e-06,
+ "loss": 0.7041,
+ "step": 1449
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.951677572071446e-06,
+ "loss": 0.7428,
+ "step": 1450
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.939019833235223e-06,
+ "loss": 0.6636,
+ "step": 1451
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.926365542011662e-06,
+ "loss": 0.7002,
+ "step": 1452
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.9137147195689e-06,
+ "loss": 0.693,
+ "step": 1453
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.901067387069264e-06,
+ "loss": 0.693,
+ "step": 1454
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.888423565669236e-06,
+ "loss": 0.7093,
+ "step": 1455
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.875783276519435e-06,
+ "loss": 0.7258,
+ "step": 1456
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.863146540764572e-06,
+ "loss": 0.7135,
+ "step": 1457
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.850513379543405e-06,
+ "loss": 0.6715,
+ "step": 1458
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.837883813988717e-06,
+ "loss": 0.634,
+ "step": 1459
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.825257865227278e-06,
+ "loss": 0.6738,
+ "step": 1460
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.812635554379802e-06,
+ "loss": 0.74,
+ "step": 1461
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.800016902560924e-06,
+ "loss": 0.7239,
+ "step": 1462
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.787401930879159e-06,
+ "loss": 0.6026,
+ "step": 1463
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.774790660436857e-06,
+ "loss": 0.6539,
+ "step": 1464
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.762183112330182e-06,
+ "loss": 0.6735,
+ "step": 1465
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.74957930764908e-06,
+ "loss": 0.7306,
+ "step": 1466
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.736979267477218e-06,
+ "loss": 0.7623,
+ "step": 1467
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.724383012891976e-06,
+ "loss": 0.6747,
+ "step": 1468
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.7117905649644e-06,
+ "loss": 0.6564,
+ "step": 1469
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.699201944759164e-06,
+ "loss": 0.683,
+ "step": 1470
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.686617173334547e-06,
+ "loss": 0.7416,
+ "step": 1471
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.674036271742388e-06,
+ "loss": 0.6835,
+ "step": 1472
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.661459261028045e-06,
+ "loss": 0.7603,
+ "step": 1473
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.648886162230373e-06,
+ "loss": 0.6504,
+ "step": 1474
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.63631699638168e-06,
+ "loss": 0.6985,
+ "step": 1475
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.623751784507707e-06,
+ "loss": 0.6696,
+ "step": 1476
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.6111905476275606e-06,
+ "loss": 0.7005,
+ "step": 1477
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.598633306753715e-06,
+ "loss": 0.7119,
+ "step": 1478
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.586080082891949e-06,
+ "loss": 0.6827,
+ "step": 1479
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.57353089704133e-06,
+ "loss": 0.6551,
+ "step": 1480
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.5609857701941645e-06,
+ "loss": 0.6936,
+ "step": 1481
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.548444723335975e-06,
+ "loss": 0.6802,
+ "step": 1482
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.535907777445449e-06,
+ "loss": 0.6485,
+ "step": 1483
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.523374953494422e-06,
+ "loss": 0.6586,
+ "step": 1484
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.510846272447839e-06,
+ "loss": 0.6641,
+ "step": 1485
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.498321755263704e-06,
+ "loss": 0.6307,
+ "step": 1486
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.485801422893057e-06,
+ "loss": 0.6755,
+ "step": 1487
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.473285296279942e-06,
+ "loss": 0.6797,
+ "step": 1488
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.460773396361371e-06,
+ "loss": 0.7302,
+ "step": 1489
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.448265744067275e-06,
+ "loss": 0.7323,
+ "step": 1490
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.435762360320491e-06,
+ "loss": 0.6731,
+ "step": 1491
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.423263266036705e-06,
+ "loss": 0.7572,
+ "step": 1492
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.410768482124431e-06,
+ "loss": 0.6739,
+ "step": 1493
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.3982780294849795e-06,
+ "loss": 0.6767,
+ "step": 1494
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.38579192901241e-06,
+ "loss": 0.7141,
+ "step": 1495
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.373310201593498e-06,
+ "loss": 0.7204,
+ "step": 1496
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.360832868107708e-06,
+ "loss": 0.7751,
+ "step": 1497
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.348359949427159e-06,
+ "loss": 0.6852,
+ "step": 1498
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.335891466416574e-06,
+ "loss": 0.56,
+ "step": 1499
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.323427439933269e-06,
+ "loss": 0.6566,
+ "step": 1500
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.31096789082709e-06,
+ "loss": 0.7349,
+ "step": 1501
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.2985128399404016e-06,
+ "loss": 0.715,
+ "step": 1502
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.2860623081080506e-06,
+ "loss": 0.5972,
+ "step": 1503
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.273616316157312e-06,
+ "loss": 0.7061,
+ "step": 1504
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.261174884907874e-06,
+ "loss": 0.6949,
+ "step": 1505
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.248738035171787e-06,
+ "loss": 0.7098,
+ "step": 1506
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.236305787753454e-06,
+ "loss": 0.6458,
+ "step": 1507
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.223878163449563e-06,
+ "loss": 0.6401,
+ "step": 1508
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.211455183049072e-06,
+ "loss": 0.7907,
+ "step": 1509
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.199036867333179e-06,
+ "loss": 0.7432,
+ "step": 1510
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.186623237075265e-06,
+ "loss": 0.6803,
+ "step": 1511
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.174214313040888e-06,
+ "loss": 0.7481,
+ "step": 1512
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.161810115987727e-06,
+ "loss": 0.7145,
+ "step": 1513
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.14941066666555e-06,
+ "loss": 0.6793,
+ "step": 1514
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.137015985816183e-06,
+ "loss": 0.7073,
+ "step": 1515
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.124626094173484e-06,
+ "loss": 0.647,
+ "step": 1516
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.1122410124632935e-06,
+ "loss": 0.5615,
+ "step": 1517
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.0998607614034035e-06,
+ "loss": 0.6673,
+ "step": 1518
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.087485361703532e-06,
+ "loss": 0.665,
+ "step": 1519
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.075114834065272e-06,
+ "loss": 0.7226,
+ "step": 1520
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.06274919918208e-06,
+ "loss": 0.6319,
+ "step": 1521
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.0503884777392166e-06,
+ "loss": 0.7213,
+ "step": 1522
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.0380326904137265e-06,
+ "loss": 0.6607,
+ "step": 1523
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.025681857874398e-06,
+ "loss": 0.6817,
+ "step": 1524
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.013336000781739e-06,
+ "loss": 0.6968,
+ "step": 1525
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.000995139787929e-06,
+ "loss": 0.6864,
+ "step": 1526
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.988659295536792e-06,
+ "loss": 0.6638,
+ "step": 1527
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.9763284886637516e-06,
+ "loss": 0.7239,
+ "step": 1528
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.964002739795814e-06,
+ "loss": 0.6517,
+ "step": 1529
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.9516820695515285e-06,
+ "loss": 0.6417,
+ "step": 1530
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.939366498540936e-06,
+ "loss": 0.7525,
+ "step": 1531
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.927056047365558e-06,
+ "loss": 0.7167,
+ "step": 1532
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.914750736618341e-06,
+ "loss": 0.6819,
+ "step": 1533
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.9024505868836465e-06,
+ "loss": 0.6989,
+ "step": 1534
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.890155618737189e-06,
+ "loss": 0.6683,
+ "step": 1535
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.877865852746026e-06,
+ "loss": 0.6648,
+ "step": 1536
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.865581309468506e-06,
+ "loss": 0.6508,
+ "step": 1537
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.853302009454239e-06,
+ "loss": 0.7361,
+ "step": 1538
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.841027973244077e-06,
+ "loss": 0.671,
+ "step": 1539
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.828759221370049e-06,
+ "loss": 0.664,
+ "step": 1540
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.816495774355362e-06,
+ "loss": 0.6628,
+ "step": 1541
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.804237652714334e-06,
+ "loss": 0.6887,
+ "step": 1542
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.7919848769523786e-06,
+ "loss": 0.6959,
+ "step": 1543
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.779737467565974e-06,
+ "loss": 0.7291,
+ "step": 1544
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.767495445042618e-06,
+ "loss": 0.6379,
+ "step": 1545
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.755258829860791e-06,
+ "loss": 0.6475,
+ "step": 1546
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.743027642489935e-06,
+ "loss": 0.6873,
+ "step": 1547
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.730801903390412e-06,
+ "loss": 0.6846,
+ "step": 1548
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.718581633013465e-06,
+ "loss": 0.671,
+ "step": 1549
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.706366851801194e-06,
+ "loss": 0.6644,
+ "step": 1550
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.694157580186516e-06,
+ "loss": 0.649,
+ "step": 1551
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.681953838593125e-06,
+ "loss": 0.68,
+ "step": 1552
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.669755647435474e-06,
+ "loss": 0.6287,
+ "step": 1553
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.657563027118732e-06,
+ "loss": 0.618,
+ "step": 1554
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.645375998038737e-06,
+ "loss": 0.7425,
+ "step": 1555
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.633194580581978e-06,
+ "loss": 0.6856,
+ "step": 1556
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.621018795125569e-06,
+ "loss": 0.6296,
+ "step": 1557
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.6088486620371905e-06,
+ "loss": 0.7821,
+ "step": 1558
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.596684201675069e-06,
+ "loss": 0.7136,
+ "step": 1559
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.584525434387945e-06,
+ "loss": 0.6857,
+ "step": 1560
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.572372380515031e-06,
+ "loss": 0.7551,
+ "step": 1561
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.560225060385992e-06,
+ "loss": 0.6941,
+ "step": 1562
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.54808349432089e-06,
+ "loss": 0.6764,
+ "step": 1563
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.535947702630168e-06,
+ "loss": 0.727,
+ "step": 1564
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.523817705614602e-06,
+ "loss": 0.5695,
+ "step": 1565
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.51169352356529e-06,
+ "loss": 0.6763,
+ "step": 1566
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.499575176763591e-06,
+ "loss": 0.6184,
+ "step": 1567
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.487462685481103e-06,
+ "loss": 0.6486,
+ "step": 1568
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.4753560699796305e-06,
+ "loss": 0.6916,
+ "step": 1569
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.4632553505111484e-06,
+ "loss": 0.6838,
+ "step": 1570
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.451160547317777e-06,
+ "loss": 0.7143,
+ "step": 1571
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.439071680631725e-06,
+ "loss": 0.6941,
+ "step": 1572
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.4269887706752845e-06,
+ "loss": 0.729,
+ "step": 1573
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.414911837660768e-06,
+ "loss": 0.7041,
+ "step": 1574
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.402840901790508e-06,
+ "loss": 0.6924,
+ "step": 1575
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.39077598325679e-06,
+ "loss": 0.6804,
+ "step": 1576
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.378717102241842e-06,
+ "loss": 0.6853,
+ "step": 1577
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.366664278917789e-06,
+ "loss": 0.7923,
+ "step": 1578
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.3546175334466185e-06,
+ "loss": 0.7074,
+ "step": 1579
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.342576885980165e-06,
+ "loss": 0.7078,
+ "step": 1580
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.330542356660047e-06,
+ "loss": 0.6143,
+ "step": 1581
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.318513965617657e-06,
+ "loss": 0.6558,
+ "step": 1582
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.306491732974111e-06,
+ "loss": 0.6495,
+ "step": 1583
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.2944756788402394e-06,
+ "loss": 0.6719,
+ "step": 1584
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.282465823316519e-06,
+ "loss": 0.6856,
+ "step": 1585
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.270462186493071e-06,
+ "loss": 0.6369,
+ "step": 1586
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.2584647884496034e-06,
+ "loss": 0.6776,
+ "step": 1587
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.246473649255391e-06,
+ "loss": 0.7309,
+ "step": 1588
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.234488788969251e-06,
+ "loss": 0.667,
+ "step": 1589
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.22251022763948e-06,
+ "loss": 0.7149,
+ "step": 1590
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.2105379853038436e-06,
+ "loss": 0.6952,
+ "step": 1591
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.19857208198954e-06,
+ "loss": 0.6295,
+ "step": 1592
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.186612537713164e-06,
+ "loss": 0.7524,
+ "step": 1593
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.1746593724806666e-06,
+ "loss": 0.6547,
+ "step": 1594
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.162712606287335e-06,
+ "loss": 0.6421,
+ "step": 1595
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.150772259117748e-06,
+ "loss": 0.6875,
+ "step": 1596
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.138838350945744e-06,
+ "loss": 0.6819,
+ "step": 1597
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.126910901734397e-06,
+ "loss": 0.6575,
+ "step": 1598
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.114989931435976e-06,
+ "loss": 0.707,
+ "step": 1599
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.103075459991905e-06,
+ "loss": 0.7111,
+ "step": 1600
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.0911675073327385e-06,
+ "loss": 0.7314,
+ "step": 1601
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.079266093378138e-06,
+ "loss": 0.6706,
+ "step": 1602
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.067371238036812e-06,
+ "loss": 0.7071,
+ "step": 1603
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.055482961206503e-06,
+ "loss": 0.7422,
+ "step": 1604
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.043601282773952e-06,
+ "loss": 0.732,
+ "step": 1605
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.031726222614855e-06,
+ "loss": 0.642,
+ "step": 1606
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.019857800593847e-06,
+ "loss": 0.6497,
+ "step": 1607
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.007996036564454e-06,
+ "loss": 0.6338,
+ "step": 1608
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.9961409503690605e-06,
+ "loss": 0.6868,
+ "step": 1609
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.984292561838881e-06,
+ "loss": 0.7424,
+ "step": 1610
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.9724508907939345e-06,
+ "loss": 0.636,
+ "step": 1611
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.960615957042999e-06,
+ "loss": 0.6338,
+ "step": 1612
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.948787780383574e-06,
+ "loss": 0.6044,
+ "step": 1613
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.936966380601867e-06,
+ "loss": 0.6836,
+ "step": 1614
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.92515177747274e-06,
+ "loss": 0.6446,
+ "step": 1615
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.913343990759695e-06,
+ "loss": 0.7554,
+ "step": 1616
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.901543040214823e-06,
+ "loss": 0.7045,
+ "step": 1617
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.889748945578788e-06,
+ "loss": 0.7306,
+ "step": 1618
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.877961726580773e-06,
+ "loss": 0.6662,
+ "step": 1619
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.866181402938469e-06,
+ "loss": 0.6709,
+ "step": 1620
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.854407994358037e-06,
+ "loss": 0.6846,
+ "step": 1621
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.842641520534055e-06,
+ "loss": 0.7122,
+ "step": 1622
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.830882001149517e-06,
+ "loss": 0.7134,
+ "step": 1623
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.819129455875764e-06,
+ "loss": 0.6846,
+ "step": 1624
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.8073839043724935e-06,
+ "loss": 0.651,
+ "step": 1625
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.7956453662876935e-06,
+ "loss": 0.6689,
+ "step": 1626
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.783913861257611e-06,
+ "loss": 0.6747,
+ "step": 1627
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.772189408906739e-06,
+ "loss": 0.6545,
+ "step": 1628
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.760472028847773e-06,
+ "loss": 0.7081,
+ "step": 1629
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.748761740681573e-06,
+ "loss": 0.6908,
+ "step": 1630
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.737058563997138e-06,
+ "loss": 0.6248,
+ "step": 1631
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.725362518371576e-06,
+ "loss": 0.6172,
+ "step": 1632
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.713673623370046e-06,
+ "loss": 0.7342,
+ "step": 1633
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.7019918985457745e-06,
+ "loss": 0.6993,
+ "step": 1634
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.6903173634399785e-06,
+ "loss": 0.7109,
+ "step": 1635
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.678650037581841e-06,
+ "loss": 0.6115,
+ "step": 1636
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.666989940488496e-06,
+ "loss": 0.6896,
+ "step": 1637
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.655337091664985e-06,
+ "loss": 0.6753,
+ "step": 1638
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.643691510604221e-06,
+ "loss": 0.6297,
+ "step": 1639
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.632053216786959e-06,
+ "loss": 0.651,
+ "step": 1640
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.620422229681771e-06,
+ "loss": 0.6685,
+ "step": 1641
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.608798568744989e-06,
+ "loss": 0.666,
+ "step": 1642
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.597182253420712e-06,
+ "loss": 0.6243,
+ "step": 1643
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.585573303140741e-06,
+ "loss": 0.6522,
+ "step": 1644
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.573971737324551e-06,
+ "loss": 0.6861,
+ "step": 1645
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.56237757537927e-06,
+ "loss": 0.6364,
+ "step": 1646
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.550790836699642e-06,
+ "loss": 0.6029,
+ "step": 1647
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.539211540667992e-06,
+ "loss": 0.6462,
+ "step": 1648
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.527639706654195e-06,
+ "loss": 0.714,
+ "step": 1649
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.516075354015646e-06,
+ "loss": 0.7084,
+ "step": 1650
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.504518502097212e-06,
+ "loss": 0.7171,
+ "step": 1651
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.4929691702312395e-06,
+ "loss": 0.6553,
+ "step": 1652
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.481427377737465e-06,
+ "loss": 0.6455,
+ "step": 1653
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.4698931439230314e-06,
+ "loss": 0.6901,
+ "step": 1654
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.458366488082436e-06,
+ "loss": 0.6949,
+ "step": 1655
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.446847429497494e-06,
+ "loss": 0.6834,
+ "step": 1656
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.435335987437316e-06,
+ "loss": 0.7484,
+ "step": 1657
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.423832181158274e-06,
+ "loss": 0.669,
+ "step": 1658
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.412336029903956e-06,
+ "loss": 0.73,
+ "step": 1659
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.400847552905152e-06,
+ "loss": 0.7043,
+ "step": 1660
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.3893667693798245e-06,
+ "loss": 0.7214,
+ "step": 1661
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.377893698533048e-06,
+ "loss": 0.7089,
+ "step": 1662
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.366428359557003e-06,
+ "loss": 0.6707,
+ "step": 1663
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.354970771630945e-06,
+ "loss": 0.668,
+ "step": 1664
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.34352095392114e-06,
+ "loss": 0.6182,
+ "step": 1665
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.332078925580883e-06,
+ "loss": 0.6481,
+ "step": 1666
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.320644705750431e-06,
+ "loss": 0.7033,
+ "step": 1667
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.309218313556961e-06,
+ "loss": 0.6702,
+ "step": 1668
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.297799768114575e-06,
+ "loss": 0.6675,
+ "step": 1669
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.286389088524253e-06,
+ "loss": 0.6136,
+ "step": 1670
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.274986293873796e-06,
+ "loss": 0.6822,
+ "step": 1671
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.263591403237831e-06,
+ "loss": 0.6133,
+ "step": 1672
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.252204435677763e-06,
+ "loss": 0.6395,
+ "step": 1673
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.2408254102417275e-06,
+ "loss": 0.6992,
+ "step": 1674
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.2294543459646e-06,
+ "loss": 0.6346,
+ "step": 1675
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.2180912618679195e-06,
+ "loss": 0.5956,
+ "step": 1676
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.206736176959877e-06,
+ "loss": 0.6515,
+ "step": 1677
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.19538911023529e-06,
+ "loss": 0.7051,
+ "step": 1678
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.184050080675558e-06,
+ "loss": 0.7159,
+ "step": 1679
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.1727191072486385e-06,
+ "loss": 0.6304,
+ "step": 1680
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.161396208909012e-06,
+ "loss": 0.6531,
+ "step": 1681
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.150081404597654e-06,
+ "loss": 0.6541,
+ "step": 1682
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.138774713241983e-06,
+ "loss": 0.6782,
+ "step": 1683
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.127476153755873e-06,
+ "loss": 0.6293,
+ "step": 1684
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.116185745039585e-06,
+ "loss": 0.6962,
+ "step": 1685
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.10490350597973e-06,
+ "loss": 0.6201,
+ "step": 1686
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.093629455449273e-06,
+ "loss": 0.6579,
+ "step": 1687
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.082363612307471e-06,
+ "loss": 0.7357,
+ "step": 1688
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.071105995399852e-06,
+ "loss": 0.5901,
+ "step": 1689
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.059856623558189e-06,
+ "loss": 0.6975,
+ "step": 1690
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.0486155156004615e-06,
+ "loss": 0.6361,
+ "step": 1691
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.0373826903308095e-06,
+ "loss": 0.6902,
+ "step": 1692
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.026158166539548e-06,
+ "loss": 0.6761,
+ "step": 1693
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.014941963003075e-06,
+ "loss": 0.6557,
+ "step": 1694
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.003734098483887e-06,
+ "loss": 0.682,
+ "step": 1695
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.992534591730527e-06,
+ "loss": 0.6713,
+ "step": 1696
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.981343461477559e-06,
+ "loss": 0.7376,
+ "step": 1697
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.970160726445531e-06,
+ "loss": 0.682,
+ "step": 1698
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.9589864053409545e-06,
+ "loss": 0.6766,
+ "step": 1699
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.9478205168562535e-06,
+ "loss": 0.7562,
+ "step": 1700
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.936663079669753e-06,
+ "loss": 0.6514,
+ "step": 1701
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.925514112445653e-06,
+ "loss": 0.7442,
+ "step": 1702
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.914373633833962e-06,
+ "loss": 0.6804,
+ "step": 1703
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.9032416624705036e-06,
+ "loss": 0.7052,
+ "step": 1704
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.892118216976871e-06,
+ "loss": 0.6387,
+ "step": 1705
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.881003315960378e-06,
+ "loss": 0.7031,
+ "step": 1706
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.869896978014071e-06,
+ "loss": 0.6499,
+ "step": 1707
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.858799221716659e-06,
+ "loss": 0.6845,
+ "step": 1708
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.847710065632489e-06,
+ "loss": 0.6311,
+ "step": 1709
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.836629528311527e-06,
+ "loss": 0.7011,
+ "step": 1710
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.8255576282893355e-06,
+ "loss": 0.6767,
+ "step": 1711
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.814494384087004e-06,
+ "loss": 0.6893,
+ "step": 1712
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.803439814211158e-06,
+ "loss": 0.6652,
+ "step": 1713
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.792393937153914e-06,
+ "loss": 0.6802,
+ "step": 1714
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.78135677139283e-06,
+ "loss": 0.7172,
+ "step": 1715
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.770328335390915e-06,
+ "loss": 0.7511,
+ "step": 1716
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.759308647596564e-06,
+ "loss": 0.6871,
+ "step": 1717
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.7482977264435305e-06,
+ "loss": 0.7501,
+ "step": 1718
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.737295590350913e-06,
+ "loss": 0.6433,
+ "step": 1719
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.726302257723113e-06,
+ "loss": 0.7348,
+ "step": 1720
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.7153177469498045e-06,
+ "loss": 0.7438,
+ "step": 1721
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.704342076405902e-06,
+ "loss": 0.7315,
+ "step": 1722
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.6933752644515404e-06,
+ "loss": 0.7445,
+ "step": 1723
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.682417329432016e-06,
+ "loss": 0.6336,
+ "step": 1724
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.671468289677802e-06,
+ "loss": 0.6899,
+ "step": 1725
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.660528163504479e-06,
+ "loss": 0.7473,
+ "step": 1726
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.6495969692127084e-06,
+ "loss": 0.6591,
+ "step": 1727
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.6386747250882226e-06,
+ "loss": 0.6735,
+ "step": 1728
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.6277614494017784e-06,
+ "loss": 0.666,
+ "step": 1729
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.616857160409131e-06,
+ "loss": 0.6789,
+ "step": 1730
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.605961876351001e-06,
+ "loss": 0.6489,
+ "step": 1731
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.595075615453049e-06,
+ "loss": 0.7276,
+ "step": 1732
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.584198395925829e-06,
+ "loss": 0.7292,
+ "step": 1733
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.5733302359647955e-06,
+ "loss": 0.7242,
+ "step": 1734
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.562471153750221e-06,
+ "loss": 0.6776,
+ "step": 1735
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.55162116744721e-06,
+ "loss": 0.7364,
+ "step": 1736
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.5407802952056435e-06,
+ "loss": 0.7191,
+ "step": 1737
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.529948555160162e-06,
+ "loss": 0.7148,
+ "step": 1738
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.519125965430125e-06,
+ "loss": 0.6632,
+ "step": 1739
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.508312544119592e-06,
+ "loss": 0.7307,
+ "step": 1740
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.497508309317271e-06,
+ "loss": 0.705,
+ "step": 1741
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.486713279096515e-06,
+ "loss": 0.7034,
+ "step": 1742
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.475927471515287e-06,
+ "loss": 0.6672,
+ "step": 1743
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.465150904616101e-06,
+ "loss": 0.614,
+ "step": 1744
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.454383596426028e-06,
+ "loss": 0.6489,
+ "step": 1745
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.443625564956648e-06,
+ "loss": 0.6973,
+ "step": 1746
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.432876828204022e-06,
+ "loss": 0.7163,
+ "step": 1747
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.4221374041486654e-06,
+ "loss": 0.6986,
+ "step": 1748
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.411407310755513e-06,
+ "loss": 0.7546,
+ "step": 1749
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.400686565973886e-06,
+ "loss": 0.6168,
+ "step": 1750
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.389975187737472e-06,
+ "loss": 0.6861,
+ "step": 1751
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.3792731939643015e-06,
+ "loss": 0.6757,
+ "step": 1752
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.368580602556687e-06,
+ "loss": 0.6615,
+ "step": 1753
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.3578974314012225e-06,
+ "loss": 0.6795,
+ "step": 1754
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.347223698368748e-06,
+ "loss": 0.7273,
+ "step": 1755
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.336559421314298e-06,
+ "loss": 0.6269,
+ "step": 1756
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.325904618077114e-06,
+ "loss": 0.6788,
+ "step": 1757
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.315259306480575e-06,
+ "loss": 0.7263,
+ "step": 1758
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.304623504332178e-06,
+ "loss": 0.7434,
+ "step": 1759
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.293997229423523e-06,
+ "loss": 0.6652,
+ "step": 1760
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.283380499530269e-06,
+ "loss": 0.7333,
+ "step": 1761
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.2727733324121055e-06,
+ "loss": 0.6379,
+ "step": 1762
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.262175745812729e-06,
+ "loss": 0.6696,
+ "step": 1763
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.251587757459813e-06,
+ "loss": 0.6714,
+ "step": 1764
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.241009385064958e-06,
+ "loss": 0.7487,
+ "step": 1765
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.2304406463237026e-06,
+ "loss": 0.6666,
+ "step": 1766
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.219881558915458e-06,
+ "loss": 0.7077,
+ "step": 1767
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.209332140503486e-06,
+ "loss": 0.6911,
+ "step": 1768
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.198792408734882e-06,
+ "loss": 0.6486,
+ "step": 1769
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.1882623812405345e-06,
+ "loss": 0.6362,
+ "step": 1770
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.1777420756351e-06,
+ "loss": 0.6537,
+ "step": 1771
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.1672315095169725e-06,
+ "loss": 0.6595,
+ "step": 1772
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.1567307004682546e-06,
+ "loss": 0.6062,
+ "step": 1773
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.146239666054716e-06,
+ "loss": 0.6798,
+ "step": 1774
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.135758423825798e-06,
+ "loss": 0.7217,
+ "step": 1775
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.125286991314538e-06,
+ "loss": 0.6591,
+ "step": 1776
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.1148253860375764e-06,
+ "loss": 0.7172,
+ "step": 1777
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.104373625495114e-06,
+ "loss": 0.6432,
+ "step": 1778
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.09393172717088e-06,
+ "loss": 0.6345,
+ "step": 1779
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.083499708532108e-06,
+ "loss": 0.6872,
+ "step": 1780
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.073077587029509e-06,
+ "loss": 0.6374,
+ "step": 1781
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.0626653800972236e-06,
+ "loss": 0.7046,
+ "step": 1782
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.0522631051528205e-06,
+ "loss": 0.6766,
+ "step": 1783
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.0418707795972575e-06,
+ "loss": 0.6547,
+ "step": 1784
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.031488420814834e-06,
+ "loss": 0.7285,
+ "step": 1785
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.021116046173189e-06,
+ "loss": 0.6674,
+ "step": 1786
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.010753673023254e-06,
+ "loss": 0.5786,
+ "step": 1787
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.000401318699236e-06,
+ "loss": 0.7023,
+ "step": 1788
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 3.9900590005185735e-06,
+ "loss": 0.67,
+ "step": 1789
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 3.9797267357819294e-06,
+ "loss": 0.716,
+ "step": 1790
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 3.969404541773132e-06,
+ "loss": 0.6724,
+ "step": 1791
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.959092435759174e-06,
+ "loss": 0.695,
+ "step": 1792
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.9487904349901815e-06,
+ "loss": 0.6837,
+ "step": 1793
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.938498556699357e-06,
+ "loss": 0.6544,
+ "step": 1794
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.928216818102982e-06,
+ "loss": 0.7146,
+ "step": 1795
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.917945236400374e-06,
+ "loss": 0.6849,
+ "step": 1796
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.907683828773861e-06,
+ "loss": 0.6909,
+ "step": 1797
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.897432612388752e-06,
+ "loss": 0.6958,
+ "step": 1798
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.8871916043933065e-06,
+ "loss": 0.6176,
+ "step": 1799
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.876960821918704e-06,
+ "loss": 0.653,
+ "step": 1800
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.866740282079023e-06,
+ "loss": 0.6818,
+ "step": 1801
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.8565300019712115e-06,
+ "loss": 0.6919,
+ "step": 1802
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.846329998675048e-06,
+ "loss": 0.7064,
+ "step": 1803
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.836140289253124e-06,
+ "loss": 0.592,
+ "step": 1804
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.8259608907508105e-06,
+ "loss": 0.6205,
+ "step": 1805
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.81579182019623e-06,
+ "loss": 0.7462,
+ "step": 1806
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.8056330946002283e-06,
+ "loss": 0.7299,
+ "step": 1807
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7954847309563504e-06,
+ "loss": 0.7064,
+ "step": 1808
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7853467462407966e-06,
+ "loss": 0.6426,
+ "step": 1809
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7752191574124176e-06,
+ "loss": 0.6786,
+ "step": 1810
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7651019814126656e-06,
+ "loss": 0.6807,
+ "step": 1811
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7549952351655795e-06,
+ "loss": 0.6862,
+ "step": 1812
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7448989355777497e-06,
+ "loss": 0.6763,
+ "step": 1813
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.734813099538289e-06,
+ "loss": 0.6925,
+ "step": 1814
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.724737743918809e-06,
+ "loss": 0.6703,
+ "step": 1815
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7146728855733947e-06,
+ "loss": 0.7065,
+ "step": 1816
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.7046185413385573e-06,
+ "loss": 0.6716,
+ "step": 1817
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.694574728033231e-06,
+ "loss": 0.7259,
+ "step": 1818
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6845414624587326e-06,
+ "loss": 0.6714,
+ "step": 1819
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.674518761398733e-06,
+ "loss": 0.5993,
+ "step": 1820
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.664506641619232e-06,
+ "loss": 0.6699,
+ "step": 1821
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.65450511986853e-06,
+ "loss": 0.6568,
+ "step": 1822
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.644514212877187e-06,
+ "loss": 0.6742,
+ "step": 1823
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6345339373580267e-06,
+ "loss": 0.6741,
+ "step": 1824
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.62456431000608e-06,
+ "loss": 0.7003,
+ "step": 1825
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6146053474985565e-06,
+ "loss": 0.718,
+ "step": 1826
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6046570664948357e-06,
+ "loss": 0.7227,
+ "step": 1827
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5947194836364264e-06,
+ "loss": 0.7127,
+ "step": 1828
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5847926155469427e-06,
+ "loss": 0.713,
+ "step": 1829
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.574876478832072e-06,
+ "loss": 0.585,
+ "step": 1830
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5649710900795564e-06,
+ "loss": 0.6864,
+ "step": 1831
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5550764658591486e-06,
+ "loss": 0.7154,
+ "step": 1832
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5451926227225997e-06,
+ "loss": 0.633,
+ "step": 1833
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5353195772036352e-06,
+ "loss": 0.6982,
+ "step": 1834
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.525457345817902e-06,
+ "loss": 0.6182,
+ "step": 1835
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5156059450629677e-06,
+ "loss": 0.697,
+ "step": 1836
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5057653914182787e-06,
+ "loss": 0.6626,
+ "step": 1837
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.4959357013451368e-06,
+ "loss": 0.6936,
+ "step": 1838
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.486116891286674e-06,
+ "loss": 0.7063,
+ "step": 1839
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.4763089776678206e-06,
+ "loss": 0.6365,
+ "step": 1840
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.4665119768952736e-06,
+ "loss": 0.694,
+ "step": 1841
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.456725905357482e-06,
+ "loss": 0.6458,
+ "step": 1842
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.4469507794246105e-06,
+ "loss": 0.6356,
+ "step": 1843
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.4371866154485147e-06,
+ "loss": 0.6742,
+ "step": 1844
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.42743342976271e-06,
+ "loss": 0.7007,
+ "step": 1845
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.417691238682351e-06,
+ "loss": 0.6199,
+ "step": 1846
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.4079600585041993e-06,
+ "loss": 0.6948,
+ "step": 1847
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3982399055065953e-06,
+ "loss": 0.6847,
+ "step": 1848
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3885307959494374e-06,
+ "loss": 0.6517,
+ "step": 1849
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.378832746074143e-06,
+ "loss": 0.6781,
+ "step": 1850
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.369145772103637e-06,
+ "loss": 0.7105,
+ "step": 1851
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.359469890242313e-06,
+ "loss": 0.6411,
+ "step": 1852
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.349805116676008e-06,
+ "loss": 0.6623,
+ "step": 1853
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3401514675719815e-06,
+ "loss": 0.6063,
+ "step": 1854
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3305089590788796e-06,
+ "loss": 0.7267,
+ "step": 1855
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3208776073267146e-06,
+ "loss": 0.6729,
+ "step": 1856
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3112574284268395e-06,
+ "loss": 0.6448,
+ "step": 1857
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.301648438471905e-06,
+ "loss": 0.6505,
+ "step": 1858
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2920506535358566e-06,
+ "loss": 0.6564,
+ "step": 1859
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2824640896738935e-06,
+ "loss": 0.6825,
+ "step": 1860
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2728887629224415e-06,
+ "loss": 0.7094,
+ "step": 1861
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.263324689299132e-06,
+ "loss": 0.6723,
+ "step": 1862
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2537718848027744e-06,
+ "loss": 0.6382,
+ "step": 1863
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2442303654133124e-06,
+ "loss": 0.6168,
+ "step": 1864
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.234700147091834e-06,
+ "loss": 0.6095,
+ "step": 1865
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.22518124578051e-06,
+ "loss": 0.7224,
+ "step": 1866
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.2156736774025766e-06,
+ "loss": 0.713,
+ "step": 1867
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.206177457862321e-06,
+ "loss": 0.7005,
+ "step": 1868
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1966926030450408e-06,
+ "loss": 0.7298,
+ "step": 1869
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.187219128817025e-06,
+ "loss": 0.6716,
+ "step": 1870
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1777570510255262e-06,
+ "loss": 0.7283,
+ "step": 1871
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.168306385498733e-06,
+ "loss": 0.6554,
+ "step": 1872
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1588671480457324e-06,
+ "loss": 0.74,
+ "step": 1873
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.149439354456514e-06,
+ "loss": 0.6014,
+ "step": 1874
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1400230205019124e-06,
+ "loss": 0.6786,
+ "step": 1875
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1306181619335894e-06,
+ "loss": 0.7073,
+ "step": 1876
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.121224794484019e-06,
+ "loss": 0.6562,
+ "step": 1877
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1118429338664467e-06,
+ "loss": 0.6157,
+ "step": 1878
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.102472595774875e-06,
+ "loss": 0.5941,
+ "step": 1879
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.093113795884026e-06,
+ "loss": 0.6177,
+ "step": 1880
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0837665498493273e-06,
+ "loss": 0.6888,
+ "step": 1881
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.074430873306865e-06,
+ "loss": 0.6168,
+ "step": 1882
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0651067818733948e-06,
+ "loss": 0.7151,
+ "step": 1883
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.05579429114627e-06,
+ "loss": 0.6962,
+ "step": 1884
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.046493416703452e-06,
+ "loss": 0.6787,
+ "step": 1885
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0372041741034642e-06,
+ "loss": 0.603,
+ "step": 1886
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0279265788853763e-06,
+ "loss": 0.6678,
+ "step": 1887
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0186606465687705e-06,
+ "loss": 0.6468,
+ "step": 1888
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0094063926537233e-06,
+ "loss": 0.698,
+ "step": 1889
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.000163832620775e-06,
+ "loss": 0.6156,
+ "step": 1890
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 2.990932981930893e-06,
+ "loss": 0.588,
+ "step": 1891
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.98171385602548e-06,
+ "loss": 0.621,
+ "step": 1892
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.972506470326303e-06,
+ "loss": 0.5919,
+ "step": 1893
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9633108402355027e-06,
+ "loss": 0.6834,
+ "step": 1894
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.95412698113555e-06,
+ "loss": 0.7228,
+ "step": 1895
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.944954908389229e-06,
+ "loss": 0.7329,
+ "step": 1896
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.935794637339605e-06,
+ "loss": 0.6899,
+ "step": 1897
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9266461833100047e-06,
+ "loss": 0.7258,
+ "step": 1898
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.917509561603977e-06,
+ "loss": 0.7335,
+ "step": 1899
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9083847875052894e-06,
+ "loss": 0.6796,
+ "step": 1900
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.899271876277887e-06,
+ "loss": 0.6627,
+ "step": 1901
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.890170843165868e-06,
+ "loss": 0.6368,
+ "step": 1902
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.881081703393466e-06,
+ "loss": 0.6972,
+ "step": 1903
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8720044721650155e-06,
+ "loss": 0.6917,
+ "step": 1904
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.862939164664924e-06,
+ "loss": 0.7007,
+ "step": 1905
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8538857960576694e-06,
+ "loss": 0.6081,
+ "step": 1906
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.844844381487748e-06,
+ "loss": 0.6639,
+ "step": 1907
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.835814936079655e-06,
+ "loss": 0.7297,
+ "step": 1908
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8267974749378714e-06,
+ "loss": 0.587,
+ "step": 1909
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8177920131468273e-06,
+ "loss": 0.6426,
+ "step": 1910
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.808798565770883e-06,
+ "loss": 0.6864,
+ "step": 1911
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.799817147854299e-06,
+ "loss": 0.6438,
+ "step": 1912
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.790847774421215e-06,
+ "loss": 0.6162,
+ "step": 1913
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.7818904604756125e-06,
+ "loss": 0.6957,
+ "step": 1914
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.7729452210013173e-06,
+ "loss": 0.6697,
+ "step": 1915
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.764012070961948e-06,
+ "loss": 0.6727,
+ "step": 1916
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7550910253008933e-06,
+ "loss": 0.6875,
+ "step": 1917
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7461820989413036e-06,
+ "loss": 0.6681,
+ "step": 1918
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.737285306786053e-06,
+ "loss": 0.7507,
+ "step": 1919
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7284006637177164e-06,
+ "loss": 0.6969,
+ "step": 1920
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7195281845985467e-06,
+ "loss": 0.6512,
+ "step": 1921
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7106678842704514e-06,
+ "loss": 0.6819,
+ "step": 1922
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.701819777554956e-06,
+ "loss": 0.604,
+ "step": 1923
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6929838792532035e-06,
+ "loss": 0.6695,
+ "step": 1924
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.684160204145899e-06,
+ "loss": 0.6242,
+ "step": 1925
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.675348766993311e-06,
+ "loss": 0.6749,
+ "step": 1926
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.666549582535233e-06,
+ "loss": 0.6846,
+ "step": 1927
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6577626654909617e-06,
+ "loss": 0.6683,
+ "step": 1928
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.648988030559274e-06,
+ "loss": 0.6779,
+ "step": 1929
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.640225692418401e-06,
+ "loss": 0.7126,
+ "step": 1930
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6314756657260053e-06,
+ "loss": 0.6897,
+ "step": 1931
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.622737965119145e-06,
+ "loss": 0.6699,
+ "step": 1932
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6140126052142788e-06,
+ "loss": 0.7532,
+ "step": 1933
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6052996006072008e-06,
+ "loss": 0.5257,
+ "step": 1934
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5965989658730485e-06,
+ "loss": 0.6552,
+ "step": 1935
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5879107155662676e-06,
+ "loss": 0.6543,
+ "step": 1936
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5792348642205823e-06,
+ "loss": 0.6137,
+ "step": 1937
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.570571426348978e-06,
+ "loss": 0.6618,
+ "step": 1938
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5619204164436796e-06,
+ "loss": 0.6431,
+ "step": 1939
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.553281848976109e-06,
+ "loss": 0.7341,
+ "step": 1940
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.544655738396885e-06,
+ "loss": 0.6282,
+ "step": 1941
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.536042099135795e-06,
+ "loss": 0.6754,
+ "step": 1942
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.527440945601747e-06,
+ "loss": 0.683,
+ "step": 1943
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.518852292182774e-06,
+ "loss": 0.5835,
+ "step": 1944
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.5102761532460005e-06,
+ "loss": 0.6472,
+ "step": 1945
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.501712543137602e-06,
+ "loss": 0.699,
+ "step": 1946
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4931614761828182e-06,
+ "loss": 0.7149,
+ "step": 1947
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.484622966685892e-06,
+ "loss": 0.6452,
+ "step": 1948
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.476097028930058e-06,
+ "loss": 0.6099,
+ "step": 1949
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4675836771775265e-06,
+ "loss": 0.6269,
+ "step": 1950
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4590829256694615e-06,
+ "loss": 0.6662,
+ "step": 1951
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.450594788625932e-06,
+ "loss": 0.6949,
+ "step": 1952
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.442119280245917e-06,
+ "loss": 0.6451,
+ "step": 1953
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.433656414707272e-06,
+ "loss": 0.6177,
+ "step": 1954
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.425206206166688e-06,
+ "loss": 0.7183,
+ "step": 1955
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.416768668759705e-06,
+ "loss": 0.6342,
+ "step": 1956
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.408343816600657e-06,
+ "loss": 0.6453,
+ "step": 1957
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3999316637826486e-06,
+ "loss": 0.7295,
+ "step": 1958
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3915322243775564e-06,
+ "loss": 0.6968,
+ "step": 1959
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3831455124359793e-06,
+ "loss": 0.6339,
+ "step": 1960
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.374771541987232e-06,
+ "loss": 0.7278,
+ "step": 1961
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3664103270393123e-06,
+ "loss": 0.6902,
+ "step": 1962
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.358061881578885e-06,
+ "loss": 0.6355,
+ "step": 1963
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.349726219571239e-06,
+ "loss": 0.621,
+ "step": 1964
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3414033549603022e-06,
+ "loss": 0.6438,
+ "step": 1965
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3330933016685753e-06,
+ "loss": 0.6355,
+ "step": 1966
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.3247960735971385e-06,
+ "loss": 0.618,
+ "step": 1967
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.3165116846256154e-06,
+ "loss": 0.6635,
+ "step": 1968
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.308240148612152e-06,
+ "loss": 0.6272,
+ "step": 1969
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.299981479393393e-06,
+ "loss": 0.7038,
+ "step": 1970
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.291735690784461e-06,
+ "loss": 0.6614,
+ "step": 1971
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2835027965789345e-06,
+ "loss": 0.6224,
+ "step": 1972
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.275282810548811e-06,
+ "loss": 0.6706,
+ "step": 1973
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2670757464445146e-06,
+ "loss": 0.5771,
+ "step": 1974
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2588816179948324e-06,
+ "loss": 0.6321,
+ "step": 1975
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2507004389069267e-06,
+ "loss": 0.6861,
+ "step": 1976
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2425322228662914e-06,
+ "loss": 0.6133,
+ "step": 1977
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2343769835367425e-06,
+ "loss": 0.6687,
+ "step": 1978
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.226234734560381e-06,
+ "loss": 0.7167,
+ "step": 1979
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2181054895575847e-06,
+ "loss": 0.7118,
+ "step": 1980
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2099892621269705e-06,
+ "loss": 0.68,
+ "step": 1981
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.20188606584538e-06,
+ "loss": 0.5915,
+ "step": 1982
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.193795914267871e-06,
+ "loss": 0.6398,
+ "step": 1983
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1857188209276605e-06,
+ "loss": 0.7034,
+ "step": 1984
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1776547993361306e-06,
+ "loss": 0.7103,
+ "step": 1985
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.169603862982801e-06,
+ "loss": 0.6161,
+ "step": 1986
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1615660253352888e-06,
+ "loss": 0.6937,
+ "step": 1987
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.153541299839318e-06,
+ "loss": 0.7493,
+ "step": 1988
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.145529699918666e-06,
+ "loss": 0.6235,
+ "step": 1989
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1375312389751546e-06,
+ "loss": 0.6494,
+ "step": 1990
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.129545930388627e-06,
+ "loss": 0.6255,
+ "step": 1991
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.121573787516936e-06,
+ "loss": 0.6712,
+ "step": 1992
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.113614823695892e-06,
+ "loss": 0.5916,
+ "step": 1993
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.105669052239274e-06,
+ "loss": 0.6827,
+ "step": 1994
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0977364864387896e-06,
+ "loss": 0.7326,
+ "step": 1995
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0898171395640463e-06,
+ "loss": 0.7246,
+ "step": 1996
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.081911024862554e-06,
+ "loss": 0.6045,
+ "step": 1997
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.074018155559683e-06,
+ "loss": 0.6076,
+ "step": 1998
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.066138544858637e-06,
+ "loss": 0.6543,
+ "step": 1999
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0582722059404515e-06,
+ "loss": 0.6783,
+ "step": 2000
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.050419151963957e-06,
+ "loss": 0.7195,
+ "step": 2001
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0425793960657623e-06,
+ "loss": 0.6747,
+ "step": 2002
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0347529513602305e-06,
+ "loss": 0.6142,
+ "step": 2003
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0269398309394585e-06,
+ "loss": 0.6626,
+ "step": 2004
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.019140047873246e-06,
+ "loss": 0.664,
+ "step": 2005
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0113536152091006e-06,
+ "loss": 0.6426,
+ "step": 2006
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0035805459721768e-06,
+ "loss": 0.6046,
+ "step": 2007
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9958208531652876e-06,
+ "loss": 0.6491,
+ "step": 2008
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9880745497688658e-06,
+ "loss": 0.7318,
+ "step": 2009
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9803416487409465e-06,
+ "loss": 0.7052,
+ "step": 2010
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.972622163017145e-06,
+ "loss": 0.6415,
+ "step": 2011
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.964916105510637e-06,
+ "loss": 0.622,
+ "step": 2012
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9572234891121354e-06,
+ "loss": 0.6553,
+ "step": 2013
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9495443266898615e-06,
+ "loss": 0.7111,
+ "step": 2014
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9418786310895467e-06,
+ "loss": 0.6808,
+ "step": 2015
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.934226415134377e-06,
+ "loss": 0.6634,
+ "step": 2016
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.9265876916250026e-06,
+ "loss": 0.7151,
+ "step": 2017
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.918962473339496e-06,
+ "loss": 0.623,
+ "step": 2018
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.9113507730333435e-06,
+ "loss": 0.6589,
+ "step": 2019
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.9037526034394149e-06,
+ "loss": 0.6568,
+ "step": 2020
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8961679772679486e-06,
+ "loss": 0.7222,
+ "step": 2021
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8885969072065224e-06,
+ "loss": 0.6626,
+ "step": 2022
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.88103940592004e-06,
+ "loss": 0.7087,
+ "step": 2023
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.873495486050716e-06,
+ "loss": 0.5933,
+ "step": 2024
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8659651602180295e-06,
+ "loss": 0.673,
+ "step": 2025
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.858448441018732e-06,
+ "loss": 0.7387,
+ "step": 2026
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8509453410268086e-06,
+ "loss": 0.6716,
+ "step": 2027
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8434558727934636e-06,
+ "loss": 0.7268,
+ "step": 2028
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.835980048847098e-06,
+ "loss": 0.6437,
+ "step": 2029
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8285178816932913e-06,
+ "loss": 0.6885,
+ "step": 2030
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8210693838147686e-06,
+ "loss": 0.6504,
+ "step": 2031
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8136345676713962e-06,
+ "loss": 0.6063,
+ "step": 2032
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8062134457001612e-06,
+ "loss": 0.6962,
+ "step": 2033
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7988060303151256e-06,
+ "loss": 0.641,
+ "step": 2034
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7914123339074351e-06,
+ "loss": 0.6483,
+ "step": 2035
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7840323688452833e-06,
+ "loss": 0.7294,
+ "step": 2036
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7766661474738933e-06,
+ "loss": 0.6718,
+ "step": 2037
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7693136821154989e-06,
+ "loss": 0.6823,
+ "step": 2038
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7619749850693235e-06,
+ "loss": 0.6257,
+ "step": 2039
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7546500686115542e-06,
+ "loss": 0.7181,
+ "step": 2040
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7473389449953304e-06,
+ "loss": 0.6958,
+ "step": 2041
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7400416264507192e-06,
+ "loss": 0.6977,
+ "step": 2042
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7327581251846902e-06,
+ "loss": 0.669,
+ "step": 2043
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7254884533811034e-06,
+ "loss": 0.6546,
+ "step": 2044
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7182326232006875e-06,
+ "loss": 0.6974,
+ "step": 2045
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7109906467810033e-06,
+ "loss": 0.7047,
+ "step": 2046
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7037625362364585e-06,
+ "loss": 0.642,
+ "step": 2047
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6965483036582454e-06,
+ "loss": 0.6709,
+ "step": 2048
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6893479611143527e-06,
+ "loss": 0.6953,
+ "step": 2049
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6821615206495313e-06,
+ "loss": 0.7387,
+ "step": 2050
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.674988994285276e-06,
+ "loss": 0.6685,
+ "step": 2051
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.667830394019807e-06,
+ "loss": 0.6557,
+ "step": 2052
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6606857318280478e-06,
+ "loss": 0.709,
+ "step": 2053
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6535550196616102e-06,
+ "loss": 0.6547,
+ "step": 2054
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6464382694487579e-06,
+ "loss": 0.5857,
+ "step": 2055
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6393354930944194e-06,
+ "loss": 0.7401,
+ "step": 2056
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6322467024801282e-06,
+ "loss": 0.6954,
+ "step": 2057
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6251719094640328e-06,
+ "loss": 0.6253,
+ "step": 2058
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6181111258808658e-06,
+ "loss": 0.6421,
+ "step": 2059
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.611064363541922e-06,
+ "loss": 0.6626,
+ "step": 2060
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6040316342350425e-06,
+ "loss": 0.6445,
+ "step": 2061
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.597012949724598e-06,
+ "loss": 0.5191,
+ "step": 2062
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.590008321751454e-06,
+ "loss": 0.7103,
+ "step": 2063
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.5830177620329712e-06,
+ "loss": 0.6725,
+ "step": 2064
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.5760412822629822e-06,
+ "loss": 0.684,
+ "step": 2065
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.5690788941117508e-06,
+ "loss": 0.6522,
+ "step": 2066
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5621306092259804e-06,
+ "loss": 0.6748,
+ "step": 2067
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5551964392287788e-06,
+ "loss": 0.6418,
+ "step": 2068
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5482763957196423e-06,
+ "loss": 0.6313,
+ "step": 2069
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5413704902744364e-06,
+ "loss": 0.6608,
+ "step": 2070
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5344787344453803e-06,
+ "loss": 0.7146,
+ "step": 2071
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5276011397610136e-06,
+ "loss": 0.6825,
+ "step": 2072
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.520737717726195e-06,
+ "loss": 0.5956,
+ "step": 2073
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5138884798220798e-06,
+ "loss": 0.6534,
+ "step": 2074
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.507053437506083e-06,
+ "loss": 0.713,
+ "step": 2075
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.500232602211883e-06,
+ "loss": 0.6835,
+ "step": 2076
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4934259853493883e-06,
+ "loss": 0.7259,
+ "step": 2077
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4866335983047264e-06,
+ "loss": 0.7011,
+ "step": 2078
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4798554524402176e-06,
+ "loss": 0.6075,
+ "step": 2079
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4730915590943628e-06,
+ "loss": 0.7029,
+ "step": 2080
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.466341929581816e-06,
+ "loss": 0.6197,
+ "step": 2081
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4596065751933763e-06,
+ "loss": 0.6749,
+ "step": 2082
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.452885507195959e-06,
+ "loss": 0.6907,
+ "step": 2083
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4461787368325863e-06,
+ "loss": 0.6944,
+ "step": 2084
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.439486275322357e-06,
+ "loss": 0.6913,
+ "step": 2085
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4328081338604383e-06,
+ "loss": 0.7284,
+ "step": 2086
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4261443236180418e-06,
+ "loss": 0.7109,
+ "step": 2087
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4194948557424083e-06,
+ "loss": 0.6825,
+ "step": 2088
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.412859741356778e-06,
+ "loss": 0.6814,
+ "step": 2089
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4062389915603903e-06,
+ "loss": 0.6101,
+ "step": 2090
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.3996326174284502e-06,
+ "loss": 0.6181,
+ "step": 2091
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.393040630012118e-06,
+ "loss": 0.6933,
+ "step": 2092
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.386463040338485e-06,
+ "loss": 0.6694,
+ "step": 2093
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3798998594105606e-06,
+ "loss": 0.7168,
+ "step": 2094
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3733510982072485e-06,
+ "loss": 0.6924,
+ "step": 2095
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3668167676833332e-06,
+ "loss": 0.6372,
+ "step": 2096
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3602968787694615e-06,
+ "loss": 0.6818,
+ "step": 2097
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3537914423721144e-06,
+ "loss": 0.7029,
+ "step": 2098
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3473004693736037e-06,
+ "loss": 0.5992,
+ "step": 2099
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3408239706320459e-06,
+ "loss": 0.7204,
+ "step": 2100
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3343619569813437e-06,
+ "loss": 0.692,
+ "step": 2101
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3279144392311693e-06,
+ "loss": 0.6072,
+ "step": 2102
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3214814281669486e-06,
+ "loss": 0.5775,
+ "step": 2103
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.31506293454983e-06,
+ "loss": 0.6609,
+ "step": 2104
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.308658969116694e-06,
+ "loss": 0.6395,
+ "step": 2105
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.302269542580109e-06,
+ "loss": 0.6793,
+ "step": 2106
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2958946656283188e-06,
+ "loss": 0.6859,
+ "step": 2107
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2895343489252355e-06,
+ "loss": 0.6091,
+ "step": 2108
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2831886031104123e-06,
+ "loss": 0.6474,
+ "step": 2109
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2768574387990284e-06,
+ "loss": 0.6831,
+ "step": 2110
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2705408665818707e-06,
+ "loss": 0.6464,
+ "step": 2111
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2642388970253194e-06,
+ "loss": 0.6392,
+ "step": 2112
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2579515406713194e-06,
+ "loss": 0.6657,
+ "step": 2113
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2516788080373766e-06,
+ "loss": 0.6294,
+ "step": 2114
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2454207096165405e-06,
+ "loss": 0.6117,
+ "step": 2115
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.239177255877365e-06,
+ "loss": 0.7068,
+ "step": 2116
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2329484572639183e-06,
+ "loss": 0.6672,
+ "step": 2117
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2267343241957508e-06,
+ "loss": 0.7194,
+ "step": 2118
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2205348670678774e-06,
+ "loss": 0.6514,
+ "step": 2119
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.214350096250767e-06,
+ "loss": 0.6784,
+ "step": 2120
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2081800220903207e-06,
+ "loss": 0.7043,
+ "step": 2121
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2020246549078497e-06,
+ "loss": 0.716,
+ "step": 2122
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1958840050000686e-06,
+ "loss": 0.6255,
+ "step": 2123
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.189758082639072e-06,
+ "loss": 0.6564,
+ "step": 2124
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.183646898072318e-06,
+ "loss": 0.7081,
+ "step": 2125
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1775504615226118e-06,
+ "loss": 0.6784,
+ "step": 2126
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1714687831880867e-06,
+ "loss": 0.7031,
+ "step": 2127
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.165401873242188e-06,
+ "loss": 0.6548,
+ "step": 2128
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1593497418336619e-06,
+ "loss": 0.662,
+ "step": 2129
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1533123990865225e-06,
+ "loss": 0.6395,
+ "step": 2130
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1472898551000544e-06,
+ "loss": 0.6303,
+ "step": 2131
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1412821199487845e-06,
+ "loss": 0.6664,
+ "step": 2132
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.135289203682466e-06,
+ "loss": 0.6883,
+ "step": 2133
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1293111163260639e-06,
+ "loss": 0.614,
+ "step": 2134
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.123347867879737e-06,
+ "loss": 0.7076,
+ "step": 2135
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.117399468318824e-06,
+ "loss": 0.675,
+ "step": 2136
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1114659275938188e-06,
+ "loss": 0.6843,
+ "step": 2137
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.105547255630368e-06,
+ "loss": 0.6603,
+ "step": 2138
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.099643462329235e-06,
+ "loss": 0.6983,
+ "step": 2139
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.0937545575663023e-06,
+ "loss": 0.7402,
+ "step": 2140
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.0878805511925438e-06,
+ "loss": 0.5793,
+ "step": 2141
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0820214530340124e-06,
+ "loss": 0.6841,
+ "step": 2142
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0761772728918208e-06,
+ "loss": 0.6077,
+ "step": 2143
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0703480205421302e-06,
+ "loss": 0.6417,
+ "step": 2144
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0645337057361215e-06,
+ "loss": 0.632,
+ "step": 2145
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0587343382000027e-06,
+ "loss": 0.6747,
+ "step": 2146
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0529499276349686e-06,
+ "loss": 0.5994,
+ "step": 2147
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0471804837171916e-06,
+ "loss": 0.5687,
+ "step": 2148
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0414260160978141e-06,
+ "loss": 0.6707,
+ "step": 2149
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0356865344029231e-06,
+ "loss": 0.4886,
+ "step": 2150
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.029962048233537e-06,
+ "loss": 0.6509,
+ "step": 2151
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0242525671655912e-06,
+ "loss": 0.6144,
+ "step": 2152
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0185581007499212e-06,
+ "loss": 0.6814,
+ "step": 2153
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0128786585122385e-06,
+ "loss": 0.6658,
+ "step": 2154
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0072142499531346e-06,
+ "loss": 0.676,
+ "step": 2155
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0015648845480453e-06,
+ "loss": 0.5966,
+ "step": 2156
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.95930571747239e-07,
+ "loss": 0.6368,
+ "step": 2157
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.903113209758098e-07,
+ "loss": 0.6959,
+ "step": 2158
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.84707141633654e-07,
+ "loss": 0.6308,
+ "step": 2159
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.791180430954562e-07,
+ "loss": 0.6793,
+ "step": 2160
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.735440347106762e-07,
+ "loss": 0.6606,
+ "step": 2161
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.679851258035277e-07,
+ "loss": 0.6803,
+ "step": 2162
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.624413256729636e-07,
+ "loss": 0.5774,
+ "step": 2163
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.569126435926735e-07,
+ "loss": 0.7197,
+ "step": 2164
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.513990888110447e-07,
+ "loss": 0.6423,
+ "step": 2165
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.459006705511664e-07,
+ "loss": 0.6318,
+ "step": 2166
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.404173980108067e-07,
+ "loss": 0.6008,
+ "step": 2167
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.349492803623972e-07,
+ "loss": 0.7172,
+ "step": 2168
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.294963267530177e-07,
+ "loss": 0.7605,
+ "step": 2169
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.240585463043849e-07,
+ "loss": 0.6705,
+ "step": 2170
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.186359481128282e-07,
+ "loss": 0.619,
+ "step": 2171
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.132285412492825e-07,
+ "loss": 0.6655,
+ "step": 2172
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.078363347592767e-07,
+ "loss": 0.6528,
+ "step": 2173
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.024593376629009e-07,
+ "loss": 0.6338,
+ "step": 2174
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.970975589548114e-07,
+ "loss": 0.7132,
+ "step": 2175
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.917510076042058e-07,
+ "loss": 0.6475,
+ "step": 2176
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.864196925548063e-07,
+ "loss": 0.5703,
+ "step": 2177
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.811036227248515e-07,
+ "loss": 0.6849,
+ "step": 2178
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.758028070070768e-07,
+ "loss": 0.7121,
+ "step": 2179
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.705172542686968e-07,
+ "loss": 0.6272,
+ "step": 2180
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.652469733513958e-07,
+ "loss": 0.7059,
+ "step": 2181
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.599919730713191e-07,
+ "loss": 0.6933,
+ "step": 2182
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.547522622190385e-07,
+ "loss": 0.5546,
+ "step": 2183
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.495278495595572e-07,
+ "loss": 0.6194,
+ "step": 2184
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.443187438322864e-07,
+ "loss": 0.6409,
+ "step": 2185
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.391249537510271e-07,
+ "loss": 0.6351,
+ "step": 2186
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.339464880039671e-07,
+ "loss": 0.6684,
+ "step": 2187
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.287833552536584e-07,
+ "loss": 0.6821,
+ "step": 2188
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.236355641369975e-07,
+ "loss": 0.7171,
+ "step": 2189
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.185031232652252e-07,
+ "loss": 0.6063,
+ "step": 2190
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.133860412238992e-07,
+ "loss": 0.5808,
+ "step": 2191
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 8.082843265728879e-07,
+ "loss": 0.6641,
+ "step": 2192
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 8.031979878463525e-07,
+ "loss": 0.752,
+ "step": 2193
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.981270335527347e-07,
+ "loss": 0.655,
+ "step": 2194
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.930714721747323e-07,
+ "loss": 0.6117,
+ "step": 2195
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.880313121693073e-07,
+ "loss": 0.6563,
+ "step": 2196
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.830065619676519e-07,
+ "loss": 0.6065,
+ "step": 2197
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.77997229975177e-07,
+ "loss": 0.5963,
+ "step": 2198
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.730033245715063e-07,
+ "loss": 0.6669,
+ "step": 2199
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.680248541104574e-07,
+ "loss": 0.7173,
+ "step": 2200
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.630618269200285e-07,
+ "loss": 0.698,
+ "step": 2201
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.58114251302382e-07,
+ "loss": 0.6352,
+ "step": 2202
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.531821355338386e-07,
+ "loss": 0.61,
+ "step": 2203
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.482654878648465e-07,
+ "loss": 0.6725,
+ "step": 2204
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.433643165199933e-07,
+ "loss": 0.6275,
+ "step": 2205
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.384786296979662e-07,
+ "loss": 0.6146,
+ "step": 2206
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.336084355715555e-07,
+ "loss": 0.6334,
+ "step": 2207
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.287537422876323e-07,
+ "loss": 0.596,
+ "step": 2208
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.239145579671414e-07,
+ "loss": 0.7014,
+ "step": 2209
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.190908907050808e-07,
+ "loss": 0.6938,
+ "step": 2210
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.142827485704951e-07,
+ "loss": 0.6408,
+ "step": 2211
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.094901396064535e-07,
+ "loss": 0.6939,
+ "step": 2212
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.047130718300433e-07,
+ "loss": 0.6949,
+ "step": 2213
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 6.999515532323598e-07,
+ "loss": 0.5715,
+ "step": 2214
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 6.952055917784783e-07,
+ "loss": 0.6716,
+ "step": 2215
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 6.904751954074574e-07,
+ "loss": 0.7574,
+ "step": 2216
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.857603720323136e-07,
+ "loss": 0.6852,
+ "step": 2217
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.810611295400171e-07,
+ "loss": 0.7108,
+ "step": 2218
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.76377475791471e-07,
+ "loss": 0.6669,
+ "step": 2219
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.717094186215056e-07,
+ "loss": 0.6764,
+ "step": 2220
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.670569658388537e-07,
+ "loss": 0.7655,
+ "step": 2221
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.624201252261497e-07,
+ "loss": 0.6326,
+ "step": 2222
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.57798904539918e-07,
+ "loss": 0.6373,
+ "step": 2223
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.531933115105427e-07,
+ "loss": 0.6847,
+ "step": 2224
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.48603353842272e-07,
+ "loss": 0.5997,
+ "step": 2225
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.440290392131998e-07,
+ "loss": 0.694,
+ "step": 2226
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.394703752752474e-07,
+ "loss": 0.6807,
+ "step": 2227
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.349273696541625e-07,
+ "loss": 0.6281,
+ "step": 2228
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.304000299494961e-07,
+ "loss": 0.621,
+ "step": 2229
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.258883637345914e-07,
+ "loss": 0.6529,
+ "step": 2230
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.213923785565723e-07,
+ "loss": 0.6779,
+ "step": 2231
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.169120819363406e-07,
+ "loss": 0.6859,
+ "step": 2232
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.124474813685422e-07,
+ "loss": 0.6241,
+ "step": 2233
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.079985843215719e-07,
+ "loss": 0.6923,
+ "step": 2234
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.0356539823756e-07,
+ "loss": 0.6782,
+ "step": 2235
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.991479305323433e-07,
+ "loss": 0.6957,
+ "step": 2236
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.947461885954798e-07,
+ "loss": 0.641,
+ "step": 2237
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.903601797902126e-07,
+ "loss": 0.7299,
+ "step": 2238
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.859899114534662e-07,
+ "loss": 0.6611,
+ "step": 2239
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.816353908958361e-07,
+ "loss": 0.6881,
+ "step": 2240
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.772966254015755e-07,
+ "loss": 0.6499,
+ "step": 2241
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.729736222285832e-07,
+ "loss": 0.7044,
+ "step": 2242
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.686663886083877e-07,
+ "loss": 0.6453,
+ "step": 2243
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.643749317461422e-07,
+ "loss": 0.7034,
+ "step": 2244
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.600992588206011e-07,
+ "loss": 0.6614,
+ "step": 2245
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.558393769841286e-07,
+ "loss": 0.6215,
+ "step": 2246
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.51595293362659e-07,
+ "loss": 0.7524,
+ "step": 2247
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.473670150557076e-07,
+ "loss": 0.6393,
+ "step": 2248
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.431545491363488e-07,
+ "loss": 0.6916,
+ "step": 2249
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.389579026512059e-07,
+ "loss": 0.6528,
+ "step": 2250
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.347770826204401e-07,
+ "loss": 0.7439,
+ "step": 2251
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.306120960377381e-07,
+ "loss": 0.6712,
+ "step": 2252
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.264629498702966e-07,
+ "loss": 0.6405,
+ "step": 2253
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.223296510588182e-07,
+ "loss": 0.605,
+ "step": 2254
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.182122065174988e-07,
+ "loss": 0.6287,
+ "step": 2255
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.141106231340065e-07,
+ "loss": 0.6222,
+ "step": 2256
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.100249077694797e-07,
+ "loss": 0.6635,
+ "step": 2257
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.059550672585145e-07,
+ "loss": 0.6284,
+ "step": 2258
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.019011084091496e-07,
+ "loss": 0.666,
+ "step": 2259
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.978630380028582e-07,
+ "loss": 0.6597,
+ "step": 2260
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.938408627945335e-07,
+ "loss": 0.6329,
+ "step": 2261
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.898345895124801e-07,
+ "loss": 0.7093,
+ "step": 2262
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.858442248583995e-07,
+ "loss": 0.6425,
+ "step": 2263
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.818697755073876e-07,
+ "loss": 0.6582,
+ "step": 2264
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.779112481079107e-07,
+ "loss": 0.5798,
+ "step": 2265
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.7396864928180296e-07,
+ "loss": 0.624,
+ "step": 2266
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.700419856242555e-07,
+ "loss": 0.6741,
+ "step": 2267
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.6613126370379627e-07,
+ "loss": 0.7305,
+ "step": 2268
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.6223649006229553e-07,
+ "loss": 0.7157,
+ "step": 2269
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.583576712149385e-07,
+ "loss": 0.5922,
+ "step": 2270
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.5449481365022143e-07,
+ "loss": 0.6156,
+ "step": 2271
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.5064792382994216e-07,
+ "loss": 0.6635,
+ "step": 2272
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.468170081891898e-07,
+ "loss": 0.6872,
+ "step": 2273
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.4300207313632713e-07,
+ "loss": 0.6408,
+ "step": 2274
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.392031250529871e-07,
+ "loss": 0.6066,
+ "step": 2275
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.3542017029406083e-07,
+ "loss": 0.6139,
+ "step": 2276
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.316532151876807e-07,
+ "loss": 0.7126,
+ "step": 2277
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.279022660352228e-07,
+ "loss": 0.7134,
+ "step": 2278
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.2416732911128247e-07,
+ "loss": 0.7205,
+ "step": 2279
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.2044841066367084e-07,
+ "loss": 0.6782,
+ "step": 2280
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.167455169134027e-07,
+ "loss": 0.694,
+ "step": 2281
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.130586540546899e-07,
+ "loss": 0.7133,
+ "step": 2282
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.093878282549246e-07,
+ "loss": 0.6632,
+ "step": 2283
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.057330456546737e-07,
+ "loss": 0.7287,
+ "step": 2284
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.0209431236766793e-07,
+ "loss": 0.6384,
+ "step": 2285
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.9847163448078373e-07,
+ "loss": 0.6906,
+ "step": 2286
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.9486501805405253e-07,
+ "loss": 0.664,
+ "step": 2287
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.9127446912062606e-07,
+ "loss": 0.6093,
+ "step": 2288
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.876999936867865e-07,
+ "loss": 0.6288,
+ "step": 2289
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.8414159773192294e-07,
+ "loss": 0.6363,
+ "step": 2290
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.8059928720852957e-07,
+ "loss": 0.6301,
+ "step": 2291
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.770730680421919e-07,
+ "loss": 0.6459,
+ "step": 2292
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.735629461315804e-07,
+ "loss": 0.6054,
+ "step": 2293
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.7006892734843145e-07,
+ "loss": 0.6695,
+ "step": 2294
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.6659101753754975e-07,
+ "loss": 0.6569,
+ "step": 2295
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.6312922251679373e-07,
+ "loss": 0.6454,
+ "step": 2296
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.5968354807705994e-07,
+ "loss": 0.6888,
+ "step": 2297
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.5625399998228116e-07,
+ "loss": 0.6349,
+ "step": 2298
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.528405839694149e-07,
+ "loss": 0.6356,
+ "step": 2299
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.4944330574843144e-07,
+ "loss": 0.6753,
+ "step": 2300
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.46062171002306e-07,
+ "loss": 0.7087,
+ "step": 2301
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.426971853870109e-07,
+ "loss": 0.5928,
+ "step": 2302
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.3934835453149797e-07,
+ "loss": 0.6814,
+ "step": 2303
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.360156840377027e-07,
+ "loss": 0.6582,
+ "step": 2304
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.3269917948052454e-07,
+ "loss": 0.7064,
+ "step": 2305
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.293988464078202e-07,
+ "loss": 0.6736,
+ "step": 2306
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.2611469034039334e-07,
+ "loss": 0.6719,
+ "step": 2307
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.2284671677199065e-07,
+ "loss": 0.6654,
+ "step": 2308
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.1959493116928473e-07,
+ "loss": 0.5991,
+ "step": 2309
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.163593389718711e-07,
+ "loss": 0.6861,
+ "step": 2310
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.131399455922568e-07,
+ "loss": 0.7172,
+ "step": 2311
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.099367564158484e-07,
+ "loss": 0.57,
+ "step": 2312
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.0674977680095086e-07,
+ "loss": 0.6722,
+ "step": 2313
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.03579012078753e-07,
+ "loss": 0.6652,
+ "step": 2314
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.004244675533141e-07,
+ "loss": 0.6412,
+ "step": 2315
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 2.972861485015666e-07,
+ "loss": 0.6895,
+ "step": 2316
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 2.941640601732976e-07,
+ "loss": 0.609,
+ "step": 2317
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.910582077911428e-07,
+ "loss": 0.5773,
+ "step": 2318
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.8796859655058184e-07,
+ "loss": 0.7484,
+ "step": 2319
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.8489523161992385e-07,
+ "loss": 0.6316,
+ "step": 2320
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.818381181402985e-07,
+ "loss": 0.6377,
+ "step": 2321
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.787972612256551e-07,
+ "loss": 0.6906,
+ "step": 2322
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.7577266596274577e-07,
+ "loss": 0.6478,
+ "step": 2323
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.727643374111222e-07,
+ "loss": 0.6306,
+ "step": 2324
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.697722806031211e-07,
+ "loss": 0.5978,
+ "step": 2325
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.667965005438644e-07,
+ "loss": 0.6709,
+ "step": 2326
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.6383700221124355e-07,
+ "loss": 0.6289,
+ "step": 2327
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.6089379055591613e-07,
+ "loss": 0.6379,
+ "step": 2328
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.579668705012917e-07,
+ "loss": 0.6622,
+ "step": 2329
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.5505624694353027e-07,
+ "loss": 0.6402,
+ "step": 2330
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.5216192475152945e-07,
+ "loss": 0.6741,
+ "step": 2331
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.492839087669197e-07,
+ "loss": 0.6897,
+ "step": 2332
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.464222038040509e-07,
+ "loss": 0.7218,
+ "step": 2333
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.435768146499939e-07,
+ "loss": 0.6188,
+ "step": 2334
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.407477460645191e-07,
+ "loss": 0.6869,
+ "step": 2335
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.3793500278009974e-07,
+ "loss": 0.6749,
+ "step": 2336
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.3513858950190206e-07,
+ "loss": 0.6618,
+ "step": 2337
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.3235851090777084e-07,
+ "loss": 0.6795,
+ "step": 2338
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.2959477164822942e-07,
+ "loss": 0.5928,
+ "step": 2339
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.2684737634646737e-07,
+ "loss": 0.707,
+ "step": 2340
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.2411632959833285e-07,
+ "loss": 0.6543,
+ "step": 2341
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.2140163597233033e-07,
+ "loss": 0.6898,
+ "step": 2342
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.1870330000960504e-07,
+ "loss": 0.7114,
+ "step": 2343
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.1602132622393745e-07,
+ "loss": 0.7062,
+ "step": 2344
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.1335571910174214e-07,
+ "loss": 0.6606,
+ "step": 2345
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.1070648310205443e-07,
+ "loss": 0.7115,
+ "step": 2346
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.0807362265651943e-07,
+ "loss": 0.6885,
+ "step": 2347
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.0545714216939515e-07,
+ "loss": 0.6694,
+ "step": 2348
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.0285704601753608e-07,
+ "loss": 0.6348,
+ "step": 2349
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.0027333855038967e-07,
+ "loss": 0.686,
+ "step": 2350
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.9770602408998642e-07,
+ "loss": 0.6286,
+ "step": 2351
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.9515510693093875e-07,
+ "loss": 0.767,
+ "step": 2352
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.9262059134042443e-07,
+ "loss": 0.7519,
+ "step": 2353
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.9010248155818755e-07,
+ "loss": 0.6677,
+ "step": 2354
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.8760078179653196e-07,
+ "loss": 0.5676,
+ "step": 2355
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.8511549624030346e-07,
+ "loss": 0.6652,
+ "step": 2356
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.8264662904689533e-07,
+ "loss": 0.6257,
+ "step": 2357
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.8019418434623405e-07,
+ "loss": 0.6646,
+ "step": 2358
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.7775816624077458e-07,
+ "loss": 0.6901,
+ "step": 2359
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.753385788054962e-07,
+ "loss": 0.6192,
+ "step": 2360
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.7293542608788905e-07,
+ "loss": 0.648,
+ "step": 2361
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.7054871210795188e-07,
+ "loss": 0.6997,
+ "step": 2362
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.6817844085818768e-07,
+ "loss": 0.6115,
+ "step": 2363
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.6582461630359149e-07,
+ "loss": 0.6642,
+ "step": 2364
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.6348724238164583e-07,
+ "loss": 0.6405,
+ "step": 2365
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.6116632300231638e-07,
+ "loss": 0.6862,
+ "step": 2366
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.58861862048042e-07,
+ "loss": 0.6917,
+ "step": 2367
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.5657386337373237e-07,
+ "loss": 0.6955,
+ "step": 2368
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.5430233080675593e-07,
+ "loss": 0.7348,
+ "step": 2369
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.520472681469376e-07,
+ "loss": 0.6571,
+ "step": 2370
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.4980867916655205e-07,
+ "loss": 0.5744,
+ "step": 2371
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.475865676103161e-07,
+ "loss": 0.6859,
+ "step": 2372
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.4538093719538404e-07,
+ "loss": 0.691,
+ "step": 2373
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.431917916113401e-07,
+ "loss": 0.6932,
+ "step": 2374
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.4101913452019277e-07,
+ "loss": 0.6029,
+ "step": 2375
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.3886296955636481e-07,
+ "loss": 0.7028,
+ "step": 2376
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.3672330032669767e-07,
+ "loss": 0.6845,
+ "step": 2377
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.3460013041043606e-07,
+ "loss": 0.5391,
+ "step": 2378
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.324934633592201e-07,
+ "loss": 0.6586,
+ "step": 2379
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.304033026970908e-07,
+ "loss": 0.6749,
+ "step": 2380
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.283296519204713e-07,
+ "loss": 0.665,
+ "step": 2381
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.2627251449817247e-07,
+ "loss": 0.6648,
+ "step": 2382
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.2423189387137713e-07,
+ "loss": 0.6161,
+ "step": 2383
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.2220779345364143e-07,
+ "loss": 0.6739,
+ "step": 2384
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.2020021663088244e-07,
+ "loss": 0.7221,
+ "step": 2385
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1820916676138384e-07,
+ "loss": 0.613,
+ "step": 2386
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1623464717577804e-07,
+ "loss": 0.6879,
+ "step": 2387
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1427666117704406e-07,
+ "loss": 0.7211,
+ "step": 2388
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1233521204050634e-07,
+ "loss": 0.7459,
+ "step": 2389
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1041030301382705e-07,
+ "loss": 0.6619,
+ "step": 2390
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.085019373169971e-07,
+ "loss": 0.6651,
+ "step": 2391
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.0661011814233624e-07,
+ "loss": 0.6713,
+ "step": 2392
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 1.0473484865448524e-07,
+ "loss": 0.6655,
+ "step": 2393
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 1.0287613199039592e-07,
+ "loss": 0.697,
+ "step": 2394
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 1.0103397125933778e-07,
+ "loss": 0.6834,
+ "step": 2395
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.920836954288137e-08,
+ "loss": 0.6264,
+ "step": 2396
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.739932989489498e-08,
+ "loss": 0.5328,
+ "step": 2397
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.56068553415479e-08,
+ "loss": 0.6832,
+ "step": 2398
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.383094888129274e-08,
+ "loss": 0.6567,
+ "step": 2399
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.207161348487315e-08,
+ "loss": 0.6521,
+ "step": 2400
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.032885209530717e-08,
+ "loss": 0.6031,
+ "step": 2401
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.860266762789283e-08,
+ "loss": 0.5634,
+ "step": 2402
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.689306297019362e-08,
+ "loss": 0.6906,
+ "step": 2403
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.520004098204193e-08,
+ "loss": 0.5702,
+ "step": 2404
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.352360449552787e-08,
+ "loss": 0.6309,
+ "step": 2405
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.186375631499821e-08,
+ "loss": 0.6625,
+ "step": 2406
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.0220499217053e-08,
+ "loss": 0.7019,
+ "step": 2407
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.859383595053449e-08,
+ "loss": 0.6384,
+ "step": 2408
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.69837692365305e-08,
+ "loss": 0.6347,
+ "step": 2409
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.539030176836549e-08,
+ "loss": 0.6441,
+ "step": 2410
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.381343621159275e-08,
+ "loss": 0.6489,
+ "step": 2411
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.2253175203999e-08,
+ "loss": 0.6471,
+ "step": 2412
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.070952135559195e-08,
+ "loss": 0.5654,
+ "step": 2413
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 6.918247724859939e-08,
+ "loss": 0.6293,
+ "step": 2414
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 6.767204543746463e-08,
+ "loss": 0.6501,
+ "step": 2415
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 6.617822844884325e-08,
+ "loss": 0.6355,
+ "step": 2416
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 6.470102878159301e-08,
+ "loss": 0.6607,
+ "step": 2417
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 6.324044890677839e-08,
+ "loss": 0.6547,
+ "step": 2418
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 6.179649126766274e-08,
+ "loss": 0.609,
+ "step": 2419
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 6.036915827969947e-08,
+ "loss": 0.6605,
+ "step": 2420
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.895845233053643e-08,
+ "loss": 0.6665,
+ "step": 2421
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.7564375780004843e-08,
+ "loss": 0.6591,
+ "step": 2422
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.618693096011929e-08,
+ "loss": 0.6818,
+ "step": 2423
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.482612017507327e-08,
+ "loss": 0.5735,
+ "step": 2424
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.348194570123588e-08,
+ "loss": 0.7178,
+ "step": 2425
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.2154409787141815e-08,
+ "loss": 0.6061,
+ "step": 2426
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.084351465350024e-08,
+ "loss": 0.6811,
+ "step": 2427
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.954926249317815e-08,
+ "loss": 0.6903,
+ "step": 2428
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.8271655471202605e-08,
+ "loss": 0.7069,
+ "step": 2429
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.701069572475958e-08,
+ "loss": 0.6505,
+ "step": 2430
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.5766385363184006e-08,
+ "loss": 0.7024,
+ "step": 2431
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.453872646796309e-08,
+ "loss": 0.6336,
+ "step": 2432
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.3327721092726314e-08,
+ "loss": 0.7148,
+ "step": 2433
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.213337126324768e-08,
+ "loss": 0.6285,
+ "step": 2434
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.0955678977436796e-08,
+ "loss": 0.5676,
+ "step": 2435
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.979464620534113e-08,
+ "loss": 0.619,
+ "step": 2436
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.8650274889139306e-08,
+ "loss": 0.6693,
+ "step": 2437
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.752256694313783e-08,
+ "loss": 0.605,
+ "step": 2438
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.641152425376992e-08,
+ "loss": 0.6462,
+ "step": 2439
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.531714867959113e-08,
+ "loss": 0.6499,
+ "step": 2440
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.423944205127372e-08,
+ "loss": 0.5979,
+ "step": 2441
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.3178406171608946e-08,
+ "loss": 0.6062,
+ "step": 2442
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 3.213404281550148e-08,
+ "loss": 0.6851,
+ "step": 2443
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 3.1106353729962734e-08,
+ "loss": 0.686,
+ "step": 2444
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 3.009534063411534e-08,
+ "loss": 0.6795,
+ "step": 2445
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.910100521918313e-08,
+ "loss": 0.6696,
+ "step": 2446
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.812334914849335e-08,
+ "loss": 0.671,
+ "step": 2447
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.716237405747113e-08,
+ "loss": 0.6142,
+ "step": 2448
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.6218081553638363e-08,
+ "loss": 0.6367,
+ "step": 2449
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.5290473216610378e-08,
+ "loss": 0.6343,
+ "step": 2450
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.4379550598092604e-08,
+ "loss": 0.6494,
+ "step": 2451
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.3485315221877246e-08,
+ "loss": 0.6759,
+ "step": 2452
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.2607768583843282e-08,
+ "loss": 0.6137,
+ "step": 2453
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.1746912151955346e-08,
+ "loss": 0.7235,
+ "step": 2454
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.090274736625486e-08,
+ "loss": 0.6912,
+ "step": 2455
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.0075275638862247e-08,
+ "loss": 0.6558,
+ "step": 2456
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.9264498353974704e-08,
+ "loss": 0.6461,
+ "step": 2457
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.8470416867861775e-08,
+ "loss": 0.6604,
+ "step": 2458
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.769303250886534e-08,
+ "loss": 0.6274,
+ "step": 2459
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.6932346577397396e-08,
+ "loss": 0.6836,
+ "step": 2460
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.6188360345932297e-08,
+ "loss": 0.6153,
+ "step": 2461
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.546107505901451e-08,
+ "loss": 0.6751,
+ "step": 2462
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.4750491933247513e-08,
+ "loss": 0.7112,
+ "step": 2463
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.4056612157297145e-08,
+ "loss": 0.6779,
+ "step": 2464
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.3379436891886034e-08,
+ "loss": 0.6113,
+ "step": 2465
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.2718967269795823e-08,
+ "loss": 0.5939,
+ "step": 2466
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.2075204395860518e-08,
+ "loss": 0.7174,
+ "step": 2467
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.1448149346969806e-08,
+ "loss": 0.6953,
+ "step": 2468
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.083780317206129e-08,
+ "loss": 0.6655,
+ "step": 2469
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.0244166892124929e-08,
+ "loss": 0.6732,
+ "step": 2470
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 9.667241500196378e-09,
+ "loss": 0.6759,
+ "step": 2471
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 9.107027961356985e-09,
+ "loss": 0.6691,
+ "step": 2472
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 8.563527212734902e-09,
+ "loss": 0.6781,
+ "step": 2473
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 8.036740163498425e-09,
+ "loss": 0.6768,
+ "step": 2474
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 7.526667694858214e-09,
+ "loss": 0.6693,
+ "step": 2475
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 7.033310660065074e-09,
+ "loss": 0.6575,
+ "step": 2476
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 6.556669884408839e-09,
+ "loss": 0.5874,
+ "step": 2477
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 6.09674616521283e-09,
+ "loss": 0.6953,
+ "step": 2478
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 5.653540271841618e-09,
+ "loss": 0.6688,
+ "step": 2479
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 5.2270529456888155e-09,
+ "loss": 0.7315,
+ "step": 2480
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 4.817284900183738e-09,
+ "loss": 0.6875,
+ "step": 2481
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 4.424236820789185e-09,
+ "loss": 0.6984,
+ "step": 2482
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 4.047909364994773e-09,
+ "loss": 0.7342,
+ "step": 2483
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 3.688303162322493e-09,
+ "loss": 0.7396,
+ "step": 2484
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 3.3454188143233758e-09,
+ "loss": 0.6467,
+ "step": 2485
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 3.019256894575273e-09,
+ "loss": 0.6484,
+ "step": 2486
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 2.7098179486806376e-09,
+ "loss": 0.6755,
+ "step": 2487
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 2.4171024942720723e-09,
+ "loss": 0.652,
+ "step": 2488
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 2.14111102100345e-09,
+ "loss": 0.621,
+ "step": 2489
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.881843990554355e-09,
+ "loss": 0.7173,
+ "step": 2490
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.6393018366278601e-09,
+ "loss": 0.6266,
+ "step": 2491
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.4134849649471982e-09,
+ "loss": 0.7034,
+ "step": 2492
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 1.2043937532613126e-09,
+ "loss": 0.68,
+ "step": 2493
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 1.0120285513381957e-09,
+ "loss": 0.6299,
+ "step": 2494
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 8.363896809659989e-10,
+ "loss": 0.7378,
+ "step": 2495
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 6.774774359541436e-10,
+ "loss": 0.6409,
+ "step": 2496
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 5.352920821311002e-10,
+ "loss": 0.6633,
+ "step": 2497
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 4.0983385734660875e-10,
+ "loss": 0.6526,
+ "step": 2498
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 3.011029714650171e-10,
+ "loss": 0.6352,
+ "step": 2499
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 2.090996063741635e-10,
+ "loss": 0.6635,
+ "step": 2500
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 1.3382391597538403e-10,
+ "loss": 0.5883,
+ "step": 2501
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 7.527602619017415e-11,
+ "loss": 0.6822,
+ "step": 2502
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 3.3456034959078456e-11,
+ "loss": 0.5954,
+ "step": 2503
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 8.364012237249698e-12,
+ "loss": 0.5953,
+ "step": 2504
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.0,
+ "loss": 0.6442,
+ "step": 2505
+ },
+ {
+ "epoch": 1.0,
+ "step": 2505,
+ "total_flos": 452205235666944.0,
+ "train_loss": 0.7111096807820593,
+ "train_runtime": 10580.2761,
+ "train_samples_per_second": 30.299,
+ "train_steps_per_second": 0.237
+ }
+ ],
+ "logging_steps": 1.0,
+ "max_steps": 2505,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 1,
+ "save_steps": 50000,
+ "total_flos": 452205235666944.0,
+ "train_batch_size": 16,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/ICM-LLaVA-v1.5-13B/training_args.bin b/ICM-LLaVA-v1.5-13B/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..28b93ab3e1dd9883d64c3d99d0e309ff0c4ffecc
--- /dev/null
+++ b/ICM-LLaVA-v1.5-13B/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4661a4e49e1755636c80f25b15f04a6768a59aa456cfee7bc457320ccb3d7614
+size 6776
diff --git a/ICM-LLaVA-v1.5-7B/generation_config.json b/ICM-LLaVA-v1.5-7B/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..9f5b631657a21d1f230063134b29e5189407aa12
--- /dev/null
+++ b/ICM-LLaVA-v1.5-7B/generation_config.json
@@ -0,0 +1,8 @@
+{
+ "attn_implementation": "sdpa",
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "max_length": 4096,
+ "pad_token_id": 0,
+ "transformers_version": "4.37.2"
+}
diff --git a/ICM-LLaVA-v1.5-7B/model-00001-of-00003.safetensors b/ICM-LLaVA-v1.5-7B/model-00001-of-00003.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..310c12008afc5258f1df957bcae63837324023fe
--- /dev/null
+++ b/ICM-LLaVA-v1.5-7B/model-00001-of-00003.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:351761683d9d68988fc1cc5d6f533f5c1ffa3bc06d93387800bf2ff8eeca15c5
+size 4938985352
diff --git a/ICM-LLaVA-v1.5-7B/model-00002-of-00003.safetensors b/ICM-LLaVA-v1.5-7B/model-00002-of-00003.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..080d2004d4265ac3d1d8c03e415ee4a483380670
--- /dev/null
+++ b/ICM-LLaVA-v1.5-7B/model-00002-of-00003.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2f859c639d524c10a734db28ef48c84b4e9064d6272481f41644caf6464f99a1
+size 4947390880
diff --git a/ICM-LLaVA-v1.5-7B/model.safetensors.index.json b/ICM-LLaVA-v1.5-7B/model.safetensors.index.json
new file mode 100644
index 0000000000000000000000000000000000000000..5f26f5522b312502b966e3668b3827036308f013
--- /dev/null
+++ b/ICM-LLaVA-v1.5-7B/model.safetensors.index.json
@@ -0,0 +1,693 @@
+{
+ "metadata": {
+ "total_size": 14125805568
+ },
+ "weight_map": {
+ "lm_head.weight": "model-00003-of-00003.safetensors",
+ "model.embed_tokens.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.30.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.mm_projector.0.bias": "model-00003-of-00003.safetensors",
+ "model.mm_projector.0.weight": "model-00003-of-00003.safetensors",
+ "model.mm_projector.2.bias": "model-00003-of-00003.safetensors",
+ "model.mm_projector.2.weight": "model-00003-of-00003.safetensors",
+ "model.norm.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.embeddings.class_embedding": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.embeddings.patch_embedding.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.embeddings.position_embedding.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.post_layernorm.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.post_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.pre_layrnorm.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.pre_layrnorm.weight": "model-00003-of-00003.safetensors"
+ }
+}
diff --git a/ICM-LLaVA-v1.5-7B/runs/Dec13_14-39-07_hk01dgx044/events.out.tfevents.1734071975.hk01dgx044.2673069.0 b/ICM-LLaVA-v1.5-7B/runs/Dec13_14-39-07_hk01dgx044/events.out.tfevents.1734071975.hk01dgx044.2673069.0
new file mode 100644
index 0000000000000000000000000000000000000000..92505bb5f44429a5c834476f61dd8d980a6d15e7
--- /dev/null
+++ b/ICM-LLaVA-v1.5-7B/runs/Dec13_14-39-07_hk01dgx044/events.out.tfevents.1734071975.hk01dgx044.2673069.0
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e3894fde0453a32695915256c1a8ad2f372bd644e758dd98b1775cc32e24519f
+size 792066
diff --git a/ICM-LLaVA-v1.5-7B/special_tokens_map.json b/ICM-LLaVA-v1.5-7B/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..14761dcf1466dc232bd41de9c21d4c617b15755e
--- /dev/null
+++ b/ICM-LLaVA-v1.5-7B/special_tokens_map.json
@@ -0,0 +1,24 @@
+{
+ "bos_token": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": "<unk>",
+ "unk_token": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/ICM-LLaVA-v1.5-7B/tokenizer.model b/ICM-LLaVA-v1.5-7B/tokenizer.model
new file mode 100644
index 0000000000000000000000000000000000000000..6c00c742ce03c627d6cd5b795984876fa49fa899
--- /dev/null
+++ b/ICM-LLaVA-v1.5-7B/tokenizer.model
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
diff --git a/ICM-LLaVA-v1.5-7B/tokenizer_config.json b/ICM-LLaVA-v1.5-7B/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..2d53c0f8edb049fa98763ee75652fafa68bf7f42
--- /dev/null
+++ b/ICM-LLaVA-v1.5-7B/tokenizer_config.json
@@ -0,0 +1,42 @@
+{
+ "add_bos_token": true,
+ "add_eos_token": false,
+ "added_tokens_decoder": {
+ "0": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "<s>",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "</s>",
+ "legacy": false,
+ "model_max_length": 2048,
+ "pad_token": "<unk>",
+ "padding_side": "right",
+ "sp_model_kwargs": {},
+ "spaces_between_special_tokens": false,
+ "tokenizer_class": "LlamaTokenizer",
+ "unk_token": "<unk>",
+ "use_default_system_prompt": false
+}
diff --git a/ICM-LLaVA-v1.5-7B/trainer_state.json b/ICM-LLaVA-v1.5-7B/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..b0ce1275c81b3cadbf0034e793e5300bbe996230
--- /dev/null
+++ b/ICM-LLaVA-v1.5-7B/trainer_state.json
@@ -0,0 +1,30084 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 1.0,
+ "eval_steps": 500,
+ "global_step": 5009,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.3245033112582784e-07,
+ "loss": 1.3452,
+ "step": 1
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.649006622516557e-07,
+ "loss": 1.2837,
+ "step": 2
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 3.973509933774835e-07,
+ "loss": 1.4792,
+ "step": 3
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 5.298013245033113e-07,
+ "loss": 1.337,
+ "step": 4
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 6.622516556291392e-07,
+ "loss": 1.2501,
+ "step": 5
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 7.94701986754967e-07,
+ "loss": 1.4335,
+ "step": 6
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 9.271523178807948e-07,
+ "loss": 1.2176,
+ "step": 7
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.0596026490066227e-06,
+ "loss": 1.2342,
+ "step": 8
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.1920529801324504e-06,
+ "loss": 1.2596,
+ "step": 9
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.3245033112582784e-06,
+ "loss": 1.3475,
+ "step": 10
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.456953642384106e-06,
+ "loss": 1.2611,
+ "step": 11
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.589403973509934e-06,
+ "loss": 1.2787,
+ "step": 12
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.7218543046357616e-06,
+ "loss": 1.2362,
+ "step": 13
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.8543046357615895e-06,
+ "loss": 1.1751,
+ "step": 14
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.9867549668874175e-06,
+ "loss": 1.1141,
+ "step": 15
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.1192052980132454e-06,
+ "loss": 1.0836,
+ "step": 16
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.2516556291390733e-06,
+ "loss": 1.0752,
+ "step": 17
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.384105960264901e-06,
+ "loss": 1.0108,
+ "step": 18
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.516556291390729e-06,
+ "loss": 0.992,
+ "step": 19
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.6490066225165567e-06,
+ "loss": 1.0185,
+ "step": 20
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.7814569536423843e-06,
+ "loss": 0.9228,
+ "step": 21
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.913907284768212e-06,
+ "loss": 0.9946,
+ "step": 22
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 3.04635761589404e-06,
+ "loss": 1.0039,
+ "step": 23
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 3.178807947019868e-06,
+ "loss": 0.926,
+ "step": 24
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 3.311258278145696e-06,
+ "loss": 0.8459,
+ "step": 25
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.443708609271523e-06,
+ "loss": 0.9591,
+ "step": 26
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.576158940397351e-06,
+ "loss": 0.9916,
+ "step": 27
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.708609271523179e-06,
+ "loss": 0.9733,
+ "step": 28
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.841059602649007e-06,
+ "loss": 0.8621,
+ "step": 29
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.973509933774835e-06,
+ "loss": 0.9063,
+ "step": 30
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.105960264900663e-06,
+ "loss": 0.9868,
+ "step": 31
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.238410596026491e-06,
+ "loss": 0.8948,
+ "step": 32
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.370860927152319e-06,
+ "loss": 0.9113,
+ "step": 33
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.503311258278147e-06,
+ "loss": 0.9238,
+ "step": 34
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.635761589403974e-06,
+ "loss": 0.7997,
+ "step": 35
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.768211920529802e-06,
+ "loss": 0.8155,
+ "step": 36
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.90066225165563e-06,
+ "loss": 0.9045,
+ "step": 37
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.033112582781458e-06,
+ "loss": 0.8793,
+ "step": 38
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.165562913907285e-06,
+ "loss": 0.9175,
+ "step": 39
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.2980132450331135e-06,
+ "loss": 0.8839,
+ "step": 40
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.430463576158941e-06,
+ "loss": 0.8622,
+ "step": 41
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.5629139072847685e-06,
+ "loss": 0.7864,
+ "step": 42
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.6953642384105965e-06,
+ "loss": 0.8379,
+ "step": 43
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.827814569536424e-06,
+ "loss": 0.7651,
+ "step": 44
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.960264900662252e-06,
+ "loss": 0.8635,
+ "step": 45
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.09271523178808e-06,
+ "loss": 0.883,
+ "step": 46
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.225165562913907e-06,
+ "loss": 0.8034,
+ "step": 47
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.357615894039736e-06,
+ "loss": 0.7511,
+ "step": 48
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.490066225165563e-06,
+ "loss": 0.777,
+ "step": 49
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.622516556291392e-06,
+ "loss": 0.774,
+ "step": 50
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.754966887417219e-06,
+ "loss": 0.8671,
+ "step": 51
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.887417218543046e-06,
+ "loss": 0.8236,
+ "step": 52
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.019867549668875e-06,
+ "loss": 0.8594,
+ "step": 53
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.152317880794702e-06,
+ "loss": 0.8841,
+ "step": 54
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.28476821192053e-06,
+ "loss": 0.777,
+ "step": 55
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.417218543046358e-06,
+ "loss": 0.8694,
+ "step": 56
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.549668874172186e-06,
+ "loss": 0.85,
+ "step": 57
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.682119205298014e-06,
+ "loss": 0.8706,
+ "step": 58
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.814569536423841e-06,
+ "loss": 0.8536,
+ "step": 59
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.94701986754967e-06,
+ "loss": 0.8395,
+ "step": 60
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.079470198675497e-06,
+ "loss": 0.8288,
+ "step": 61
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.211920529801326e-06,
+ "loss": 0.9069,
+ "step": 62
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.344370860927153e-06,
+ "loss": 0.8457,
+ "step": 63
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.476821192052982e-06,
+ "loss": 0.7767,
+ "step": 64
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.609271523178809e-06,
+ "loss": 0.8038,
+ "step": 65
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.741721854304637e-06,
+ "loss": 0.8375,
+ "step": 66
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.874172185430465e-06,
+ "loss": 0.844,
+ "step": 67
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.006622516556293e-06,
+ "loss": 0.7936,
+ "step": 68
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.13907284768212e-06,
+ "loss": 0.8383,
+ "step": 69
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.271523178807948e-06,
+ "loss": 0.7556,
+ "step": 70
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.403973509933776e-06,
+ "loss": 0.7804,
+ "step": 71
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.536423841059603e-06,
+ "loss": 0.7975,
+ "step": 72
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.668874172185432e-06,
+ "loss": 0.6497,
+ "step": 73
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.80132450331126e-06,
+ "loss": 0.6647,
+ "step": 74
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.933774834437086e-06,
+ "loss": 0.7892,
+ "step": 75
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0066225165562915e-05,
+ "loss": 0.755,
+ "step": 76
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0198675496688744e-05,
+ "loss": 0.8269,
+ "step": 77
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.033112582781457e-05,
+ "loss": 0.8893,
+ "step": 78
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0463576158940398e-05,
+ "loss": 0.7533,
+ "step": 79
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0596026490066227e-05,
+ "loss": 0.8595,
+ "step": 80
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0728476821192052e-05,
+ "loss": 0.7328,
+ "step": 81
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0860927152317881e-05,
+ "loss": 0.8198,
+ "step": 82
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.099337748344371e-05,
+ "loss": 0.8508,
+ "step": 83
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1125827814569537e-05,
+ "loss": 0.7732,
+ "step": 84
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1258278145695364e-05,
+ "loss": 0.8002,
+ "step": 85
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1390728476821193e-05,
+ "loss": 0.769,
+ "step": 86
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.152317880794702e-05,
+ "loss": 0.6671,
+ "step": 87
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1655629139072849e-05,
+ "loss": 0.7559,
+ "step": 88
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1788079470198676e-05,
+ "loss": 0.8151,
+ "step": 89
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1920529801324505e-05,
+ "loss": 0.6493,
+ "step": 90
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2052980132450332e-05,
+ "loss": 0.8264,
+ "step": 91
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.218543046357616e-05,
+ "loss": 0.7954,
+ "step": 92
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2317880794701988e-05,
+ "loss": 0.8377,
+ "step": 93
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2450331125827815e-05,
+ "loss": 0.8292,
+ "step": 94
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2582781456953644e-05,
+ "loss": 0.8111,
+ "step": 95
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2715231788079472e-05,
+ "loss": 0.8888,
+ "step": 96
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2847682119205298e-05,
+ "loss": 0.7705,
+ "step": 97
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2980132450331127e-05,
+ "loss": 0.9116,
+ "step": 98
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3112582781456955e-05,
+ "loss": 0.9474,
+ "step": 99
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3245033112582784e-05,
+ "loss": 0.803,
+ "step": 100
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.337748344370861e-05,
+ "loss": 0.8973,
+ "step": 101
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3509933774834438e-05,
+ "loss": 0.7973,
+ "step": 102
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3642384105960267e-05,
+ "loss": 0.7127,
+ "step": 103
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3774834437086093e-05,
+ "loss": 0.7079,
+ "step": 104
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3907284768211921e-05,
+ "loss": 0.7641,
+ "step": 105
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.403973509933775e-05,
+ "loss": 0.8607,
+ "step": 106
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4172185430463577e-05,
+ "loss": 0.8237,
+ "step": 107
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4304635761589404e-05,
+ "loss": 0.8309,
+ "step": 108
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4437086092715233e-05,
+ "loss": 0.7552,
+ "step": 109
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.456953642384106e-05,
+ "loss": 0.7684,
+ "step": 110
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4701986754966889e-05,
+ "loss": 0.7643,
+ "step": 111
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4834437086092716e-05,
+ "loss": 0.854,
+ "step": 112
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4966887417218545e-05,
+ "loss": 0.8617,
+ "step": 113
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5099337748344372e-05,
+ "loss": 0.824,
+ "step": 114
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.52317880794702e-05,
+ "loss": 0.7997,
+ "step": 115
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5364238410596028e-05,
+ "loss": 0.836,
+ "step": 116
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5496688741721853e-05,
+ "loss": 0.8108,
+ "step": 117
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5629139072847682e-05,
+ "loss": 0.797,
+ "step": 118
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.576158940397351e-05,
+ "loss": 0.8252,
+ "step": 119
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.589403973509934e-05,
+ "loss": 0.8046,
+ "step": 120
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.6026490066225165e-05,
+ "loss": 0.8392,
+ "step": 121
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.6158940397350994e-05,
+ "loss": 0.8177,
+ "step": 122
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.6291390728476823e-05,
+ "loss": 0.8189,
+ "step": 123
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.642384105960265e-05,
+ "loss": 0.8925,
+ "step": 124
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.6556291390728477e-05,
+ "loss": 0.7217,
+ "step": 125
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.6688741721854306e-05,
+ "loss": 0.7453,
+ "step": 126
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.6821192052980134e-05,
+ "loss": 0.6829,
+ "step": 127
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.6953642384105963e-05,
+ "loss": 0.7928,
+ "step": 128
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.708609271523179e-05,
+ "loss": 0.8594,
+ "step": 129
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7218543046357617e-05,
+ "loss": 0.7596,
+ "step": 130
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7350993377483446e-05,
+ "loss": 0.8385,
+ "step": 131
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7483443708609275e-05,
+ "loss": 0.7885,
+ "step": 132
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.76158940397351e-05,
+ "loss": 0.8114,
+ "step": 133
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.774834437086093e-05,
+ "loss": 0.7621,
+ "step": 134
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7880794701986758e-05,
+ "loss": 0.8402,
+ "step": 135
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8013245033112587e-05,
+ "loss": 0.8151,
+ "step": 136
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8145695364238412e-05,
+ "loss": 0.8806,
+ "step": 137
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.827814569536424e-05,
+ "loss": 0.8555,
+ "step": 138
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.841059602649007e-05,
+ "loss": 0.8082,
+ "step": 139
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8543046357615895e-05,
+ "loss": 0.7664,
+ "step": 140
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8675496688741724e-05,
+ "loss": 0.758,
+ "step": 141
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8807947019867553e-05,
+ "loss": 0.8658,
+ "step": 142
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8940397350993378e-05,
+ "loss": 0.8967,
+ "step": 143
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9072847682119207e-05,
+ "loss": 0.8162,
+ "step": 144
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9205298013245036e-05,
+ "loss": 0.684,
+ "step": 145
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9337748344370864e-05,
+ "loss": 0.8111,
+ "step": 146
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.947019867549669e-05,
+ "loss": 0.7319,
+ "step": 147
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.960264900662252e-05,
+ "loss": 0.775,
+ "step": 148
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9735099337748347e-05,
+ "loss": 0.8568,
+ "step": 149
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9867549668874173e-05,
+ "loss": 0.8405,
+ "step": 150
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 2e-05,
+ "loss": 0.8893,
+ "step": 151
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999997908996724e-05,
+ "loss": 0.7669,
+ "step": 152
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999991635987763e-05,
+ "loss": 0.8536,
+ "step": 153
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999981180975748e-05,
+ "loss": 0.7161,
+ "step": 154
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999966543965042e-05,
+ "loss": 0.7996,
+ "step": 155
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999947724961774e-05,
+ "loss": 0.8276,
+ "step": 156
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999924723973812e-05,
+ "loss": 0.7877,
+ "step": 157
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999897541010772e-05,
+ "loss": 0.7959,
+ "step": 158
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999866176084026e-05,
+ "loss": 0.7558,
+ "step": 159
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999983062920669e-05,
+ "loss": 0.8019,
+ "step": 160
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999790900393628e-05,
+ "loss": 0.8307,
+ "step": 161
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999746989661454e-05,
+ "loss": 0.7112,
+ "step": 162
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999698897028537e-05,
+ "loss": 0.8289,
+ "step": 163
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999646622514982e-05,
+ "loss": 0.7977,
+ "step": 164
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999590166142656e-05,
+ "loss": 0.7848,
+ "step": 165
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999529527935165e-05,
+ "loss": 0.8372,
+ "step": 166
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999946470791787e-05,
+ "loss": 0.8057,
+ "step": 167
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999939570611788e-05,
+ "loss": 0.8996,
+ "step": 168
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999322522564048e-05,
+ "loss": 0.9111,
+ "step": 169
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999924515728698e-05,
+ "loss": 0.8184,
+ "step": 170
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999163610319035e-05,
+ "loss": 0.851,
+ "step": 171
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999907788169431e-05,
+ "loss": 0.7318,
+ "step": 172
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9998987971448664e-05,
+ "loss": 0.7285,
+ "step": 173
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999889387961969e-05,
+ "loss": 0.8508,
+ "step": 174
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9998795606246738e-05,
+ "loss": 0.7882,
+ "step": 175
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998693151370913e-05,
+ "loss": 0.7713,
+ "step": 176
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998586515035053e-05,
+ "loss": 0.813,
+ "step": 177
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999847569728376e-05,
+ "loss": 0.9117,
+ "step": 178
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998360698163375e-05,
+ "loss": 0.7745,
+ "step": 179
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998241517721987e-05,
+ "loss": 0.7803,
+ "step": 180
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999811815600945e-05,
+ "loss": 0.7473,
+ "step": 181
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9997990613077334e-05,
+ "loss": 0.8061,
+ "step": 182
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9997858888978997e-05,
+ "loss": 0.7554,
+ "step": 183
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9997722983769517e-05,
+ "loss": 0.8056,
+ "step": 184
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999758289750573e-05,
+ "loss": 0.8333,
+ "step": 185
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999743863024622e-05,
+ "loss": 0.8307,
+ "step": 186
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999729018205132e-05,
+ "loss": 0.8307,
+ "step": 187
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9997137552983115e-05,
+ "loss": 0.7876,
+ "step": 188
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996980743105427e-05,
+ "loss": 0.8588,
+ "step": 189
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996819752483836e-05,
+ "loss": 0.9757,
+ "step": 190
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999665458118568e-05,
+ "loss": 0.8827,
+ "step": 191
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996485229280018e-05,
+ "loss": 0.8055,
+ "step": 192
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999631169683768e-05,
+ "loss": 0.848,
+ "step": 193
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996133983931236e-05,
+ "loss": 0.7933,
+ "step": 194
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9995952090635007e-05,
+ "loss": 0.8802,
+ "step": 195
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999576601702506e-05,
+ "loss": 0.8503,
+ "step": 196
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9995575763179213e-05,
+ "loss": 0.8015,
+ "step": 197
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9995381329177027e-05,
+ "loss": 0.7815,
+ "step": 198
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999518271509982e-05,
+ "loss": 0.8275,
+ "step": 199
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999497992103064e-05,
+ "loss": 0.7953,
+ "step": 200
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999477294705431e-05,
+ "loss": 0.8159,
+ "step": 201
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9994561793257383e-05,
+ "loss": 0.833,
+ "step": 202
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999434645972816e-05,
+ "loss": 0.7633,
+ "step": 203
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9994126946556696e-05,
+ "loss": 0.8476,
+ "step": 204
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999390325383479e-05,
+ "loss": 0.8566,
+ "step": 205
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9993675381655986e-05,
+ "loss": 0.8242,
+ "step": 206
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9993443330115592e-05,
+ "loss": 0.8708,
+ "step": 207
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9993207099310645e-05,
+ "loss": 0.9173,
+ "step": 208
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9992966689339936e-05,
+ "loss": 0.8561,
+ "step": 209
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9992722100304008e-05,
+ "loss": 0.7105,
+ "step": 210
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9992473332305145e-05,
+ "loss": 0.7734,
+ "step": 211
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9992220385447384e-05,
+ "loss": 0.9034,
+ "step": 212
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9991963259836504e-05,
+ "loss": 0.7848,
+ "step": 213
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999170195558004e-05,
+ "loss": 0.7229,
+ "step": 214
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9991436472787267e-05,
+ "loss": 0.8388,
+ "step": 215
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9991166811569212e-05,
+ "loss": 0.8261,
+ "step": 216
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9990892972038646e-05,
+ "loss": 0.7832,
+ "step": 217
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9990614954310086e-05,
+ "loss": 0.8168,
+ "step": 218
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9990332758499805e-05,
+ "loss": 0.8728,
+ "step": 219
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9990046384725813e-05,
+ "loss": 0.8476,
+ "step": 220
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9989755833107875e-05,
+ "loss": 0.765,
+ "step": 221
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.99894611037675e-05,
+ "loss": 0.747,
+ "step": 222
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.998916219682794e-05,
+ "loss": 0.8382,
+ "step": 223
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.99888591124142e-05,
+ "loss": 0.8546,
+ "step": 224
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.998855185065303e-05,
+ "loss": 0.8067,
+ "step": 225
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9988240411672933e-05,
+ "loss": 0.8166,
+ "step": 226
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998792479560414e-05,
+ "loss": 0.8123,
+ "step": 227
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9987605002578655e-05,
+ "loss": 0.8308,
+ "step": 228
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9987281032730206e-05,
+ "loss": 0.7427,
+ "step": 229
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998695288619428e-05,
+ "loss": 0.8117,
+ "step": 230
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9986620563108117e-05,
+ "loss": 0.8197,
+ "step": 231
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9986284063610682e-05,
+ "loss": 0.7233,
+ "step": 232
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9985943387842704e-05,
+ "loss": 0.9237,
+ "step": 233
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9985598535946655e-05,
+ "loss": 0.7817,
+ "step": 234
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9985249508066754e-05,
+ "loss": 0.8877,
+ "step": 235
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998489630434896e-05,
+ "loss": 0.7996,
+ "step": 236
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9984538924940987e-05,
+ "loss": 0.841,
+ "step": 237
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9984177369992287e-05,
+ "loss": 0.7545,
+ "step": 238
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998381163965407e-05,
+ "loss": 0.8969,
+ "step": 239
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9983441734079275e-05,
+ "loss": 0.8402,
+ "step": 240
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9983067653422603e-05,
+ "loss": 0.9041,
+ "step": 241
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9982689397840497e-05,
+ "loss": 0.8491,
+ "step": 242
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9982306967491136e-05,
+ "loss": 0.7784,
+ "step": 243
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9981920362534456e-05,
+ "loss": 0.8131,
+ "step": 244
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998152958313214e-05,
+ "loss": 0.7756,
+ "step": 245
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9981134629447605e-05,
+ "loss": 0.8029,
+ "step": 246
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9980735501646026e-05,
+ "loss": 0.7458,
+ "step": 247
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9980332199894317e-05,
+ "loss": 0.8052,
+ "step": 248
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997992472436114e-05,
+ "loss": 0.8717,
+ "step": 249
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9979513075216898e-05,
+ "loss": 0.8225,
+ "step": 250
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9979097252633748e-05,
+ "loss": 0.7368,
+ "step": 251
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9978677256785583e-05,
+ "loss": 0.83,
+ "step": 252
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9978253087848046e-05,
+ "loss": 0.7651,
+ "step": 253
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9977824745998528e-05,
+ "loss": 0.8736,
+ "step": 254
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9977392231416158e-05,
+ "loss": 0.8917,
+ "step": 255
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9976955544281815e-05,
+ "loss": 0.7897,
+ "step": 256
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9976514684778124e-05,
+ "loss": 0.8666,
+ "step": 257
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9976069653089452e-05,
+ "loss": 0.7955,
+ "step": 258
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9975620449401913e-05,
+ "loss": 0.8105,
+ "step": 259
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9975167073903355e-05,
+ "loss": 0.7716,
+ "step": 260
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997470952678339e-05,
+ "loss": 0.8308,
+ "step": 261
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9974247808233364e-05,
+ "loss": 0.763,
+ "step": 262
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9973781918446363e-05,
+ "loss": 0.8117,
+ "step": 263
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9973311857617224e-05,
+ "loss": 0.8303,
+ "step": 264
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9972837625942533e-05,
+ "loss": 0.8411,
+ "step": 265
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.99723592236206e-05,
+ "loss": 0.7631,
+ "step": 266
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997187665085151e-05,
+ "loss": 0.7851,
+ "step": 267
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997138990783706e-05,
+ "loss": 0.8072,
+ "step": 268
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997089899478082e-05,
+ "loss": 0.8505,
+ "step": 269
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9970403911888077e-05,
+ "loss": 0.7427,
+ "step": 270
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9969904659365887e-05,
+ "loss": 0.7301,
+ "step": 271
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.996940123742303e-05,
+ "loss": 0.8509,
+ "step": 272
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9968893646270037e-05,
+ "loss": 0.825,
+ "step": 273
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.996838188611919e-05,
+ "loss": 0.8391,
+ "step": 274
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.99678659571845e-05,
+ "loss": 0.8541,
+ "step": 275
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9967345859681732e-05,
+ "loss": 0.7657,
+ "step": 276
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9966821593828393e-05,
+ "loss": 0.8522,
+ "step": 277
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9966293159843726e-05,
+ "loss": 0.8231,
+ "step": 278
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996576055794873e-05,
+ "loss": 0.7555,
+ "step": 279
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996522378836613e-05,
+ "loss": 0.7701,
+ "step": 280
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996468285132041e-05,
+ "loss": 0.7671,
+ "step": 281
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996413774703779e-05,
+ "loss": 0.8233,
+ "step": 282
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9963588475746233e-05,
+ "loss": 0.8129,
+ "step": 283
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996303503767544e-05,
+ "loss": 0.8721,
+ "step": 284
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9962477433056864e-05,
+ "loss": 0.9024,
+ "step": 285
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9961915662123694e-05,
+ "loss": 0.7138,
+ "step": 286
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996134972511086e-05,
+ "loss": 0.8335,
+ "step": 287
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9960779622255045e-05,
+ "loss": 0.7992,
+ "step": 288
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996020535379466e-05,
+ "loss": 0.7676,
+ "step": 289
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9959626919969866e-05,
+ "loss": 0.8192,
+ "step": 290
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9959044321022563e-05,
+ "loss": 0.7898,
+ "step": 291
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.99584575571964e-05,
+ "loss": 0.8549,
+ "step": 292
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9957866628736756e-05,
+ "loss": 0.7291,
+ "step": 293
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9957271535890756e-05,
+ "loss": 0.8671,
+ "step": 294
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9956672278907273e-05,
+ "loss": 0.859,
+ "step": 295
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9956068858036918e-05,
+ "loss": 0.8595,
+ "step": 296
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9955461273532037e-05,
+ "loss": 0.8011,
+ "step": 297
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9954849525646727e-05,
+ "loss": 0.7238,
+ "step": 298
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9954233614636817e-05,
+ "loss": 0.8043,
+ "step": 299
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9953613540759885e-05,
+ "loss": 0.8481,
+ "step": 300
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.995298930427524e-05,
+ "loss": 0.7829,
+ "step": 301
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.995236090544395e-05,
+ "loss": 0.8107,
+ "step": 302
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.99517283445288e-05,
+ "loss": 0.7894,
+ "step": 303
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.995109162179433e-05,
+ "loss": 0.7687,
+ "step": 304
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9950450737506825e-05,
+ "loss": 0.8176,
+ "step": 305
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9949805691934293e-05,
+ "loss": 0.723,
+ "step": 306
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9949156485346502e-05,
+ "loss": 0.7644,
+ "step": 307
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9948503118014944e-05,
+ "loss": 0.8572,
+ "step": 308
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994784559021286e-05,
+ "loss": 0.8818,
+ "step": 309
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9947183902215227e-05,
+ "loss": 0.7593,
+ "step": 310
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9946518054298768e-05,
+ "loss": 0.826,
+ "step": 311
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9945848046741934e-05,
+ "loss": 0.8223,
+ "step": 312
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994517387982493e-05,
+ "loss": 0.8624,
+ "step": 313
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9944495553829685e-05,
+ "loss": 0.756,
+ "step": 314
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9943813069039883e-05,
+ "loss": 0.883,
+ "step": 315
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9943126425740935e-05,
+ "loss": 0.7858,
+ "step": 316
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994243562422e-05,
+ "loss": 0.766,
+ "step": 317
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9941740664765964e-05,
+ "loss": 0.8252,
+ "step": 318
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9941041547669467e-05,
+ "loss": 0.8443,
+ "step": 319
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9940338273222878e-05,
+ "loss": 0.8146,
+ "step": 320
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9939630841720302e-05,
+ "loss": 0.807,
+ "step": 321
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9938919253457593e-05,
+ "loss": 0.7775,
+ "step": 322
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.993820350873234e-05,
+ "loss": 0.7027,
+ "step": 323
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.993748360784386e-05,
+ "loss": 0.7489,
+ "step": 324
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.993675955109322e-05,
+ "loss": 0.8178,
+ "step": 325
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9936031338783226e-05,
+ "loss": 0.7377,
+ "step": 326
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.993529897121841e-05,
+ "loss": 0.7828,
+ "step": 327
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9934562448705048e-05,
+ "loss": 0.8599,
+ "step": 328
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.993382177155116e-05,
+ "loss": 0.7215,
+ "step": 329
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9933076940066494e-05,
+ "loss": 0.784,
+ "step": 330
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.993232795456254e-05,
+ "loss": 0.7459,
+ "step": 331
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.993157481535252e-05,
+ "loss": 0.7763,
+ "step": 332
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9930817522751403e-05,
+ "loss": 0.8394,
+ "step": 333
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.993005607707589e-05,
+ "loss": 0.7155,
+ "step": 334
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9929290478644412e-05,
+ "loss": 0.801,
+ "step": 335
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9928520727777144e-05,
+ "loss": 0.8,
+ "step": 336
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9927746824796003e-05,
+ "loss": 0.7749,
+ "step": 337
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992696877002463e-05,
+ "loss": 0.8266,
+ "step": 338
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992618656378841e-05,
+ "loss": 0.8793,
+ "step": 339
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992540020641446e-05,
+ "loss": 0.8093,
+ "step": 340
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9924609698231637e-05,
+ "loss": 0.8063,
+ "step": 341
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992381503957053e-05,
+ "loss": 0.7976,
+ "step": 342
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992301623076347e-05,
+ "loss": 0.7654,
+ "step": 343
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9922213272144516e-05,
+ "loss": 0.8301,
+ "step": 344
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9921406164049467e-05,
+ "loss": 0.7587,
+ "step": 345
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9920594906815856e-05,
+ "loss": 0.828,
+ "step": 346
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991977950078295e-05,
+ "loss": 0.8126,
+ "step": 347
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9918959946291752e-05,
+ "loss": 0.7938,
+ "step": 348
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9918136243685003e-05,
+ "loss": 0.7261,
+ "step": 349
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9917308393307174e-05,
+ "loss": 0.8347,
+ "step": 350
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9916476395504473e-05,
+ "loss": 0.7497,
+ "step": 351
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9915640250624844e-05,
+ "loss": 0.8052,
+ "step": 352
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991479995901796e-05,
+ "loss": 0.755,
+ "step": 353
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9913955521035234e-05,
+ "loss": 0.8091,
+ "step": 354
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991310693702981e-05,
+ "loss": 0.8132,
+ "step": 355
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9912254207356562e-05,
+ "loss": 0.8557,
+ "step": 356
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991139733237211e-05,
+ "loss": 0.869,
+ "step": 357
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9910536312434795e-05,
+ "loss": 0.7256,
+ "step": 358
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9909671147904693e-05,
+ "loss": 0.7123,
+ "step": 359
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9908801839143622e-05,
+ "loss": 0.842,
+ "step": 360
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9907928386515126e-05,
+ "loss": 0.878,
+ "step": 361
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9907050790384486e-05,
+ "loss": 0.8455,
+ "step": 362
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.990616905111871e-05,
+ "loss": 0.7852,
+ "step": 363
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.990528316908654e-05,
+ "loss": 0.8359,
+ "step": 364
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9904393144658455e-05,
+ "loss": 0.774,
+ "step": 365
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9903498978206663e-05,
+ "loss": 0.7442,
+ "step": 366
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9902600670105107e-05,
+ "loss": 0.7627,
+ "step": 367
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9901698220729458e-05,
+ "loss": 0.8005,
+ "step": 368
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9900791630457122e-05,
+ "loss": 0.8046,
+ "step": 369
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9899880899667233e-05,
+ "loss": 0.7473,
+ "step": 370
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9898966028740662e-05,
+ "loss": 0.8911,
+ "step": 371
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.989804701806001e-05,
+ "loss": 0.7924,
+ "step": 372
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9897123868009606e-05,
+ "loss": 0.6458,
+ "step": 373
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.989619657897551e-05,
+ "loss": 0.7748,
+ "step": 374
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9895265151345516e-05,
+ "loss": 0.8192,
+ "step": 375
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9894329585509153e-05,
+ "loss": 0.9097,
+ "step": 376
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9893389881857665e-05,
+ "loss": 0.822,
+ "step": 377
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9892446040784046e-05,
+ "loss": 0.7947,
+ "step": 378
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9891498062683005e-05,
+ "loss": 0.7356,
+ "step": 379
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.989054594795099e-05,
+ "loss": 0.8425,
+ "step": 380
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9889589696986173e-05,
+ "loss": 0.7747,
+ "step": 381
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9888629310188467e-05,
+ "loss": 0.8468,
+ "step": 382
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9887664787959495e-05,
+ "loss": 0.7453,
+ "step": 383
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9886696130702628e-05,
+ "loss": 0.7112,
+ "step": 384
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.988572333882296e-05,
+ "loss": 0.6379,
+ "step": 385
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9884746412727305e-05,
+ "loss": 0.7834,
+ "step": 386
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9883765352824224e-05,
+ "loss": 0.8476,
+ "step": 387
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9882780159523993e-05,
+ "loss": 0.8154,
+ "step": 388
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9881790833238615e-05,
+ "loss": 0.7342,
+ "step": 389
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9880797374381837e-05,
+ "loss": 0.8645,
+ "step": 390
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9879799783369117e-05,
+ "loss": 0.7275,
+ "step": 391
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9878798060617653e-05,
+ "loss": 0.8935,
+ "step": 392
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987779220654636e-05,
+ "loss": 0.7538,
+ "step": 393
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987678222157589e-05,
+ "loss": 0.7957,
+ "step": 394
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9875768106128627e-05,
+ "loss": 0.7062,
+ "step": 395
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9874749860628658e-05,
+ "loss": 0.8019,
+ "step": 396
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987372748550183e-05,
+ "loss": 0.7829,
+ "step": 397
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987270098117569e-05,
+ "loss": 0.7659,
+ "step": 398
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987167034807953e-05,
+ "loss": 0.7441,
+ "step": 399
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987063558664436e-05,
+ "loss": 0.7279,
+ "step": 400
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9869596697302912e-05,
+ "loss": 0.7052,
+ "step": 401
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9868553680489654e-05,
+ "loss": 0.797,
+ "step": 402
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9867506536640782e-05,
+ "loss": 0.8472,
+ "step": 403
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9866455266194206e-05,
+ "loss": 0.7684,
+ "step": 404
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9865399869589565e-05,
+ "loss": 0.6435,
+ "step": 405
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9864340347268233e-05,
+ "loss": 0.7709,
+ "step": 406
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9863276699673305e-05,
+ "loss": 0.8196,
+ "step": 407
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.986220892724959e-05,
+ "loss": 0.7679,
+ "step": 408
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9861137030443638e-05,
+ "loss": 0.7767,
+ "step": 409
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9860061009703714e-05,
+ "loss": 0.8448,
+ "step": 410
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985898086547981e-05,
+ "loss": 0.7682,
+ "step": 411
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9857896598223644e-05,
+ "loss": 0.7095,
+ "step": 412
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9856808208388663e-05,
+ "loss": 0.8691,
+ "step": 413
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9855715696430023e-05,
+ "loss": 0.8229,
+ "step": 414
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985461906280462e-05,
+ "loss": 0.7608,
+ "step": 415
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985351830797106e-05,
+ "loss": 0.7777,
+ "step": 416
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9852413432389685e-05,
+ "loss": 0.7494,
+ "step": 417
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9851304436522555e-05,
+ "loss": 0.8084,
+ "step": 418
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985019132083345e-05,
+ "loss": 0.7939,
+ "step": 419
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9849074085787875e-05,
+ "loss": 0.817,
+ "step": 420
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9847952731853067e-05,
+ "loss": 0.8187,
+ "step": 421
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9846827259497964e-05,
+ "loss": 0.8406,
+ "step": 422
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9845697669193248e-05,
+ "loss": 0.8258,
+ "step": 423
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9844563961411308e-05,
+ "loss": 0.7855,
+ "step": 424
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.984342613662627e-05,
+ "loss": 0.7398,
+ "step": 425
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9842284195313964e-05,
+ "loss": 0.8047,
+ "step": 426
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.984113813795196e-05,
+ "loss": 0.7261,
+ "step": 427
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9839987965019532e-05,
+ "loss": 0.8001,
+ "step": 428
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9838833676997687e-05,
+ "loss": 0.8033,
+ "step": 429
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9837675274369147e-05,
+ "loss": 0.8345,
+ "step": 430
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9836512757618355e-05,
+ "loss": 0.7461,
+ "step": 431
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9835346127231483e-05,
+ "loss": 0.7678,
+ "step": 432
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.983417538369641e-05,
+ "loss": 0.6857,
+ "step": 433
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9833000527502746e-05,
+ "loss": 0.7882,
+ "step": 434
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9831821559141817e-05,
+ "loss": 0.8759,
+ "step": 435
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.983063847910666e-05,
+ "loss": 0.8211,
+ "step": 436
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9829451287892048e-05,
+ "loss": 0.7557,
+ "step": 437
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9828259985994465e-05,
+ "loss": 0.7531,
+ "step": 438
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9827064573912113e-05,
+ "loss": 0.8644,
+ "step": 439
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9825865052144914e-05,
+ "loss": 0.8186,
+ "step": 440
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9824661421194506e-05,
+ "loss": 0.715,
+ "step": 441
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.982345368156425e-05,
+ "loss": 0.7609,
+ "step": 442
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9822241833759226e-05,
+ "loss": 0.7673,
+ "step": 443
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9821025878286227e-05,
+ "loss": 0.7543,
+ "step": 444
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9819805815653768e-05,
+ "loss": 0.7529,
+ "step": 445
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.981858164637208e-05,
+ "loss": 0.7871,
+ "step": 446
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9817353370953105e-05,
+ "loss": 0.9059,
+ "step": 447
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.981612098991052e-05,
+ "loss": 0.8269,
+ "step": 448
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9814884503759698e-05,
+ "loss": 0.8343,
+ "step": 449
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9813643913017743e-05,
+ "loss": 0.8573,
+ "step": 450
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9812399218203467e-05,
+ "loss": 0.8051,
+ "step": 451
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.981115041983741e-05,
+ "loss": 0.8358,
+ "step": 452
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9809897518441813e-05,
+ "loss": 0.7895,
+ "step": 453
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.980864051454064e-05,
+ "loss": 0.8338,
+ "step": 454
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.980737940865958e-05,
+ "loss": 0.84,
+ "step": 455
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9806114201326015e-05,
+ "loss": 0.7632,
+ "step": 456
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9804844893069063e-05,
+ "loss": 0.8006,
+ "step": 457
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.980357148441955e-05,
+ "loss": 0.8932,
+ "step": 458
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9802293975910016e-05,
+ "loss": 0.7253,
+ "step": 459
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9801012368074713e-05,
+ "loss": 0.8361,
+ "step": 460
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.979972666144961e-05,
+ "loss": 0.7938,
+ "step": 461
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9798436856572397e-05,
+ "loss": 0.8705,
+ "step": 462
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9797142953982463e-05,
+ "loss": 0.7717,
+ "step": 463
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9795844954220926e-05,
+ "loss": 0.7615,
+ "step": 464
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9794542857830605e-05,
+ "loss": 0.834,
+ "step": 465
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9793236665356043e-05,
+ "loss": 0.7362,
+ "step": 466
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.979192637734348e-05,
+ "loss": 0.7887,
+ "step": 467
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.979061199434089e-05,
+ "loss": 0.8037,
+ "step": 468
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9789293516897947e-05,
+ "loss": 0.8313,
+ "step": 469
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9787970945566036e-05,
+ "loss": 0.7911,
+ "step": 470
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9786644280898258e-05,
+ "loss": 0.7675,
+ "step": 471
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9785313523449428e-05,
+ "loss": 0.8284,
+ "step": 472
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9783978673776067e-05,
+ "loss": 0.724,
+ "step": 473
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9782639732436406e-05,
+ "loss": 0.7568,
+ "step": 474
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9781296699990398e-05,
+ "loss": 0.7646,
+ "step": 475
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.97799495769997e-05,
+ "loss": 0.7908,
+ "step": 476
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.977859836402767e-05,
+ "loss": 0.7573,
+ "step": 477
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.97772430616394e-05,
+ "loss": 0.7799,
+ "step": 478
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9775883670401667e-05,
+ "loss": 0.7666,
+ "step": 479
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9774520190882978e-05,
+ "loss": 0.7763,
+ "step": 480
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9773152623653535e-05,
+ "loss": 0.8377,
+ "step": 481
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9771780969285258e-05,
+ "loss": 0.7484,
+ "step": 482
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.977040522835177e-05,
+ "loss": 0.8163,
+ "step": 483
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9769025401428417e-05,
+ "loss": 0.907,
+ "step": 484
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9767641489092232e-05,
+ "loss": 0.7216,
+ "step": 485
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9766253491921972e-05,
+ "loss": 0.7122,
+ "step": 486
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.97648614104981e-05,
+ "loss": 0.7274,
+ "step": 487
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9763465245402784e-05,
+ "loss": 0.8164,
+ "step": 488
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.97620649972199e-05,
+ "loss": 0.8084,
+ "step": 489
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9760660666535037e-05,
+ "loss": 0.6848,
+ "step": 490
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9759252253935485e-05,
+ "loss": 0.76,
+ "step": 491
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.975783976001024e-05,
+ "loss": 0.8256,
+ "step": 492
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.975642318535001e-05,
+ "loss": 0.7402,
+ "step": 493
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9755002530547206e-05,
+ "loss": 0.7095,
+ "step": 494
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.975357779619595e-05,
+ "loss": 0.8733,
+ "step": 495
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9752148982892066e-05,
+ "loss": 0.8731,
+ "step": 496
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9750716091233083e-05,
+ "loss": 0.7832,
+ "step": 497
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9749279121818235e-05,
+ "loss": 0.748,
+ "step": 498
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.974783807524847e-05,
+ "loss": 0.8768,
+ "step": 499
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9746392952126432e-05,
+ "loss": 0.8005,
+ "step": 500
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.974494375305647e-05,
+ "loss": 0.7563,
+ "step": 501
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.974349047864464e-05,
+ "loss": 0.8288,
+ "step": 502
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.974203312949871e-05,
+ "loss": 0.7402,
+ "step": 503
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9740571706228136e-05,
+ "loss": 0.7911,
+ "step": 504
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9739106209444087e-05,
+ "loss": 0.7663,
+ "step": 505
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9737636639759436e-05,
+ "loss": 0.7718,
+ "step": 506
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9736162997788757e-05,
+ "loss": 0.7658,
+ "step": 507
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.973468528414833e-05,
+ "loss": 0.7325,
+ "step": 508
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9733203499456136e-05,
+ "loss": 0.7028,
+ "step": 509
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9731717644331857e-05,
+ "loss": 0.8122,
+ "step": 510
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9730227719396883e-05,
+ "loss": 0.8926,
+ "step": 511
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9728733725274292e-05,
+ "loss": 0.7006,
+ "step": 512
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9727235662588882e-05,
+ "loss": 0.7792,
+ "step": 513
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9725733531967137e-05,
+ "loss": 0.7617,
+ "step": 514
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9724227334037255e-05,
+ "loss": 0.6689,
+ "step": 515
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.972271706942913e-05,
+ "loss": 0.8138,
+ "step": 516
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9721202738774346e-05,
+ "loss": 0.8198,
+ "step": 517
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9719684342706207e-05,
+ "loss": 0.8248,
+ "step": 518
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9718161881859703e-05,
+ "loss": 0.9099,
+ "step": 519
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.971663535687153e-05,
+ "loss": 0.8089,
+ "step": 520
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9715104768380078e-05,
+ "loss": 0.7548,
+ "step": 521
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9713570117025444e-05,
+ "loss": 0.8009,
+ "step": 522
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.971203140344942e-05,
+ "loss": 0.841,
+ "step": 523
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9710488628295495e-05,
+ "loss": 0.7868,
+ "step": 524
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9708941792208857e-05,
+ "loss": 0.8005,
+ "step": 525
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.97073908958364e-05,
+ "loss": 0.7595,
+ "step": 526
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9705835939826704e-05,
+ "loss": 0.845,
+ "step": 527
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.970427692483006e-05,
+ "loss": 0.7474,
+ "step": 528
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9702713851498436e-05,
+ "loss": 0.7773,
+ "step": 529
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9701146720485523e-05,
+ "loss": 0.7658,
+ "step": 530
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9699575532446688e-05,
+ "loss": 0.7375,
+ "step": 531
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.969800028803901e-05,
+ "loss": 0.8,
+ "step": 532
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.969642098792125e-05,
+ "loss": 0.6123,
+ "step": 533
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9694837632753877e-05,
+ "loss": 0.7436,
+ "step": 534
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9693250223199053e-05,
+ "loss": 0.7228,
+ "step": 535
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9691658759920625e-05,
+ "loss": 0.8078,
+ "step": 536
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9690063243584153e-05,
+ "loss": 0.7944,
+ "step": 537
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.968846367485688e-05,
+ "loss": 0.8483,
+ "step": 538
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9686860054407745e-05,
+ "loss": 0.7646,
+ "step": 539
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9685252382907388e-05,
+ "loss": 0.7652,
+ "step": 540
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.968364066102813e-05,
+ "loss": 0.8162,
+ "step": 541
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9682024889444003e-05,
+ "loss": 0.7168,
+ "step": 542
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9680405068830716e-05,
+ "loss": 0.8746,
+ "step": 543
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9678781199865688e-05,
+ "loss": 0.8029,
+ "step": 544
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.967715328322801e-05,
+ "loss": 0.84,
+ "step": 545
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.967552131959849e-05,
+ "loss": 0.6089,
+ "step": 546
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.967388530965961e-05,
+ "loss": 0.8415,
+ "step": 547
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.967224525409555e-05,
+ "loss": 0.7964,
+ "step": 548
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.967060115359218e-05,
+ "loss": 0.8413,
+ "step": 549
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9668953008837073e-05,
+ "loss": 0.7905,
+ "step": 550
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9667300820519478e-05,
+ "loss": 0.8264,
+ "step": 551
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.966564458933034e-05,
+ "loss": 0.806,
+ "step": 552
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.96639843159623e-05,
+ "loss": 0.8029,
+ "step": 553
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9662320001109682e-05,
+ "loss": 0.8018,
+ "step": 554
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9660651645468504e-05,
+ "loss": 0.779,
+ "step": 555
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9658979249736475e-05,
+ "loss": 0.8456,
+ "step": 556
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.965730281461299e-05,
+ "loss": 0.7167,
+ "step": 557
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.965562234079914e-05,
+ "loss": 0.749,
+ "step": 558
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9653937828997694e-05,
+ "loss": 0.7274,
+ "step": 559
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9652249279913122e-05,
+ "loss": 0.787,
+ "step": 560
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.965055669425157e-05,
+ "loss": 0.7936,
+ "step": 561
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9648860072720884e-05,
+ "loss": 0.8001,
+ "step": 562
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9647159416030588e-05,
+ "loss": 0.8321,
+ "step": 563
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.96454547248919e-05,
+ "loss": 0.7946,
+ "step": 564
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9643746000017722e-05,
+ "loss": 0.8438,
+ "step": 565
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9642033242122643e-05,
+ "loss": 0.7729,
+ "step": 566
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9640316451922943e-05,
+ "loss": 0.8088,
+ "step": 567
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.963859563013658e-05,
+ "loss": 0.7314,
+ "step": 568
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9636870777483207e-05,
+ "loss": 0.8122,
+ "step": 569
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9635141894684158e-05,
+ "loss": 0.7591,
+ "step": 570
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9633408982462453e-05,
+ "loss": 0.6553,
+ "step": 571
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9631672041542792e-05,
+ "loss": 0.8778,
+ "step": 572
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9629931072651572e-05,
+ "loss": 0.7156,
+ "step": 573
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.962818607651686e-05,
+ "loss": 0.8251,
+ "step": 574
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9626437053868423e-05,
+ "loss": 0.8687,
+ "step": 575
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.96246840054377e-05,
+ "loss": 0.7715,
+ "step": 576
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.962292693195781e-05,
+ "loss": 0.781,
+ "step": 577
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.962116583416357e-05,
+ "loss": 0.7446,
+ "step": 578
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9619400712791473e-05,
+ "loss": 0.8585,
+ "step": 579
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.961763156857969e-05,
+ "loss": 0.6477,
+ "step": 580
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.961585840226808e-05,
+ "loss": 0.7953,
+ "step": 581
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.961408121459818e-05,
+ "loss": 0.8055,
+ "step": 582
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9612300006313216e-05,
+ "loss": 0.7574,
+ "step": 583
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9610514778158086e-05,
+ "loss": 0.7675,
+ "step": 584
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9608725530879376e-05,
+ "loss": 0.7928,
+ "step": 585
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.960693226522535e-05,
+ "loss": 0.7497,
+ "step": 586
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.960513498194595e-05,
+ "loss": 0.6311,
+ "step": 587
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9603333681792804e-05,
+ "loss": 0.8121,
+ "step": 588
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9601528365519218e-05,
+ "loss": 0.7525,
+ "step": 589
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9599719033880173e-05,
+ "loss": 0.7465,
+ "step": 590
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9597905687632336e-05,
+ "loss": 0.8092,
+ "step": 591
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9596088327534045e-05,
+ "loss": 0.7337,
+ "step": 592
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.959426695434533e-05,
+ "loss": 0.791,
+ "step": 593
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9592441568827877e-05,
+ "loss": 0.8356,
+ "step": 594
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9590612171745076e-05,
+ "loss": 0.9026,
+ "step": 595
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9588778763861977e-05,
+ "loss": 0.7833,
+ "step": 596
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.958694134594531e-05,
+ "loss": 0.8231,
+ "step": 597
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.958509991876349e-05,
+ "loss": 0.7602,
+ "step": 598
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.95832544830866e-05,
+ "loss": 0.7539,
+ "step": 599
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.95814050396864e-05,
+ "loss": 0.8256,
+ "step": 600
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9579551589336333e-05,
+ "loss": 0.7742,
+ "step": 601
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9577694132811508e-05,
+ "loss": 0.8068,
+ "step": 602
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9575832670888723e-05,
+ "loss": 0.7282,
+ "step": 603
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.957396720434643e-05,
+ "loss": 0.7994,
+ "step": 604
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.957209773396478e-05,
+ "loss": 0.8836,
+ "step": 605
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.957022426052558e-05,
+ "loss": 0.7882,
+ "step": 606
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.956834678481232e-05,
+ "loss": 0.7531,
+ "step": 607
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9566465307610163e-05,
+ "loss": 0.7727,
+ "step": 608
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9564579829705945e-05,
+ "loss": 0.8498,
+ "step": 609
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9562690351888167e-05,
+ "loss": 0.7812,
+ "step": 610
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9560796874947016e-05,
+ "loss": 0.7779,
+ "step": 611
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9558899399674343e-05,
+ "loss": 0.6316,
+ "step": 612
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9556997926863676e-05,
+ "loss": 0.7495,
+ "step": 613
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9555092457310208e-05,
+ "loss": 0.7697,
+ "step": 614
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9553182991810812e-05,
+ "loss": 0.7033,
+ "step": 615
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9551269531164026e-05,
+ "loss": 0.7297,
+ "step": 616
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.954935207617006e-05,
+ "loss": 0.743,
+ "step": 617
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9547430627630794e-05,
+ "loss": 0.8469,
+ "step": 618
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.954550518634978e-05,
+ "loss": 0.8328,
+ "step": 619
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9543575753132242e-05,
+ "loss": 0.7569,
+ "step": 620
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9541642328785065e-05,
+ "loss": 0.8422,
+ "step": 621
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9539704914116808e-05,
+ "loss": 0.7654,
+ "step": 622
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9537763509937707e-05,
+ "loss": 0.8128,
+ "step": 623
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.953581811705965e-05,
+ "loss": 0.789,
+ "step": 624
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9533868736296205e-05,
+ "loss": 0.7303,
+ "step": 625
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9531915368462604e-05,
+ "loss": 0.7485,
+ "step": 626
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9529958014375748e-05,
+ "loss": 0.7696,
+ "step": 627
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.95279966748542e-05,
+ "loss": 0.7085,
+ "step": 628
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.95260313507182e-05,
+ "loss": 0.7918,
+ "step": 629
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.952406204278964e-05,
+ "loss": 0.7719,
+ "step": 630
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.952208875189209e-05,
+ "loss": 0.7739,
+ "step": 631
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9520111478850784e-05,
+ "loss": 0.803,
+ "step": 632
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9518130224492612e-05,
+ "loss": 0.7166,
+ "step": 633
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9516144989646145e-05,
+ "loss": 0.7739,
+ "step": 634
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9514155775141602e-05,
+ "loss": 0.8098,
+ "step": 635
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9512162581810877e-05,
+ "loss": 0.7585,
+ "step": 636
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9510165410487524e-05,
+ "loss": 0.7151,
+ "step": 637
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9508164262006758e-05,
+ "loss": 0.7904,
+ "step": 638
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.950615913720547e-05,
+ "loss": 0.7659,
+ "step": 639
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9504150036922195e-05,
+ "loss": 0.6909,
+ "step": 640
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9502136961997144e-05,
+ "loss": 0.8619,
+ "step": 641
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9500119913272186e-05,
+ "loss": 0.7418,
+ "step": 642
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9498098891590855e-05,
+ "loss": 0.837,
+ "step": 643
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9496073897798333e-05,
+ "loss": 0.7879,
+ "step": 644
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9494044932741488e-05,
+ "loss": 0.8256,
+ "step": 645
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9492011997268825e-05,
+ "loss": 0.7214,
+ "step": 646
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.948997509223052e-05,
+ "loss": 0.724,
+ "step": 647
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9487934218478413e-05,
+ "loss": 0.7929,
+ "step": 648
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9485889376865998e-05,
+ "loss": 0.7274,
+ "step": 649
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.948384056824842e-05,
+ "loss": 0.6854,
+ "step": 650
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9481787793482503e-05,
+ "loss": 0.7363,
+ "step": 651
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9479731053426712e-05,
+ "loss": 0.7963,
+ "step": 652
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9477670348941182e-05,
+ "loss": 0.7923,
+ "step": 653
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9475605680887696e-05,
+ "loss": 0.7935,
+ "step": 654
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9473537050129704e-05,
+ "loss": 0.8609,
+ "step": 655
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9471464457532307e-05,
+ "loss": 0.8394,
+ "step": 656
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9469387903962263e-05,
+ "loss": 0.8596,
+ "step": 657
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9467307390287993e-05,
+ "loss": 0.736,
+ "step": 658
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.946522291737956e-05,
+ "loss": 0.6914,
+ "step": 659
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.94631344861087e-05,
+ "loss": 0.7047,
+ "step": 660
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9461042097348796e-05,
+ "loss": 0.7586,
+ "step": 661
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.945894575197488e-05,
+ "loss": 0.8357,
+ "step": 662
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9456845450863654e-05,
+ "loss": 0.7623,
+ "step": 663
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9454741194893456e-05,
+ "loss": 0.8468,
+ "step": 664
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9452632984944295e-05,
+ "loss": 0.7955,
+ "step": 665
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.945052082189782e-05,
+ "loss": 0.7707,
+ "step": 666
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9448404706637344e-05,
+ "loss": 0.7176,
+ "step": 667
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9446284640047822e-05,
+ "loss": 0.8351,
+ "step": 668
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9444160623015877e-05,
+ "loss": 0.8392,
+ "step": 669
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.944203265642976e-05,
+ "loss": 0.7288,
+ "step": 670
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.94399007411794e-05,
+ "loss": 0.8431,
+ "step": 671
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.943776487815636e-05,
+ "loss": 0.8447,
+ "step": 672
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9435625068253863e-05,
+ "loss": 0.8001,
+ "step": 673
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.943348131236677e-05,
+ "loss": 0.8232,
+ "step": 674
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9431333611391615e-05,
+ "loss": 0.7654,
+ "step": 675
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9429181966226558e-05,
+ "loss": 0.7788,
+ "step": 676
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.942702637777142e-05,
+ "loss": 0.7626,
+ "step": 677
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.942486684692767e-05,
+ "loss": 0.8699,
+ "step": 678
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9422703374598425e-05,
+ "loss": 0.806,
+ "step": 679
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9420535961688452e-05,
+ "loss": 0.8178,
+ "step": 680
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9418364609104166e-05,
+ "loss": 0.7274,
+ "step": 681
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9416189317753625e-05,
+ "loss": 0.8199,
+ "step": 682
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9414010088546535e-05,
+ "loss": 0.7408,
+ "step": 683
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.941182692239426e-05,
+ "loss": 0.7945,
+ "step": 684
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.940963982020979e-05,
+ "loss": 0.7571,
+ "step": 685
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.940744878290778e-05,
+ "loss": 0.849,
+ "step": 686
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9405253811404522e-05,
+ "loss": 0.8317,
+ "step": 687
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9403054906617952e-05,
+ "loss": 0.841,
+ "step": 688
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9400852069467657e-05,
+ "loss": 0.77,
+ "step": 689
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9398645300874865e-05,
+ "loss": 0.8495,
+ "step": 690
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9396434601762444e-05,
+ "loss": 0.7203,
+ "step": 691
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9394219973054912e-05,
+ "loss": 0.7857,
+ "step": 692
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.939200141567843e-05,
+ "loss": 0.7989,
+ "step": 693
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.93897789305608e-05,
+ "loss": 0.757,
+ "step": 694
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.938755251863146e-05,
+ "loss": 0.7395,
+ "step": 695
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9385322180821506e-05,
+ "loss": 0.7638,
+ "step": 696
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9383087918063662e-05,
+ "loss": 0.7983,
+ "step": 697
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.93808497312923e-05,
+ "loss": 0.8139,
+ "step": 698
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.937860762144343e-05,
+ "loss": 0.7137,
+ "step": 699
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9376361589454703e-05,
+ "loss": 0.6735,
+ "step": 700
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9374111636265412e-05,
+ "loss": 0.6698,
+ "step": 701
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.937185776281649e-05,
+ "loss": 0.7318,
+ "step": 702
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9369599970050506e-05,
+ "loss": 0.8274,
+ "step": 703
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9367338258911674e-05,
+ "loss": 0.7852,
+ "step": 704
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9365072630345838e-05,
+ "loss": 0.805,
+ "step": 705
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9362803085300492e-05,
+ "loss": 0.7211,
+ "step": 706
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9360529624724754e-05,
+ "loss": 0.764,
+ "step": 707
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.935825224956939e-05,
+ "loss": 0.7179,
+ "step": 708
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9355970960786803e-05,
+ "loss": 0.8037,
+ "step": 709
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9353685759331027e-05,
+ "loss": 0.7705,
+ "step": 710
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.935139664615773e-05,
+ "loss": 0.7089,
+ "step": 711
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.934910362222423e-05,
+ "loss": 0.7649,
+ "step": 712
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.934680668848946e-05,
+ "loss": 0.8138,
+ "step": 713
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9344505845914008e-05,
+ "loss": 0.8359,
+ "step": 714
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9342201095460083e-05,
+ "loss": 0.785,
+ "step": 715
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9339892438091538e-05,
+ "loss": 0.7485,
+ "step": 716
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.933757987477385e-05,
+ "loss": 0.7442,
+ "step": 717
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.933526340647414e-05,
+ "loss": 0.7827,
+ "step": 718
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.933294303416115e-05,
+ "loss": 0.8907,
+ "step": 719
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9330618758805264e-05,
+ "loss": 0.7622,
+ "step": 720
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9328290581378497e-05,
+ "loss": 0.7666,
+ "step": 721
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9325958502854495e-05,
+ "loss": 0.7697,
+ "step": 722
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.932362252420853e-05,
+ "loss": 0.7319,
+ "step": 723
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9321282646417515e-05,
+ "loss": 0.7406,
+ "step": 724
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9318938870459984e-05,
+ "loss": 0.8314,
+ "step": 725
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.931659119731611e-05,
+ "loss": 0.6448,
+ "step": 726
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9314239627967688e-05,
+ "loss": 0.8353,
+ "step": 727
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9311884163398148e-05,
+ "loss": 0.7971,
+ "step": 728
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9309524804592545e-05,
+ "loss": 0.7616,
+ "step": 729
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9307161552537566e-05,
+ "loss": 0.8114,
+ "step": 730
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9304794408221524e-05,
+ "loss": 0.7054,
+ "step": 731
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.930242337263436e-05,
+ "loss": 0.731,
+ "step": 732
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9300048446767643e-05,
+ "loss": 0.7926,
+ "step": 733
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9297669631614568e-05,
+ "loss": 0.7254,
+ "step": 734
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.929528692816996e-05,
+ "loss": 0.7255,
+ "step": 735
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.929290033743026e-05,
+ "loss": 0.7445,
+ "step": 736
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.929050986039355e-05,
+ "loss": 0.7926,
+ "step": 737
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9288115498059524e-05,
+ "loss": 0.8144,
+ "step": 738
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9285717251429507e-05,
+ "loss": 0.7812,
+ "step": 739
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.928331512150645e-05,
+ "loss": 0.7928,
+ "step": 740
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.928090910929492e-05,
+ "loss": 0.8244,
+ "step": 741
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9278499215801117e-05,
+ "loss": 0.7736,
+ "step": 742
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.927608544203286e-05,
+ "loss": 0.8126,
+ "step": 743
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.927366778899959e-05,
+ "loss": 0.7497,
+ "step": 744
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9271246257712368e-05,
+ "loss": 0.7847,
+ "step": 745
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9268820849183885e-05,
+ "loss": 0.698,
+ "step": 746
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.926639156442845e-05,
+ "loss": 0.8166,
+ "step": 747
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.926395840446198e-05,
+ "loss": 0.7601,
+ "step": 748
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9261521370302035e-05,
+ "loss": 0.8662,
+ "step": 749
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9259080462967784e-05,
+ "loss": 0.72,
+ "step": 750
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.925663568348001e-05,
+ "loss": 0.7579,
+ "step": 751
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.925418703286112e-05,
+ "loss": 0.7886,
+ "step": 752
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9251734512135155e-05,
+ "loss": 0.7346,
+ "step": 753
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9249278122327748e-05,
+ "loss": 0.7489,
+ "step": 754
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9246817864466166e-05,
+ "loss": 0.7784,
+ "step": 755
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.924435373957929e-05,
+ "loss": 0.9041,
+ "step": 756
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9241885748697618e-05,
+ "loss": 0.8103,
+ "step": 757
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.923941389285327e-05,
+ "loss": 0.7579,
+ "step": 758
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9236938173079974e-05,
+ "loss": 0.8294,
+ "step": 759
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9234458590413076e-05,
+ "loss": 0.7504,
+ "step": 760
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9231975145889543e-05,
+ "loss": 0.8479,
+ "step": 761
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9229487840547955e-05,
+ "loss": 0.8351,
+ "step": 762
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9226996675428497e-05,
+ "loss": 0.8996,
+ "step": 763
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.922450165157298e-05,
+ "loss": 0.6891,
+ "step": 764
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9222002770024827e-05,
+ "loss": 0.7857,
+ "step": 765
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9219500031829067e-05,
+ "loss": 0.7661,
+ "step": 766
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.921699343803235e-05,
+ "loss": 0.782,
+ "step": 767
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9214482989682936e-05,
+ "loss": 0.6971,
+ "step": 768
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9211968687830693e-05,
+ "loss": 0.7433,
+ "step": 769
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9209450533527107e-05,
+ "loss": 0.6422,
+ "step": 770
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9206928527825268e-05,
+ "loss": 0.8908,
+ "step": 771
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9204402671779886e-05,
+ "loss": 0.7259,
+ "step": 772
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.920187296644727e-05,
+ "loss": 0.7415,
+ "step": 773
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9199339412885347e-05,
+ "loss": 0.7617,
+ "step": 774
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.919680201215365e-05,
+ "loss": 0.7566,
+ "step": 775
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.919426076531332e-05,
+ "loss": 0.7678,
+ "step": 776
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9191715673427113e-05,
+ "loss": 0.7501,
+ "step": 777
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9189166737559386e-05,
+ "loss": 0.642,
+ "step": 778
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.91866139587761e-05,
+ "loss": 0.6774,
+ "step": 779
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.918405733814484e-05,
+ "loss": 0.7752,
+ "step": 780
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.918149687673478e-05,
+ "loss": 0.7924,
+ "step": 781
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.91789325756167e-05,
+ "loss": 0.7768,
+ "step": 782
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9176364435863005e-05,
+ "loss": 0.7894,
+ "step": 783
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9173792458547687e-05,
+ "loss": 0.693,
+ "step": 784
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9171216644746345e-05,
+ "loss": 0.7583,
+ "step": 785
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.916863699553619e-05,
+ "loss": 0.7262,
+ "step": 786
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9166053511996035e-05,
+ "loss": 0.7251,
+ "step": 787
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9163466195206292e-05,
+ "loss": 0.6858,
+ "step": 788
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9160875046248974e-05,
+ "loss": 0.7122,
+ "step": 789
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.915828006620771e-05,
+ "loss": 0.7047,
+ "step": 790
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.915568125616772e-05,
+ "loss": 0.6521,
+ "step": 791
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.915307861721582e-05,
+ "loss": 0.7208,
+ "step": 792
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9150472150440443e-05,
+ "loss": 0.6571,
+ "step": 793
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9147861856931618e-05,
+ "loss": 0.8122,
+ "step": 794
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9145247737780963e-05,
+ "loss": 0.7236,
+ "step": 795
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.914262979408171e-05,
+ "loss": 0.7019,
+ "step": 796
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9140008026928683e-05,
+ "loss": 0.7679,
+ "step": 797
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.913738243741831e-05,
+ "loss": 0.8039,
+ "step": 798
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9134753026648605e-05,
+ "loss": 0.8782,
+ "step": 799
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9132119795719198e-05,
+ "loss": 0.8473,
+ "step": 800
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9129482745731308e-05,
+ "loss": 0.8601,
+ "step": 801
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9126841877787745e-05,
+ "loss": 0.8361,
+ "step": 802
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9124197192992927e-05,
+ "loss": 0.745,
+ "step": 803
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9121548692452858e-05,
+ "loss": 0.6995,
+ "step": 804
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.911889637727515e-05,
+ "loss": 0.8232,
+ "step": 805
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9116240248568996e-05,
+ "loss": 0.7797,
+ "step": 806
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9113580307445196e-05,
+ "loss": 0.7773,
+ "step": 807
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9110916555016134e-05,
+ "loss": 0.7704,
+ "step": 808
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9108248992395797e-05,
+ "loss": 0.823,
+ "step": 809
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.910557762069976e-05,
+ "loss": 0.7977,
+ "step": 810
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.910290244104519e-05,
+ "loss": 0.7636,
+ "step": 811
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9100223454550853e-05,
+ "loss": 0.7046,
+ "step": 812
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9097540662337102e-05,
+ "loss": 0.7554,
+ "step": 813
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.909485406552588e-05,
+ "loss": 0.7329,
+ "step": 814
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9092163665240726e-05,
+ "loss": 0.8186,
+ "step": 815
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9089469462606766e-05,
+ "loss": 0.7278,
+ "step": 816
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9086771458750716e-05,
+ "loss": 0.7769,
+ "step": 817
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.908406965480089e-05,
+ "loss": 0.6756,
+ "step": 818
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9081364051887173e-05,
+ "loss": 0.8231,
+ "step": 819
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.907865465114106e-05,
+ "loss": 0.7943,
+ "step": 820
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9075941453695617e-05,
+ "loss": 0.7705,
+ "step": 821
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9073224460685507e-05,
+ "loss": 0.7708,
+ "step": 822
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9070503673246982e-05,
+ "loss": 0.7331,
+ "step": 823
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9067779092517875e-05,
+ "loss": 0.8083,
+ "step": 824
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9065050719637604e-05,
+ "loss": 0.7991,
+ "step": 825
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.906231855574718e-05,
+ "loss": 0.8181,
+ "step": 826
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9059582601989196e-05,
+ "loss": 0.8561,
+ "step": 827
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9056842859507827e-05,
+ "loss": 0.6142,
+ "step": 828
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9054099329448835e-05,
+ "loss": 0.7761,
+ "step": 829
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.905135201295957e-05,
+ "loss": 0.7839,
+ "step": 830
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.904860091118896e-05,
+ "loss": 0.7986,
+ "step": 831
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9045846025287514e-05,
+ "loss": 0.7026,
+ "step": 832
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.904308735640733e-05,
+ "loss": 0.7382,
+ "step": 833
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9040324905702085e-05,
+ "loss": 0.7995,
+ "step": 834
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9037558674327036e-05,
+ "loss": 0.7296,
+ "step": 835
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9034788663439027e-05,
+ "loss": 0.8166,
+ "step": 836
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9032014874196476e-05,
+ "loss": 0.7054,
+ "step": 837
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.902923730775938e-05,
+ "loss": 0.6992,
+ "step": 838
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9026455965289326e-05,
+ "loss": 0.773,
+ "step": 839
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9023670847949467e-05,
+ "loss": 0.8089,
+ "step": 840
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9020881956904543e-05,
+ "loss": 0.811,
+ "step": 841
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9018089293320872e-05,
+ "loss": 0.7633,
+ "step": 842
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.901529285836635e-05,
+ "loss": 0.7583,
+ "step": 843
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.901249265321044e-05,
+ "loss": 0.6704,
+ "step": 844
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.900968867902419e-05,
+ "loss": 0.7367,
+ "step": 845
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9006880936980234e-05,
+ "loss": 0.7926,
+ "step": 846
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9004069428252765e-05,
+ "loss": 0.7003,
+ "step": 847
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9001254154017554e-05,
+ "loss": 0.8632,
+ "step": 848
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.899843511545196e-05,
+ "loss": 0.7215,
+ "step": 849
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8995612313734896e-05,
+ "loss": 0.7929,
+ "step": 850
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8992785750046866e-05,
+ "loss": 0.8726,
+ "step": 851
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.898995542556994e-05,
+ "loss": 0.7669,
+ "step": 852
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.898712134148776e-05,
+ "loss": 0.7352,
+ "step": 853
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8984283498985548e-05,
+ "loss": 0.7663,
+ "step": 854
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8981441899250082e-05,
+ "loss": 0.844,
+ "step": 855
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8978596543469726e-05,
+ "loss": 0.868,
+ "step": 856
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.897574743283441e-05,
+ "loss": 0.7632,
+ "step": 857
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8972894568535632e-05,
+ "loss": 0.7207,
+ "step": 858
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8970037951766468e-05,
+ "loss": 0.7182,
+ "step": 859
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.896717758372154e-05,
+ "loss": 0.8471,
+ "step": 860
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.896431346559708e-05,
+ "loss": 0.7616,
+ "step": 861
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8961445598590846e-05,
+ "loss": 0.6738,
+ "step": 862
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.895857398390219e-05,
+ "loss": 0.7838,
+ "step": 863
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8955698622732016e-05,
+ "loss": 0.7715,
+ "step": 864
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.895281951628281e-05,
+ "loss": 0.8702,
+ "step": 865
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.894993666575861e-05,
+ "loss": 0.8231,
+ "step": 866
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8947050072365033e-05,
+ "loss": 0.7557,
+ "step": 867
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.894415973730925e-05,
+ "loss": 0.7416,
+ "step": 868
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.89412656618e-05,
+ "loss": 0.8276,
+ "step": 869
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8938367847047585e-05,
+ "loss": 0.724,
+ "step": 870
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8935466294263876e-05,
+ "loss": 0.7465,
+ "step": 871
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8932561004662312e-05,
+ "loss": 0.6924,
+ "step": 872
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8929651979457874e-05,
+ "loss": 0.838,
+ "step": 873
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8926739219867127e-05,
+ "loss": 0.7551,
+ "step": 874
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.892382272710818e-05,
+ "loss": 0.8141,
+ "step": 875
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8920902502400726e-05,
+ "loss": 0.8288,
+ "step": 876
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.891797854696599e-05,
+ "loss": 0.7163,
+ "step": 877
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.891505086202678e-05,
+ "loss": 0.7613,
+ "step": 878
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.891211944880746e-05,
+ "loss": 0.7844,
+ "step": 879
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8909184308533938e-05,
+ "loss": 0.6475,
+ "step": 880
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.89062454424337e-05,
+ "loss": 0.7708,
+ "step": 881
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8903302851735777e-05,
+ "loss": 0.7857,
+ "step": 882
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8900356537670768e-05,
+ "loss": 0.7868,
+ "step": 883
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.889740650147082e-05,
+ "loss": 0.7681,
+ "step": 884
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8894452744369632e-05,
+ "loss": 0.7669,
+ "step": 885
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8891495267602482e-05,
+ "loss": 0.7468,
+ "step": 886
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8888534072406182e-05,
+ "loss": 0.6183,
+ "step": 887
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8885569160019102e-05,
+ "loss": 0.7173,
+ "step": 888
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.888260053168118e-05,
+ "loss": 0.727,
+ "step": 889
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8879628188633886e-05,
+ "loss": 0.8525,
+ "step": 890
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8876652132120267e-05,
+ "loss": 0.861,
+ "step": 891
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8873672363384902e-05,
+ "loss": 0.7548,
+ "step": 892
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.887068888367394e-05,
+ "loss": 0.8283,
+ "step": 893
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8867701694235066e-05,
+ "loss": 0.7408,
+ "step": 894
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8864710796317537e-05,
+ "loss": 0.774,
+ "step": 895
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8861716191172138e-05,
+ "loss": 0.7085,
+ "step": 896
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.885871788005122e-05,
+ "loss": 0.7802,
+ "step": 897
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8855715864208672e-05,
+ "loss": 0.7385,
+ "step": 898
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8852710144899948e-05,
+ "loss": 0.7923,
+ "step": 899
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8849700723382038e-05,
+ "loss": 0.849,
+ "step": 900
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.884668760091348e-05,
+ "loss": 0.7661,
+ "step": 901
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8843670778754372e-05,
+ "loss": 0.8484,
+ "step": 902
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8840650258166343e-05,
+ "loss": 0.6825,
+ "step": 903
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.883762604041258e-05,
+ "loss": 0.7458,
+ "step": 904
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8834598126757812e-05,
+ "loss": 0.6884,
+ "step": 905
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.883156651846832e-05,
+ "loss": 0.7911,
+ "step": 906
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8828531216811912e-05,
+ "loss": 0.7359,
+ "step": 907
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.882549222305797e-05,
+ "loss": 0.6841,
+ "step": 908
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.882244953847739e-05,
+ "loss": 0.8303,
+ "step": 909
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.881940316434263e-05,
+ "loss": 0.8304,
+ "step": 910
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8816353101927686e-05,
+ "loss": 0.7541,
+ "step": 911
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.881329935250809e-05,
+ "loss": 0.778,
+ "step": 912
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.881024191736093e-05,
+ "loss": 0.8085,
+ "step": 913
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8807180797764822e-05,
+ "loss": 0.7939,
+ "step": 914
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8804115994999933e-05,
+ "loss": 0.8144,
+ "step": 915
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8801047510347966e-05,
+ "loss": 0.6324,
+ "step": 916
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8797975345092153e-05,
+ "loss": 0.7734,
+ "step": 917
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8794899500517284e-05,
+ "loss": 0.7071,
+ "step": 918
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8791819977909682e-05,
+ "loss": 0.8434,
+ "step": 919
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.87887367785572e-05,
+ "loss": 0.7579,
+ "step": 920
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8785649903749236e-05,
+ "loss": 0.7789,
+ "step": 921
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.878255935477672e-05,
+ "loss": 0.7586,
+ "step": 922
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8779465132932124e-05,
+ "loss": 0.7546,
+ "step": 923
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8776367239509454e-05,
+ "loss": 0.7803,
+ "step": 924
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.877326567580425e-05,
+ "loss": 0.7466,
+ "step": 925
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.877016044311359e-05,
+ "loss": 0.6828,
+ "step": 926
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8767051542736084e-05,
+ "loss": 0.7268,
+ "step": 927
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8763938975971873e-05,
+ "loss": 0.7767,
+ "step": 928
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8760822744122637e-05,
+ "loss": 0.7462,
+ "step": 929
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8757702848491586e-05,
+ "loss": 0.8592,
+ "step": 930
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8754579290383465e-05,
+ "loss": 0.8006,
+ "step": 931
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.875145207110454e-05,
+ "loss": 0.7771,
+ "step": 932
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8748321191962625e-05,
+ "loss": 0.7734,
+ "step": 933
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.874518665426705e-05,
+ "loss": 0.7493,
+ "step": 934
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8742048459328684e-05,
+ "loss": 0.6632,
+ "step": 935
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.873890660845992e-05,
+ "loss": 0.7595,
+ "step": 936
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8735761102974684e-05,
+ "loss": 0.7711,
+ "step": 937
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8732611944188427e-05,
+ "loss": 0.8979,
+ "step": 938
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.872945913341813e-05,
+ "loss": 0.7979,
+ "step": 939
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8726302671982302e-05,
+ "loss": 0.6771,
+ "step": 940
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8723142561200973e-05,
+ "loss": 0.7671,
+ "step": 941
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8719978802395707e-05,
+ "loss": 0.711,
+ "step": 942
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.871681139688959e-05,
+ "loss": 0.7733,
+ "step": 943
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8713640346007232e-05,
+ "loss": 0.8056,
+ "step": 944
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8710465651074766e-05,
+ "loss": 0.6836,
+ "step": 945
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8707287313419854e-05,
+ "loss": 0.8282,
+ "step": 946
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8704105334371683e-05,
+ "loss": 0.7447,
+ "step": 947
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.870091971526095e-05,
+ "loss": 0.7356,
+ "step": 948
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8697730457419893e-05,
+ "loss": 0.6998,
+ "step": 949
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8694537562182255e-05,
+ "loss": 0.7743,
+ "step": 950
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8691341030883306e-05,
+ "loss": 0.7673,
+ "step": 951
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8688140864859844e-05,
+ "loss": 0.7978,
+ "step": 952
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.868493706545017e-05,
+ "loss": 0.7032,
+ "step": 953
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8681729633994126e-05,
+ "loss": 0.7511,
+ "step": 954
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8678518571833054e-05,
+ "loss": 0.7783,
+ "step": 955
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8675303880309828e-05,
+ "loss": 0.6539,
+ "step": 956
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.867208556076883e-05,
+ "loss": 0.8461,
+ "step": 957
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.866886361455597e-05,
+ "loss": 0.7634,
+ "step": 958
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.866563804301866e-05,
+ "loss": 0.7511,
+ "step": 959
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8662408847505834e-05,
+ "loss": 0.7603,
+ "step": 960
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8659176029367957e-05,
+ "loss": 0.8101,
+ "step": 961
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8655939589956983e-05,
+ "loss": 0.8015,
+ "step": 962
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8652699530626398e-05,
+ "loss": 0.6873,
+ "step": 963
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8649455852731197e-05,
+ "loss": 0.8349,
+ "step": 964
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8646208557627888e-05,
+ "loss": 0.7259,
+ "step": 965
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8642957646674493e-05,
+ "loss": 0.7853,
+ "step": 966
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8639703121230542e-05,
+ "loss": 0.8345,
+ "step": 967
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.863644498265708e-05,
+ "loss": 0.8469,
+ "step": 968
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8633183232316668e-05,
+ "loss": 0.7471,
+ "step": 969
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8629917871573368e-05,
+ "loss": 0.7316,
+ "step": 970
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8626648901792753e-05,
+ "loss": 0.7203,
+ "step": 971
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8623376324341915e-05,
+ "loss": 0.7829,
+ "step": 972
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8620100140589443e-05,
+ "loss": 0.7829,
+ "step": 973
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.861682035190544e-05,
+ "loss": 0.7315,
+ "step": 974
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8613536959661518e-05,
+ "loss": 0.7979,
+ "step": 975
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8610249965230792e-05,
+ "loss": 0.738,
+ "step": 976
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8606959369987885e-05,
+ "loss": 0.794,
+ "step": 977
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8603665175308925e-05,
+ "loss": 0.7712,
+ "step": 978
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.860036738257155e-05,
+ "loss": 0.8446,
+ "step": 979
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.85970659931549e-05,
+ "loss": 0.7798,
+ "step": 980
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8593761008439614e-05,
+ "loss": 0.6885,
+ "step": 981
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8590452429807838e-05,
+ "loss": 0.7674,
+ "step": 982
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8587140258643225e-05,
+ "loss": 0.7784,
+ "step": 983
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8583824496330925e-05,
+ "loss": 0.8094,
+ "step": 984
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8580505144257595e-05,
+ "loss": 0.806,
+ "step": 985
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8577182203811387e-05,
+ "loss": 0.6475,
+ "step": 986
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.857385567638196e-05,
+ "loss": 0.8072,
+ "step": 987
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8570525563360468e-05,
+ "loss": 0.7932,
+ "step": 988
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8567191866139564e-05,
+ "loss": 0.7918,
+ "step": 989
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8563854586113407e-05,
+ "loss": 0.7884,
+ "step": 990
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8560513724677644e-05,
+ "loss": 0.7698,
+ "step": 991
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.855716928322943e-05,
+ "loss": 0.7121,
+ "step": 992
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8553821263167417e-05,
+ "loss": 0.7502,
+ "step": 993
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.855046966589174e-05,
+ "loss": 0.8119,
+ "step": 994
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8547114492804042e-05,
+ "loss": 0.7707,
+ "step": 995
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8543755745307462e-05,
+ "loss": 0.7735,
+ "step": 996
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8540393424806627e-05,
+ "loss": 0.7662,
+ "step": 997
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.853702753270766e-05,
+ "loss": 0.7411,
+ "step": 998
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8533658070418186e-05,
+ "loss": 0.7541,
+ "step": 999
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.853028503934731e-05,
+ "loss": 0.8063,
+ "step": 1000
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.852690844090564e-05,
+ "loss": 0.8111,
+ "step": 1001
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8523528276505266e-05,
+ "loss": 0.7993,
+ "step": 1002
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8520144547559783e-05,
+ "loss": 0.8091,
+ "step": 1003
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8516757255484264e-05,
+ "loss": 0.8534,
+ "step": 1004
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8513366401695277e-05,
+ "loss": 0.7781,
+ "step": 1005
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8509971987610877e-05,
+ "loss": 0.8008,
+ "step": 1006
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8506574014650613e-05,
+ "loss": 0.647,
+ "step": 1007
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.850317248423552e-05,
+ "loss": 0.7666,
+ "step": 1008
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.849976739778812e-05,
+ "loss": 0.7074,
+ "step": 1009
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.849635875673242e-05,
+ "loss": 0.7674,
+ "step": 1010
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.849294656249392e-05,
+ "loss": 0.7609,
+ "step": 1011
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8489530816499597e-05,
+ "loss": 0.8831,
+ "step": 1012
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8486111520177925e-05,
+ "loss": 0.8187,
+ "step": 1013
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8482688674958847e-05,
+ "loss": 0.7197,
+ "step": 1014
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8479262282273805e-05,
+ "loss": 0.7842,
+ "step": 1015
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8475832343555717e-05,
+ "loss": 0.8478,
+ "step": 1016
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8472398860238988e-05,
+ "loss": 0.8708,
+ "step": 1017
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8468961833759503e-05,
+ "loss": 0.7939,
+ "step": 1018
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8465521265554623e-05,
+ "loss": 0.6695,
+ "step": 1019
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8462077157063203e-05,
+ "loss": 0.7425,
+ "step": 1020
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8458629509725565e-05,
+ "loss": 0.8634,
+ "step": 1021
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8455178324983522e-05,
+ "loss": 0.7717,
+ "step": 1022
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.845172360428036e-05,
+ "loss": 0.8125,
+ "step": 1023
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8448265349060846e-05,
+ "loss": 0.7299,
+ "step": 1024
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8444803560771222e-05,
+ "loss": 0.7416,
+ "step": 1025
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8441338240859216e-05,
+ "loss": 0.8012,
+ "step": 1026
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8437869390774022e-05,
+ "loss": 0.7257,
+ "step": 1027
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8434397011966315e-05,
+ "loss": 0.7814,
+ "step": 1028
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8430921105888254e-05,
+ "loss": 0.6628,
+ "step": 1029
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8427441673993455e-05,
+ "loss": 0.773,
+ "step": 1030
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.842395871773702e-05,
+ "loss": 0.7846,
+ "step": 1031
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.842047223857553e-05,
+ "loss": 0.7947,
+ "step": 1032
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.841698223796703e-05,
+ "loss": 0.7325,
+ "step": 1033
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.841348871737104e-05,
+ "loss": 0.8078,
+ "step": 1034
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8409991678248548e-05,
+ "loss": 0.6045,
+ "step": 1035
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8406491122062028e-05,
+ "loss": 0.7658,
+ "step": 1036
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8402987050275408e-05,
+ "loss": 0.7559,
+ "step": 1037
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8399479464354092e-05,
+ "loss": 0.6697,
+ "step": 1038
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.839596836576496e-05,
+ "loss": 0.7939,
+ "step": 1039
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.839245375597635e-05,
+ "loss": 0.7184,
+ "step": 1040
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.838893563645808e-05,
+ "loss": 0.7103,
+ "step": 1041
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8385414008681427e-05,
+ "loss": 0.7512,
+ "step": 1042
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8381888874119135e-05,
+ "loss": 0.8529,
+ "step": 1043
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8378360234245424e-05,
+ "loss": 0.7035,
+ "step": 1044
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.837482809053597e-05,
+ "loss": 0.7616,
+ "step": 1045
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8371292444467918e-05,
+ "loss": 0.8238,
+ "step": 1046
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8367753297519874e-05,
+ "loss": 0.6791,
+ "step": 1047
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.836421065117192e-05,
+ "loss": 0.7794,
+ "step": 1048
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8360664506905587e-05,
+ "loss": 0.7356,
+ "step": 1049
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.835711486620387e-05,
+ "loss": 0.7204,
+ "step": 1050
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8353561730551243e-05,
+ "loss": 0.7853,
+ "step": 1051
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8350005101433624e-05,
+ "loss": 0.7827,
+ "step": 1052
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8346444980338394e-05,
+ "loss": 0.7278,
+ "step": 1053
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8342881368754403e-05,
+ "loss": 0.8263,
+ "step": 1054
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8339314268171953e-05,
+ "loss": 0.7549,
+ "step": 1055
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.833574368008281e-05,
+ "loss": 0.76,
+ "step": 1056
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8332169605980193e-05,
+ "loss": 0.7457,
+ "step": 1057
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8328592047358785e-05,
+ "loss": 0.7742,
+ "step": 1058
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8325011005714726e-05,
+ "loss": 0.6311,
+ "step": 1059
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8321426482545605e-05,
+ "loss": 0.7137,
+ "step": 1060
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8317838479350473e-05,
+ "loss": 0.8268,
+ "step": 1061
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8314246997629836e-05,
+ "loss": 0.7229,
+ "step": 1062
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.831065203888565e-05,
+ "loss": 0.8905,
+ "step": 1063
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8307053604621338e-05,
+ "loss": 0.7046,
+ "step": 1064
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.830345169634176e-05,
+ "loss": 0.7024,
+ "step": 1065
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8299846315553237e-05,
+ "loss": 0.6985,
+ "step": 1066
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8296237463763543e-05,
+ "loss": 0.6518,
+ "step": 1067
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.829262514248191e-05,
+ "loss": 0.7876,
+ "step": 1068
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8289009353218995e-05,
+ "loss": 0.7064,
+ "step": 1069
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8285390097486942e-05,
+ "loss": 0.6988,
+ "step": 1070
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8281767376799315e-05,
+ "loss": 0.7656,
+ "step": 1071
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8278141192671145e-05,
+ "loss": 0.728,
+ "step": 1072
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.82745115466189e-05,
+ "loss": 0.6924,
+ "step": 1073
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8270878440160496e-05,
+ "loss": 0.7954,
+ "step": 1074
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8267241874815312e-05,
+ "loss": 0.7522,
+ "step": 1075
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8263601852104152e-05,
+ "loss": 0.8078,
+ "step": 1076
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8259958373549285e-05,
+ "loss": 0.7274,
+ "step": 1077
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8256311440674407e-05,
+ "loss": 0.8167,
+ "step": 1078
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8252661055004672e-05,
+ "loss": 0.7979,
+ "step": 1079
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8249007218066674e-05,
+ "loss": 0.7812,
+ "step": 1080
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.824534993138845e-05,
+ "loss": 0.698,
+ "step": 1081
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8241689196499475e-05,
+ "loss": 0.6768,
+ "step": 1082
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.823802501493068e-05,
+ "loss": 0.7525,
+ "step": 1083
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.823435738821442e-05,
+ "loss": 0.6681,
+ "step": 1084
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8230686317884502e-05,
+ "loss": 0.7599,
+ "step": 1085
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.822701180547617e-05,
+ "loss": 0.811,
+ "step": 1086
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8223333852526107e-05,
+ "loss": 0.7851,
+ "step": 1087
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8219652460572437e-05,
+ "loss": 0.8404,
+ "step": 1088
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8215967631154718e-05,
+ "loss": 0.6651,
+ "step": 1089
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.821227936581395e-05,
+ "loss": 0.6442,
+ "step": 1090
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8208587666092566e-05,
+ "loss": 0.7393,
+ "step": 1091
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.820489253353444e-05,
+ "loss": 0.7532,
+ "step": 1092
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8201193969684875e-05,
+ "loss": 0.6284,
+ "step": 1093
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.819749197609062e-05,
+ "loss": 0.6704,
+ "step": 1094
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.819378655429984e-05,
+ "loss": 0.771,
+ "step": 1095
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8190077705862155e-05,
+ "loss": 0.7851,
+ "step": 1096
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8186365432328605e-05,
+ "loss": 0.7401,
+ "step": 1097
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.818264973525166e-05,
+ "loss": 0.8184,
+ "step": 1098
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8178930616185236e-05,
+ "loss": 0.7038,
+ "step": 1099
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8175208076684664e-05,
+ "loss": 0.6355,
+ "step": 1100
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8171482118306712e-05,
+ "loss": 0.7389,
+ "step": 1101
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8167752742609585e-05,
+ "loss": 0.7054,
+ "step": 1102
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8164019951152903e-05,
+ "loss": 0.6794,
+ "step": 1103
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8160283745497725e-05,
+ "loss": 0.7975,
+ "step": 1104
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8156544127206536e-05,
+ "loss": 0.7066,
+ "step": 1105
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.815280109784325e-05,
+ "loss": 0.6484,
+ "step": 1106
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8149054658973194e-05,
+ "loss": 0.8113,
+ "step": 1107
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8145304812163137e-05,
+ "loss": 0.6978,
+ "step": 1108
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.814155155898127e-05,
+ "loss": 0.761,
+ "step": 1109
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8137794900997204e-05,
+ "loss": 0.8179,
+ "step": 1110
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8134034839781972e-05,
+ "loss": 0.7637,
+ "step": 1111
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.813027137690804e-05,
+ "loss": 0.8215,
+ "step": 1112
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8126504513949287e-05,
+ "loss": 0.7518,
+ "step": 1113
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.812273425248102e-05,
+ "loss": 0.7099,
+ "step": 1114
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.811896059407996e-05,
+ "loss": 0.7956,
+ "step": 1115
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.811518354032426e-05,
+ "loss": 0.7585,
+ "step": 1116
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.811140309279348e-05,
+ "loss": 0.7648,
+ "step": 1117
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.810761925306861e-05,
+ "loss": 0.7486,
+ "step": 1118
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8103832022732056e-05,
+ "loss": 0.7476,
+ "step": 1119
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8100041403367635e-05,
+ "loss": 0.806,
+ "step": 1120
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8096247396560585e-05,
+ "loss": 0.7425,
+ "step": 1121
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.809245000389757e-05,
+ "loss": 0.7315,
+ "step": 1122
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8088649226966658e-05,
+ "loss": 0.7312,
+ "step": 1123
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8084845067357334e-05,
+ "loss": 0.7593,
+ "step": 1124
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8081037526660505e-05,
+ "loss": 0.7257,
+ "step": 1125
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8077226606468487e-05,
+ "loss": 0.76,
+ "step": 1126
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8073412308375e-05,
+ "loss": 0.7762,
+ "step": 1127
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8069594633975197e-05,
+ "loss": 0.6891,
+ "step": 1128
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8065773584865623e-05,
+ "loss": 0.8263,
+ "step": 1129
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8061949162644256e-05,
+ "loss": 0.8209,
+ "step": 1130
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.805812136891046e-05,
+ "loss": 0.6868,
+ "step": 1131
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8054290205265018e-05,
+ "loss": 0.7798,
+ "step": 1132
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8050455673310142e-05,
+ "loss": 0.7306,
+ "step": 1133
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.804661777464942e-05,
+ "loss": 0.7785,
+ "step": 1134
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.804277651088787e-05,
+ "loss": 0.7888,
+ "step": 1135
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.803893188363191e-05,
+ "loss": 0.8298,
+ "step": 1136
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8035083894489364e-05,
+ "loss": 0.7472,
+ "step": 1137
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8031232545069468e-05,
+ "loss": 0.7292,
+ "step": 1138
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8027377836982857e-05,
+ "loss": 0.7631,
+ "step": 1139
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.802351977184157e-05,
+ "loss": 0.7778,
+ "step": 1140
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8019658351259056e-05,
+ "loss": 0.8064,
+ "step": 1141
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.801579357685016e-05,
+ "loss": 0.69,
+ "step": 1142
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8011925450231135e-05,
+ "loss": 0.799,
+ "step": 1143
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8008053973019638e-05,
+ "loss": 0.7279,
+ "step": 1144
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8004179146834712e-05,
+ "loss": 0.813,
+ "step": 1145
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8000300973296826e-05,
+ "loss": 0.8076,
+ "step": 1146
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7996419454027828e-05,
+ "loss": 0.7896,
+ "step": 1147
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.799253459065097e-05,
+ "loss": 0.7062,
+ "step": 1148
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.79886463847909e-05,
+ "loss": 0.7279,
+ "step": 1149
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7984754838073685e-05,
+ "loss": 0.7709,
+ "step": 1150
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7980859952126755e-05,
+ "loss": 0.7202,
+ "step": 1151
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7976961728578965e-05,
+ "loss": 0.792,
+ "step": 1152
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7973060169060546e-05,
+ "loss": 0.7438,
+ "step": 1153
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.796915527520314e-05,
+ "loss": 0.8379,
+ "step": 1154
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.796524704863977e-05,
+ "loss": 0.7814,
+ "step": 1155
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7961335491004867e-05,
+ "loss": 0.6936,
+ "step": 1156
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.795742060393424e-05,
+ "loss": 0.7284,
+ "step": 1157
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7953502389065098e-05,
+ "loss": 0.7353,
+ "step": 1158
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7949580848036045e-05,
+ "loss": 0.6938,
+ "step": 1159
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7945655982487068e-05,
+ "loss": 0.5587,
+ "step": 1160
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7941727794059553e-05,
+ "loss": 0.8166,
+ "step": 1161
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7937796284396264e-05,
+ "loss": 0.7739,
+ "step": 1162
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7933861455141365e-05,
+ "loss": 0.7088,
+ "step": 1163
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7929923307940406e-05,
+ "loss": 0.7137,
+ "step": 1164
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.792598184444032e-05,
+ "loss": 0.696,
+ "step": 1165
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7922037066289432e-05,
+ "loss": 0.6836,
+ "step": 1166
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7918088975137445e-05,
+ "loss": 0.7928,
+ "step": 1167
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.791413757263546e-05,
+ "loss": 0.7516,
+ "step": 1168
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7910182860435954e-05,
+ "loss": 0.8001,
+ "step": 1169
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.790622484019279e-05,
+ "loss": 0.7689,
+ "step": 1170
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7902263513561214e-05,
+ "loss": 0.8287,
+ "step": 1171
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7898298882197854e-05,
+ "loss": 0.752,
+ "step": 1172
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7894330947760728e-05,
+ "loss": 0.8386,
+ "step": 1173
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7890359711909222e-05,
+ "loss": 0.7983,
+ "step": 1174
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.788638517630411e-05,
+ "loss": 0.7875,
+ "step": 1175
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7882407342607547e-05,
+ "loss": 0.7106,
+ "step": 1176
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7878426212483066e-05,
+ "loss": 0.724,
+ "step": 1177
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7874441787595576e-05,
+ "loss": 0.7989,
+ "step": 1178
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7870454069611375e-05,
+ "loss": 0.7751,
+ "step": 1179
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7866463060198114e-05,
+ "loss": 0.8493,
+ "step": 1180
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.786246876102485e-05,
+ "loss": 0.7216,
+ "step": 1181
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.785847117376199e-05,
+ "loss": 0.6311,
+ "step": 1182
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7854470300081338e-05,
+ "loss": 0.7517,
+ "step": 1183
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7850466141656057e-05,
+ "loss": 0.7304,
+ "step": 1184
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7846458700160685e-05,
+ "loss": 0.6444,
+ "step": 1185
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.784244797727114e-05,
+ "loss": 0.6496,
+ "step": 1186
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7838433974664714e-05,
+ "loss": 0.8598,
+ "step": 1187
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7834416694020057e-05,
+ "loss": 0.7715,
+ "step": 1188
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7830396137017202e-05,
+ "loss": 0.8455,
+ "step": 1189
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7826372305337552e-05,
+ "loss": 0.7332,
+ "step": 1190
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7822345200663874e-05,
+ "loss": 0.7737,
+ "step": 1191
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.78183148246803e-05,
+ "loss": 0.7535,
+ "step": 1192
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7814281179072345e-05,
+ "loss": 0.7019,
+ "step": 1193
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7810244265526873e-05,
+ "loss": 0.7675,
+ "step": 1194
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.780620408573213e-05,
+ "loss": 0.6886,
+ "step": 1195
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7802160641377723e-05,
+ "loss": 0.7264,
+ "step": 1196
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7798113934154623e-05,
+ "loss": 0.7489,
+ "step": 1197
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7794063965755157e-05,
+ "loss": 0.8437,
+ "step": 1198
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7790010737873035e-05,
+ "loss": 0.7061,
+ "step": 1199
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.778595425220331e-05,
+ "loss": 0.7381,
+ "step": 1200
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.778189451044242e-05,
+ "loss": 0.7643,
+ "step": 1201
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.777783151428814e-05,
+ "loss": 0.7473,
+ "step": 1202
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.777376526543962e-05,
+ "loss": 0.7858,
+ "step": 1203
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7769695765597373e-05,
+ "loss": 0.842,
+ "step": 1204
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.776562301646326e-05,
+ "loss": 0.8145,
+ "step": 1205
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.776154701974051e-05,
+ "loss": 0.6851,
+ "step": 1206
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.775746777713371e-05,
+ "loss": 0.7339,
+ "step": 1207
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.77533852903488e-05,
+ "loss": 0.7805,
+ "step": 1208
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7749299561093076e-05,
+ "loss": 0.7627,
+ "step": 1209
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7745210591075198e-05,
+ "loss": 0.7586,
+ "step": 1210
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7741118382005172e-05,
+ "loss": 0.7866,
+ "step": 1211
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7737022935594362e-05,
+ "loss": 0.7315,
+ "step": 1212
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7732924253555487e-05,
+ "loss": 0.7479,
+ "step": 1213
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7728822337602625e-05,
+ "loss": 0.6625,
+ "step": 1214
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.772471718945119e-05,
+ "loss": 0.6434,
+ "step": 1215
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7720608810817962e-05,
+ "loss": 0.7512,
+ "step": 1216
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7716497203421072e-05,
+ "loss": 0.7558,
+ "step": 1217
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7712382368979987e-05,
+ "loss": 0.6911,
+ "step": 1218
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7708264309215543e-05,
+ "loss": 0.7034,
+ "step": 1219
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7704143025849907e-05,
+ "loss": 0.7112,
+ "step": 1220
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.770001852060661e-05,
+ "loss": 0.7104,
+ "step": 1221
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7695890795210517e-05,
+ "loss": 0.8123,
+ "step": 1222
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7691759851387852e-05,
+ "loss": 0.7715,
+ "step": 1223
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.768762569086617e-05,
+ "loss": 0.8117,
+ "step": 1224
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7683488315374386e-05,
+ "loss": 0.6724,
+ "step": 1225
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7679347726642752e-05,
+ "loss": 0.7527,
+ "step": 1226
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7675203926402863e-05,
+ "loss": 0.7674,
+ "step": 1227
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.767105691638766e-05,
+ "loss": 0.7519,
+ "step": 1228
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7666906698331426e-05,
+ "loss": 0.8198,
+ "step": 1229
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7662753273969786e-05,
+ "loss": 0.7681,
+ "step": 1230
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.76585966450397e-05,
+ "loss": 0.6802,
+ "step": 1231
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7654436813279477e-05,
+ "loss": 0.701,
+ "step": 1232
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.765027378042876e-05,
+ "loss": 0.6974,
+ "step": 1233
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7646107548228536e-05,
+ "loss": 0.682,
+ "step": 1234
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.764193811842112e-05,
+ "loss": 0.7015,
+ "step": 1235
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7637765492750172e-05,
+ "loss": 0.8293,
+ "step": 1236
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.763358967296069e-05,
+ "loss": 0.7684,
+ "step": 1237
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7629410660799e-05,
+ "loss": 0.7894,
+ "step": 1238
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.762522845801277e-05,
+ "loss": 0.8083,
+ "step": 1239
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7621043066351e-05,
+ "loss": 0.7741,
+ "step": 1240
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7616854487564024e-05,
+ "loss": 0.7706,
+ "step": 1241
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7612662723403506e-05,
+ "loss": 0.781,
+ "step": 1242
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7608467775622448e-05,
+ "loss": 0.7443,
+ "step": 1243
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7604269645975175e-05,
+ "loss": 0.6839,
+ "step": 1244
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7600068336217355e-05,
+ "loss": 0.7835,
+ "step": 1245
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.759586384810597e-05,
+ "loss": 0.7377,
+ "step": 1246
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.759165618339935e-05,
+ "loss": 0.7998,
+ "step": 1247
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.758744534385713e-05,
+ "loss": 0.6759,
+ "step": 1248
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7583231331240297e-05,
+ "loss": 0.7102,
+ "step": 1249
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.757901414731115e-05,
+ "loss": 0.7988,
+ "step": 1250
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7574793793833312e-05,
+ "loss": 0.7721,
+ "step": 1251
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.757057027257175e-05,
+ "loss": 0.775,
+ "step": 1252
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7566343585292732e-05,
+ "loss": 0.7654,
+ "step": 1253
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7562113733763868e-05,
+ "loss": 0.7982,
+ "step": 1254
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7557880719754082e-05,
+ "loss": 0.6835,
+ "step": 1255
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7553644545033627e-05,
+ "loss": 0.7066,
+ "step": 1256
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7549405211374072e-05,
+ "loss": 0.6961,
+ "step": 1257
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7545162720548304e-05,
+ "loss": 0.7781,
+ "step": 1258
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7540917074330543e-05,
+ "loss": 0.8361,
+ "step": 1259
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7536668274496317e-05,
+ "loss": 0.7445,
+ "step": 1260
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7532416322822473e-05,
+ "loss": 0.7601,
+ "step": 1261
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.752816122108719e-05,
+ "loss": 0.8046,
+ "step": 1262
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7523902971069944e-05,
+ "loss": 0.6961,
+ "step": 1263
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7519641574551548e-05,
+ "loss": 0.7151,
+ "step": 1264
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7515377033314113e-05,
+ "loss": 0.7619,
+ "step": 1265
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7511109349141072e-05,
+ "loss": 0.6815,
+ "step": 1266
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7506838523817183e-05,
+ "loss": 0.7908,
+ "step": 1267
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.75025645591285e-05,
+ "loss": 0.8561,
+ "step": 1268
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7498287456862398e-05,
+ "loss": 0.7183,
+ "step": 1269
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7494007218807568e-05,
+ "loss": 0.8092,
+ "step": 1270
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7489723846754e-05,
+ "loss": 0.7668,
+ "step": 1271
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7485437342493017e-05,
+ "loss": 0.6592,
+ "step": 1272
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7481147707817227e-05,
+ "loss": 0.6918,
+ "step": 1273
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.747685494452056e-05,
+ "loss": 0.6835,
+ "step": 1274
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7472559054398256e-05,
+ "loss": 0.7846,
+ "step": 1275
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7468260039246852e-05,
+ "loss": 0.7514,
+ "step": 1276
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.746395790086421e-05,
+ "loss": 0.7367,
+ "step": 1277
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7459652641049473e-05,
+ "loss": 0.6817,
+ "step": 1278
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7455344261603116e-05,
+ "loss": 0.7662,
+ "step": 1279
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.74510327643269e-05,
+ "loss": 0.6066,
+ "step": 1280
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7446718151023894e-05,
+ "loss": 0.7229,
+ "step": 1281
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7442400423498477e-05,
+ "loss": 0.8181,
+ "step": 1282
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7438079583556326e-05,
+ "loss": 0.6709,
+ "step": 1283
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7433755633004414e-05,
+ "loss": 0.7567,
+ "step": 1284
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7429428573651023e-05,
+ "loss": 0.6823,
+ "step": 1285
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7425098407305735e-05,
+ "loss": 0.6637,
+ "step": 1286
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.742076513577942e-05,
+ "loss": 0.7689,
+ "step": 1287
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.741642876088426e-05,
+ "loss": 0.7216,
+ "step": 1288
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7412089284433734e-05,
+ "loss": 0.7191,
+ "step": 1289
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7407746708242606e-05,
+ "loss": 0.7809,
+ "step": 1290
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7403401034126954e-05,
+ "loss": 0.7626,
+ "step": 1291
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.739905226390413e-05,
+ "loss": 0.74,
+ "step": 1292
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7394700399392804e-05,
+ "loss": 0.7404,
+ "step": 1293
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.739034544241292e-05,
+ "loss": 0.8039,
+ "step": 1294
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7385987394785723e-05,
+ "loss": 0.7886,
+ "step": 1295
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.738162625833376e-05,
+ "loss": 0.83,
+ "step": 1296
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7377262034880854e-05,
+ "loss": 0.7622,
+ "step": 1297
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.737289472625213e-05,
+ "loss": 0.7373,
+ "step": 1298
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7368524334273998e-05,
+ "loss": 0.8,
+ "step": 1299
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7364150860774158e-05,
+ "loss": 0.6796,
+ "step": 1300
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7359774307581602e-05,
+ "loss": 0.6707,
+ "step": 1301
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.73553946765266e-05,
+ "loss": 0.8122,
+ "step": 1302
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7351011969440728e-05,
+ "loss": 0.758,
+ "step": 1303
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7346626188156828e-05,
+ "loss": 0.8516,
+ "step": 1304
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.734223733450904e-05,
+ "loss": 0.7302,
+ "step": 1305
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7337845410332784e-05,
+ "loss": 0.8391,
+ "step": 1306
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.733345041746477e-05,
+ "loss": 0.6612,
+ "step": 1307
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.732905235774298e-05,
+ "loss": 0.809,
+ "step": 1308
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.732465123300669e-05,
+ "loss": 0.6407,
+ "step": 1309
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7320247045096453e-05,
+ "loss": 0.7702,
+ "step": 1310
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7315839795854103e-05,
+ "loss": 0.7146,
+ "step": 1311
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7311429487122754e-05,
+ "loss": 0.8056,
+ "step": 1312
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.73070161207468e-05,
+ "loss": 0.7479,
+ "step": 1313
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7302599698571914e-05,
+ "loss": 0.7566,
+ "step": 1314
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7298180222445043e-05,
+ "loss": 0.7548,
+ "step": 1315
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7293757694214423e-05,
+ "loss": 0.7697,
+ "step": 1316
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7289332115729554e-05,
+ "loss": 0.7356,
+ "step": 1317
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7284903488841212e-05,
+ "loss": 0.7369,
+ "step": 1318
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7280471815401453e-05,
+ "loss": 0.7302,
+ "step": 1319
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.727603709726361e-05,
+ "loss": 0.7024,
+ "step": 1320
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7271599336282285e-05,
+ "loss": 0.8187,
+ "step": 1321
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.726715853431335e-05,
+ "loss": 0.6998,
+ "step": 1322
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7262714693213948e-05,
+ "loss": 0.7205,
+ "step": 1323
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7258267814842504e-05,
+ "loss": 0.708,
+ "step": 1324
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7253817901058698e-05,
+ "loss": 0.7971,
+ "step": 1325
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7249364953723492e-05,
+ "loss": 0.8136,
+ "step": 1326
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7244908974699112e-05,
+ "loss": 0.6568,
+ "step": 1327
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7240449965849043e-05,
+ "loss": 0.7022,
+ "step": 1328
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7235987929038056e-05,
+ "loss": 0.7467,
+ "step": 1329
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.723152286613217e-05,
+ "loss": 0.6982,
+ "step": 1330
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7227054778998682e-05,
+ "loss": 0.6738,
+ "step": 1331
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.722258366950615e-05,
+ "loss": 0.7785,
+ "step": 1332
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.721810953952439e-05,
+ "loss": 0.7012,
+ "step": 1333
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7213632390924487e-05,
+ "loss": 0.8047,
+ "step": 1334
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.720915222557879e-05,
+ "loss": 0.7272,
+ "step": 1335
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7204669045360908e-05,
+ "loss": 0.7369,
+ "step": 1336
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7200182852145705e-05,
+ "loss": 0.7031,
+ "step": 1337
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7195693647809314e-05,
+ "loss": 0.6076,
+ "step": 1338
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7191201434229118e-05,
+ "loss": 0.747,
+ "step": 1339
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7186706213283773e-05,
+ "loss": 0.7329,
+ "step": 1340
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7182207986853176e-05,
+ "loss": 0.8219,
+ "step": 1341
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.717770675681849e-05,
+ "loss": 0.7208,
+ "step": 1342
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7173202525062133e-05,
+ "loss": 0.6827,
+ "step": 1343
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7168695293467775e-05,
+ "loss": 0.7263,
+ "step": 1344
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.716418506392035e-05,
+ "loss": 0.7702,
+ "step": 1345
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7159671838306033e-05,
+ "loss": 0.7706,
+ "step": 1346
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7155155618512255e-05,
+ "loss": 0.7676,
+ "step": 1347
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7150636406427713e-05,
+ "loss": 0.6808,
+ "step": 1348
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7146114203942332e-05,
+ "loss": 0.769,
+ "step": 1349
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7141589012947307e-05,
+ "loss": 0.7539,
+ "step": 1350
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7137060835335076e-05,
+ "loss": 0.7574,
+ "step": 1351
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7132529672999323e-05,
+ "loss": 0.8215,
+ "step": 1352
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.712799552783499e-05,
+ "loss": 0.7223,
+ "step": 1353
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.712345840173825e-05,
+ "loss": 0.8174,
+ "step": 1354
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7118918296606536e-05,
+ "loss": 0.7138,
+ "step": 1355
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7114375214338524e-05,
+ "loss": 0.7606,
+ "step": 1356
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7109829156834133e-05,
+ "loss": 0.7301,
+ "step": 1357
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7105280125994527e-05,
+ "loss": 0.6328,
+ "step": 1358
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7100728123722115e-05,
+ "loss": 0.8154,
+ "step": 1359
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7096173151920547e-05,
+ "loss": 0.6796,
+ "step": 1360
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7091615212494714e-05,
+ "loss": 0.7443,
+ "step": 1361
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.708705430735075e-05,
+ "loss": 0.7922,
+ "step": 1362
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7082490438396027e-05,
+ "loss": 0.7503,
+ "step": 1363
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.707792360753916e-05,
+ "loss": 0.8584,
+ "step": 1364
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.707335381669e-05,
+ "loss": 0.7546,
+ "step": 1365
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7068781067759637e-05,
+ "loss": 0.7975,
+ "step": 1366
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7064205362660397e-05,
+ "loss": 0.7861,
+ "step": 1367
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.705962670330584e-05,
+ "loss": 0.764,
+ "step": 1368
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7055045091610772e-05,
+ "loss": 0.7416,
+ "step": 1369
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7050460529491218e-05,
+ "loss": 0.853,
+ "step": 1370
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7045873018864452e-05,
+ "loss": 0.6929,
+ "step": 1371
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.704128256164897e-05,
+ "loss": 0.6582,
+ "step": 1372
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.70366891597645e-05,
+ "loss": 0.7042,
+ "step": 1373
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7032092815132013e-05,
+ "loss": 0.8193,
+ "step": 1374
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.70274935296737e-05,
+ "loss": 0.8375,
+ "step": 1375
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7022891305312987e-05,
+ "loss": 0.7241,
+ "step": 1376
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.701828614397452e-05,
+ "loss": 0.7602,
+ "step": 1377
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.7013678047584196e-05,
+ "loss": 0.793,
+ "step": 1378
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.7009067018069106e-05,
+ "loss": 0.6387,
+ "step": 1379
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.70044530573576e-05,
+ "loss": 0.6972,
+ "step": 1380
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6999836167379228e-05,
+ "loss": 0.7241,
+ "step": 1381
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6995216350064785e-05,
+ "loss": 0.7258,
+ "step": 1382
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6990593607346276e-05,
+ "loss": 0.7683,
+ "step": 1383
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.698596794115694e-05,
+ "loss": 0.7349,
+ "step": 1384
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.698133935343123e-05,
+ "loss": 0.7577,
+ "step": 1385
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6976707846104825e-05,
+ "loss": 0.7612,
+ "step": 1386
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6972073421114626e-05,
+ "loss": 0.7319,
+ "step": 1387
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.696743608039875e-05,
+ "loss": 0.7335,
+ "step": 1388
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6962795825896537e-05,
+ "loss": 0.747,
+ "step": 1389
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.695815265954855e-05,
+ "loss": 0.6887,
+ "step": 1390
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.695350658329655e-05,
+ "loss": 0.7299,
+ "step": 1391
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6948857599083543e-05,
+ "loss": 0.8458,
+ "step": 1392
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6944205708853735e-05,
+ "loss": 0.6917,
+ "step": 1393
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.693955091455254e-05,
+ "loss": 0.7914,
+ "step": 1394
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6934893218126607e-05,
+ "loss": 0.7189,
+ "step": 1395
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6930232621523785e-05,
+ "loss": 0.7274,
+ "step": 1396
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6925569126693135e-05,
+ "loss": 0.6961,
+ "step": 1397
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6920902735584938e-05,
+ "loss": 0.7771,
+ "step": 1398
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6916233450150675e-05,
+ "loss": 0.7864,
+ "step": 1399
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6911561272343054e-05,
+ "loss": 0.7757,
+ "step": 1400
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6906886204115974e-05,
+ "loss": 0.7609,
+ "step": 1401
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.690220824742456e-05,
+ "loss": 0.7352,
+ "step": 1402
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6897527404225128e-05,
+ "loss": 0.7299,
+ "step": 1403
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6892843676475212e-05,
+ "loss": 0.7339,
+ "step": 1404
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6888157066133555e-05,
+ "loss": 0.7984,
+ "step": 1405
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6883467575160096e-05,
+ "loss": 0.799,
+ "step": 1406
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6878775205515983e-05,
+ "loss": 0.7204,
+ "step": 1407
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6874079959163573e-05,
+ "loss": 0.7179,
+ "step": 1408
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6869381838066412e-05,
+ "loss": 0.8097,
+ "step": 1409
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6864680844189268e-05,
+ "loss": 0.6365,
+ "step": 1410
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.685997697949809e-05,
+ "loss": 0.7405,
+ "step": 1411
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6855270245960044e-05,
+ "loss": 0.7674,
+ "step": 1412
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6850560645543485e-05,
+ "loss": 0.7562,
+ "step": 1413
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6845848180217978e-05,
+ "loss": 0.7177,
+ "step": 1414
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.684113285195427e-05,
+ "loss": 0.8248,
+ "step": 1415
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6836414662724316e-05,
+ "loss": 0.7401,
+ "step": 1416
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6831693614501272e-05,
+ "loss": 0.7777,
+ "step": 1417
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6826969709259476e-05,
+ "loss": 0.6543,
+ "step": 1418
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6822242948974475e-05,
+ "loss": 0.7595,
+ "step": 1419
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6817513335623e-05,
+ "loss": 0.7058,
+ "step": 1420
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6812780871182976e-05,
+ "loss": 0.6764,
+ "step": 1421
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6808045557633528e-05,
+ "loss": 0.6154,
+ "step": 1422
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6803307396954962e-05,
+ "loss": 0.6798,
+ "step": 1423
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6798566391128782e-05,
+ "loss": 0.8107,
+ "step": 1424
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6793822542137683e-05,
+ "loss": 0.7457,
+ "step": 1425
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6789075851965542e-05,
+ "loss": 0.7191,
+ "step": 1426
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6784326322597427e-05,
+ "loss": 0.6842,
+ "step": 1427
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6779573956019596e-05,
+ "loss": 0.7515,
+ "step": 1428
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6774818754219495e-05,
+ "loss": 0.7088,
+ "step": 1429
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.677006071918575e-05,
+ "loss": 0.7678,
+ "step": 1430
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.676529985290817e-05,
+ "loss": 0.5985,
+ "step": 1431
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6760536157377754e-05,
+ "loss": 0.6956,
+ "step": 1432
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.675576963458669e-05,
+ "loss": 0.729,
+ "step": 1433
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.675100028652833e-05,
+ "loss": 0.8609,
+ "step": 1434
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.674622811519723e-05,
+ "loss": 0.788,
+ "step": 1435
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6741453122589107e-05,
+ "loss": 0.7569,
+ "step": 1436
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.673667531070087e-05,
+ "loss": 0.8529,
+ "step": 1437
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6731894681530602e-05,
+ "loss": 0.8312,
+ "step": 1438
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.672711123707756e-05,
+ "loss": 0.8155,
+ "step": 1439
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6722324979342194e-05,
+ "loss": 0.7782,
+ "step": 1440
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6717535910326108e-05,
+ "loss": 0.6951,
+ "step": 1441
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6712744032032103e-05,
+ "loss": 0.7403,
+ "step": 1442
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6707949346464147e-05,
+ "loss": 0.7348,
+ "step": 1443
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6703151855627373e-05,
+ "loss": 0.7888,
+ "step": 1444
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.66983515615281e-05,
+ "loss": 0.6957,
+ "step": 1445
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6693548466173812e-05,
+ "loss": 0.7451,
+ "step": 1446
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6688742571573167e-05,
+ "loss": 0.7986,
+ "step": 1447
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6683933879735993e-05,
+ "loss": 0.7837,
+ "step": 1448
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6679122392673284e-05,
+ "loss": 0.7383,
+ "step": 1449
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6674308112397216e-05,
+ "loss": 0.6651,
+ "step": 1450
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6669491040921123e-05,
+ "loss": 0.6422,
+ "step": 1451
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.66646711802595e-05,
+ "loss": 0.7201,
+ "step": 1452
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.665984853242802e-05,
+ "loss": 0.7486,
+ "step": 1453
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6655023099443516e-05,
+ "loss": 0.8014,
+ "step": 1454
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6650194883323995e-05,
+ "loss": 0.6947,
+ "step": 1455
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6645363886088607e-05,
+ "loss": 0.6796,
+ "step": 1456
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.664053010975769e-05,
+ "loss": 0.8098,
+ "step": 1457
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.663569355635273e-05,
+ "loss": 0.8203,
+ "step": 1458
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6630854227896364e-05,
+ "loss": 0.7075,
+ "step": 1459
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.662601212641242e-05,
+ "loss": 0.8253,
+ "step": 1460
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6621167253925857e-05,
+ "loss": 0.7236,
+ "step": 1461
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6616319612462813e-05,
+ "loss": 0.7418,
+ "step": 1462
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6611469204050566e-05,
+ "loss": 0.7383,
+ "step": 1463
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6606616030717563e-05,
+ "loss": 0.7783,
+ "step": 1464
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6601760094493408e-05,
+ "loss": 0.7682,
+ "step": 1465
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.659690139740885e-05,
+ "loss": 0.7362,
+ "step": 1466
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6592039941495803e-05,
+ "loss": 0.7953,
+ "step": 1467
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6587175728787332e-05,
+ "loss": 0.6928,
+ "step": 1468
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6582308761317652e-05,
+ "loss": 0.7382,
+ "step": 1469
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6577439041122133e-05,
+ "loss": 0.7993,
+ "step": 1470
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6572566570237293e-05,
+ "loss": 0.7542,
+ "step": 1471
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6567691350700805e-05,
+ "loss": 0.681,
+ "step": 1472
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.656281338455149e-05,
+ "loss": 0.703,
+ "step": 1473
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6557932673829313e-05,
+ "loss": 0.7336,
+ "step": 1474
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6553049220575392e-05,
+ "loss": 0.7728,
+ "step": 1475
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6548163026831995e-05,
+ "loss": 0.8151,
+ "step": 1476
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6543274094642522e-05,
+ "loss": 0.7633,
+ "step": 1477
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6538382426051533e-05,
+ "loss": 0.6718,
+ "step": 1478
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6533488023104732e-05,
+ "loss": 0.8212,
+ "step": 1479
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6528590887848953e-05,
+ "loss": 0.8092,
+ "step": 1480
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6523691022332184e-05,
+ "loss": 0.7717,
+ "step": 1481
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6518788428603553e-05,
+ "loss": 0.6492,
+ "step": 1482
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6513883108713326e-05,
+ "loss": 0.8912,
+ "step": 1483
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6508975064712916e-05,
+ "loss": 0.7717,
+ "step": 1484
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6504064298654864e-05,
+ "loss": 0.7489,
+ "step": 1485
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6499150812592858e-05,
+ "loss": 0.7864,
+ "step": 1486
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6494234608581722e-05,
+ "loss": 0.7487,
+ "step": 1487
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6489315688677416e-05,
+ "loss": 0.7656,
+ "step": 1488
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6484394054937033e-05,
+ "loss": 0.7326,
+ "step": 1489
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.647946970941881e-05,
+ "loss": 0.6866,
+ "step": 1490
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.64745426541821e-05,
+ "loss": 0.7713,
+ "step": 1491
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.646961289128741e-05,
+ "loss": 0.7923,
+ "step": 1492
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6464680422796367e-05,
+ "loss": 0.721,
+ "step": 1493
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6459745250771735e-05,
+ "loss": 0.6573,
+ "step": 1494
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.64548073772774e-05,
+ "loss": 0.6694,
+ "step": 1495
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.644986680437839e-05,
+ "loss": 0.7719,
+ "step": 1496
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6444923534140856e-05,
+ "loss": 0.6449,
+ "step": 1497
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.643997756863207e-05,
+ "loss": 0.7626,
+ "step": 1498
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.643502890992045e-05,
+ "loss": 0.8497,
+ "step": 1499
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6430077560075513e-05,
+ "loss": 0.8186,
+ "step": 1500
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.642512352116793e-05,
+ "loss": 0.7802,
+ "step": 1501
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6420166795269476e-05,
+ "loss": 0.7119,
+ "step": 1502
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.641520738445306e-05,
+ "loss": 0.7841,
+ "step": 1503
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.641024529079271e-05,
+ "loss": 0.7351,
+ "step": 1504
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6405280516363576e-05,
+ "loss": 0.7318,
+ "step": 1505
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.640031306324193e-05,
+ "loss": 0.6854,
+ "step": 1506
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.639534293350517e-05,
+ "loss": 0.7275,
+ "step": 1507
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6390370129231796e-05,
+ "loss": 0.7516,
+ "step": 1508
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6385394652501444e-05,
+ "loss": 0.6772,
+ "step": 1509
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6380416505394866e-05,
+ "loss": 0.7683,
+ "step": 1510
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.637543568999392e-05,
+ "loss": 0.7892,
+ "step": 1511
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6370452208381595e-05,
+ "loss": 0.6444,
+ "step": 1512
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6365466062641974e-05,
+ "loss": 0.6967,
+ "step": 1513
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6360477254860275e-05,
+ "loss": 0.7492,
+ "step": 1514
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6355485787122814e-05,
+ "loss": 0.8201,
+ "step": 1515
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6350491661517032e-05,
+ "loss": 0.8195,
+ "step": 1516
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6345494880131476e-05,
+ "loss": 0.7622,
+ "step": 1517
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6340495445055797e-05,
+ "loss": 0.6371,
+ "step": 1518
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.633549335838077e-05,
+ "loss": 0.7147,
+ "step": 1519
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6330488622198266e-05,
+ "loss": 0.7477,
+ "step": 1520
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.632548123860127e-05,
+ "loss": 0.7144,
+ "step": 1521
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.632047120968387e-05,
+ "loss": 0.7153,
+ "step": 1522
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6315458537541272e-05,
+ "loss": 0.6666,
+ "step": 1523
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6310443224269767e-05,
+ "loss": 0.7569,
+ "step": 1524
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6305425271966772e-05,
+ "loss": 0.7888,
+ "step": 1525
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6300404682730793e-05,
+ "loss": 0.72,
+ "step": 1526
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6295381458661447e-05,
+ "loss": 0.7176,
+ "step": 1527
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6290355601859443e-05,
+ "loss": 0.731,
+ "step": 1528
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6285327114426608e-05,
+ "loss": 0.6689,
+ "step": 1529
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.628029599846585e-05,
+ "loss": 0.809,
+ "step": 1530
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.627526225608119e-05,
+ "loss": 0.7013,
+ "step": 1531
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.627022588937774e-05,
+ "loss": 0.7828,
+ "step": 1532
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6265186900461715e-05,
+ "loss": 0.7493,
+ "step": 1533
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6260145291440415e-05,
+ "loss": 0.6385,
+ "step": 1534
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6255101064422252e-05,
+ "loss": 0.5647,
+ "step": 1535
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6250054221516723e-05,
+ "loss": 0.734,
+ "step": 1536
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6245004764834423e-05,
+ "loss": 0.6514,
+ "step": 1537
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6239952696487033e-05,
+ "loss": 0.6952,
+ "step": 1538
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6234898018587336e-05,
+ "loss": 0.7515,
+ "step": 1539
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.62298407332492e-05,
+ "loss": 0.7726,
+ "step": 1540
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6224780842587586e-05,
+ "loss": 0.792,
+ "step": 1541
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6219718348718544e-05,
+ "loss": 0.6739,
+ "step": 1542
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6214653253759206e-05,
+ "loss": 0.7159,
+ "step": 1543
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6209585559827806e-05,
+ "loss": 0.745,
+ "step": 1544
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6204515269043653e-05,
+ "loss": 0.857,
+ "step": 1545
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6199442383527148e-05,
+ "loss": 0.7269,
+ "step": 1546
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.619436690539977e-05,
+ "loss": 0.7485,
+ "step": 1547
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6189288836784097e-05,
+ "loss": 0.7658,
+ "step": 1548
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6184208179803772e-05,
+ "loss": 0.833,
+ "step": 1549
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.617912493658353e-05,
+ "loss": 0.6926,
+ "step": 1550
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.617403910924919e-05,
+ "loss": 0.6711,
+ "step": 1551
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6168950699927648e-05,
+ "loss": 0.7232,
+ "step": 1552
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6163859710746878e-05,
+ "loss": 0.7606,
+ "step": 1553
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6158766143835934e-05,
+ "loss": 0.7212,
+ "step": 1554
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6153670001324955e-05,
+ "loss": 0.6983,
+ "step": 1555
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6148571285345146e-05,
+ "loss": 0.692,
+ "step": 1556
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6143469998028792e-05,
+ "loss": 0.8145,
+ "step": 1557
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.613836614150926e-05,
+ "loss": 0.6441,
+ "step": 1558
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.613325971792098e-05,
+ "loss": 0.683,
+ "step": 1559
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6128150729399467e-05,
+ "loss": 0.7126,
+ "step": 1560
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6123039178081303e-05,
+ "loss": 0.738,
+ "step": 1561
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6117925066104137e-05,
+ "loss": 0.8437,
+ "step": 1562
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.61128083956067e-05,
+ "loss": 0.7628,
+ "step": 1563
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6107689168728778e-05,
+ "loss": 0.7351,
+ "step": 1564
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.610256738761125e-05,
+ "loss": 0.6617,
+ "step": 1565
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6097443054396036e-05,
+ "loss": 0.8093,
+ "step": 1566
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.609231617122614e-05,
+ "loss": 0.6898,
+ "step": 1567
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6087186740245626e-05,
+ "loss": 0.8054,
+ "step": 1568
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6082054763599627e-05,
+ "loss": 0.6924,
+ "step": 1569
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6076920243434338e-05,
+ "loss": 0.7306,
+ "step": 1570
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.607178318189702e-05,
+ "loss": 0.7167,
+ "step": 1571
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6066643581135994e-05,
+ "loss": 0.8181,
+ "step": 1572
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6061501443300644e-05,
+ "loss": 0.7861,
+ "step": 1573
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.605635677054142e-05,
+ "loss": 0.7629,
+ "step": 1574
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.605120956500982e-05,
+ "loss": 0.6616,
+ "step": 1575
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6046059828858415e-05,
+ "loss": 0.7085,
+ "step": 1576
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6040907564240823e-05,
+ "loss": 0.792,
+ "step": 1577
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.603575277331173e-05,
+ "loss": 0.8169,
+ "step": 1578
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.6030595458226872e-05,
+ "loss": 0.6916,
+ "step": 1579
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.6025435621143036e-05,
+ "loss": 0.7128,
+ "step": 1580
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.6020273264218076e-05,
+ "loss": 0.6661,
+ "step": 1581
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.6015108389610887e-05,
+ "loss": 0.7186,
+ "step": 1582
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.600994099948143e-05,
+ "loss": 0.7503,
+ "step": 1583
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.60047710959907e-05,
+ "loss": 0.679,
+ "step": 1584
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5999598681300766e-05,
+ "loss": 0.7657,
+ "step": 1585
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.599442375757473e-05,
+ "loss": 0.7859,
+ "step": 1586
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5989246326976746e-05,
+ "loss": 0.7219,
+ "step": 1587
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5984066391672025e-05,
+ "loss": 0.705,
+ "step": 1588
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5978883953826814e-05,
+ "loss": 0.7309,
+ "step": 1589
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.597369901560841e-05,
+ "loss": 0.7308,
+ "step": 1590
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.596851157918517e-05,
+ "loss": 0.7455,
+ "step": 1591
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5963321646726468e-05,
+ "loss": 0.7335,
+ "step": 1592
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5958129220402745e-05,
+ "loss": 0.7141,
+ "step": 1593
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5952934302385477e-05,
+ "loss": 0.7449,
+ "step": 1594
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.594773689484718e-05,
+ "loss": 0.6849,
+ "step": 1595
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5942536999961416e-05,
+ "loss": 0.7443,
+ "step": 1596
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.593733461990278e-05,
+ "loss": 0.808,
+ "step": 1597
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5932129756846915e-05,
+ "loss": 0.7715,
+ "step": 1598
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5926922412970495e-05,
+ "loss": 0.6851,
+ "step": 1599
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5921712590451238e-05,
+ "loss": 0.6711,
+ "step": 1600
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5916500291467892e-05,
+ "loss": 0.7635,
+ "step": 1601
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.591128551820025e-05,
+ "loss": 0.771,
+ "step": 1602
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5906068272829122e-05,
+ "loss": 0.7836,
+ "step": 1603
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.590084855753637e-05,
+ "loss": 0.7583,
+ "step": 1604
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.589562637450489e-05,
+ "loss": 0.7267,
+ "step": 1605
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.589040172591859e-05,
+ "loss": 0.647,
+ "step": 1606
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5885174613962427e-05,
+ "loss": 0.8862,
+ "step": 1607
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.587994504082238e-05,
+ "loss": 0.7666,
+ "step": 1608
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5874713008685464e-05,
+ "loss": 0.7324,
+ "step": 1609
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.586947851973972e-05,
+ "loss": 0.8052,
+ "step": 1610
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5864241576174205e-05,
+ "loss": 0.6958,
+ "step": 1611
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5859002180179023e-05,
+ "loss": 0.6818,
+ "step": 1612
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5853760333945282e-05,
+ "loss": 0.6681,
+ "step": 1613
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.584851603966514e-05,
+ "loss": 0.687,
+ "step": 1614
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5843269299531748e-05,
+ "loss": 0.66,
+ "step": 1615
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5838020115739308e-05,
+ "loss": 0.7152,
+ "step": 1616
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.583276849048303e-05,
+ "loss": 0.7761,
+ "step": 1617
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5827514425959143e-05,
+ "loss": 0.712,
+ "step": 1618
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5822257924364902e-05,
+ "loss": 0.7366,
+ "step": 1619
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.581699898789858e-05,
+ "loss": 0.641,
+ "step": 1620
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.581173761875947e-05,
+ "loss": 0.6516,
+ "step": 1621
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5806473819147875e-05,
+ "loss": 0.7149,
+ "step": 1622
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.580120759126512e-05,
+ "loss": 0.77,
+ "step": 1623
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5795938937313554e-05,
+ "loss": 0.7509,
+ "step": 1624
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.579066785949652e-05,
+ "loss": 0.6557,
+ "step": 1625
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5785394360018388e-05,
+ "loss": 0.7448,
+ "step": 1626
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5780118441084544e-05,
+ "loss": 0.6875,
+ "step": 1627
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5774840104901378e-05,
+ "loss": 0.8116,
+ "step": 1628
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5769559353676297e-05,
+ "loss": 0.7587,
+ "step": 1629
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5764276189617712e-05,
+ "loss": 0.7257,
+ "step": 1630
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5758990614935042e-05,
+ "loss": 0.6683,
+ "step": 1631
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5753702631838725e-05,
+ "loss": 0.7341,
+ "step": 1632
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5748412242540193e-05,
+ "loss": 0.7667,
+ "step": 1633
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.574311944925189e-05,
+ "loss": 0.8283,
+ "step": 1634
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5737824254187275e-05,
+ "loss": 0.7506,
+ "step": 1635
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5732526659560788e-05,
+ "loss": 0.7063,
+ "step": 1636
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5727226667587898e-05,
+ "loss": 0.7814,
+ "step": 1637
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.572192428048506e-05,
+ "loss": 0.7022,
+ "step": 1638
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5716619500469735e-05,
+ "loss": 0.74,
+ "step": 1639
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5711312329760388e-05,
+ "loss": 0.7336,
+ "step": 1640
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5706002770576478e-05,
+ "loss": 0.8514,
+ "step": 1641
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5700690825138473e-05,
+ "loss": 0.6543,
+ "step": 1642
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5695376495667828e-05,
+ "loss": 0.6877,
+ "step": 1643
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5690059784386992e-05,
+ "loss": 0.7848,
+ "step": 1644
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.568474069351943e-05,
+ "loss": 0.7766,
+ "step": 1645
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.567941922528958e-05,
+ "loss": 0.7833,
+ "step": 1646
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5674095381922886e-05,
+ "loss": 0.8355,
+ "step": 1647
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5668769165645787e-05,
+ "loss": 0.775,
+ "step": 1648
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5663440578685706e-05,
+ "loss": 0.7363,
+ "step": 1649
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5658109623271057e-05,
+ "loss": 0.7449,
+ "step": 1650
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.565277630163126e-05,
+ "loss": 0.7462,
+ "step": 1651
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5647440615996706e-05,
+ "loss": 0.6734,
+ "step": 1652
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.564210256859878e-05,
+ "loss": 0.7345,
+ "step": 1653
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5636762161669864e-05,
+ "loss": 0.8243,
+ "step": 1654
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.563141939744332e-05,
+ "loss": 0.6479,
+ "step": 1655
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5626074278153485e-05,
+ "loss": 0.6897,
+ "step": 1656
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.56207268060357e-05,
+ "loss": 0.7988,
+ "step": 1657
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5615376983326282e-05,
+ "loss": 0.7537,
+ "step": 1658
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5610024812262526e-05,
+ "loss": 0.6609,
+ "step": 1659
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.560467029508272e-05,
+ "loss": 0.7001,
+ "step": 1660
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.559931343402612e-05,
+ "loss": 0.7246,
+ "step": 1661
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.559395423133297e-05,
+ "loss": 0.8111,
+ "step": 1662
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5588592689244492e-05,
+ "loss": 0.8332,
+ "step": 1663
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.558322881000289e-05,
+ "loss": 0.7976,
+ "step": 1664
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5577862595851335e-05,
+ "loss": 0.7605,
+ "step": 1665
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.557249404903399e-05,
+ "loss": 0.6917,
+ "step": 1666
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.556712317179598e-05,
+ "loss": 0.7052,
+ "step": 1667
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5561749966383408e-05,
+ "loss": 0.7405,
+ "step": 1668
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5556374435043353e-05,
+ "loss": 0.7763,
+ "step": 1669
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.555099658002387e-05,
+ "loss": 0.6396,
+ "step": 1670
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5545616403573977e-05,
+ "loss": 0.7304,
+ "step": 1671
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5540233907943663e-05,
+ "loss": 0.7782,
+ "step": 1672
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5534849095383902e-05,
+ "loss": 0.7192,
+ "step": 1673
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5529461968146618e-05,
+ "loss": 0.769,
+ "step": 1674
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5524072528484716e-05,
+ "loss": 0.7548,
+ "step": 1675
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.551868077865206e-05,
+ "loss": 0.6997,
+ "step": 1676
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5513286720903488e-05,
+ "loss": 0.7166,
+ "step": 1677
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.550789035749479e-05,
+ "loss": 0.6591,
+ "step": 1678
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5502491690682732e-05,
+ "loss": 0.7084,
+ "step": 1679
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5497090722725045e-05,
+ "loss": 0.7608,
+ "step": 1680
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5491687455880414e-05,
+ "loss": 0.7108,
+ "step": 1681
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5486281892408488e-05,
+ "loss": 0.6579,
+ "step": 1682
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5480874034569876e-05,
+ "loss": 0.6725,
+ "step": 1683
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5475463884626153e-05,
+ "loss": 0.7579,
+ "step": 1684
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.547005144483984e-05,
+ "loss": 0.6723,
+ "step": 1685
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.546463671747443e-05,
+ "loss": 0.7308,
+ "step": 1686
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5459219704794358e-05,
+ "loss": 0.7185,
+ "step": 1687
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.545380040906503e-05,
+ "loss": 0.4987,
+ "step": 1688
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5448378832552796e-05,
+ "loss": 0.7384,
+ "step": 1689
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5442954977524958e-05,
+ "loss": 0.7546,
+ "step": 1690
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5437528846249783e-05,
+ "loss": 0.6611,
+ "step": 1691
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5432100440996477e-05,
+ "loss": 0.8177,
+ "step": 1692
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.542666976403521e-05,
+ "loss": 0.6821,
+ "step": 1693
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5421236817637085e-05,
+ "loss": 0.7127,
+ "step": 1694
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.541580160407417e-05,
+ "loss": 0.7526,
+ "step": 1695
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5410364125619474e-05,
+ "loss": 0.7527,
+ "step": 1696
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5404924384546955e-05,
+ "loss": 0.7959,
+ "step": 1697
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5399482383131516e-05,
+ "loss": 0.8049,
+ "step": 1698
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5394038123649e-05,
+ "loss": 0.6542,
+ "step": 1699
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5388591608376206e-05,
+ "loss": 0.6657,
+ "step": 1700
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5383142839590872e-05,
+ "loss": 0.7473,
+ "step": 1701
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.537769181957167e-05,
+ "loss": 0.8284,
+ "step": 1702
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5372238550598225e-05,
+ "loss": 0.6112,
+ "step": 1703
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5366783034951094e-05,
+ "loss": 0.7531,
+ "step": 1704
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.536132527491178e-05,
+ "loss": 0.6677,
+ "step": 1705
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5355865272762723e-05,
+ "loss": 0.7272,
+ "step": 1706
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.53504030307873e-05,
+ "loss": 0.7405,
+ "step": 1707
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.534493855126981e-05,
+ "loss": 0.7734,
+ "step": 1708
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5339471836495524e-05,
+ "loss": 0.7435,
+ "step": 1709
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5334002888750616e-05,
+ "loss": 0.7754,
+ "step": 1710
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5328531710322198e-05,
+ "loss": 0.7375,
+ "step": 1711
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5323058303498326e-05,
+ "loss": 0.7408,
+ "step": 1712
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5317582670567986e-05,
+ "loss": 0.6708,
+ "step": 1713
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5312104813821083e-05,
+ "loss": 0.7956,
+ "step": 1714
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5306624735548465e-05,
+ "loss": 0.7467,
+ "step": 1715
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5301142438041906e-05,
+ "loss": 0.7286,
+ "step": 1716
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.52956579235941e-05,
+ "loss": 0.76,
+ "step": 1717
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.529017119449868e-05,
+ "loss": 0.7728,
+ "step": 1718
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.52846822530502e-05,
+ "loss": 0.7438,
+ "step": 1719
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5279191101544136e-05,
+ "loss": 0.7513,
+ "step": 1720
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.527369774227689e-05,
+ "loss": 0.756,
+ "step": 1721
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5268202177545793e-05,
+ "loss": 0.803,
+ "step": 1722
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.526270440964909e-05,
+ "loss": 0.6694,
+ "step": 1723
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5257204440885953e-05,
+ "loss": 0.8317,
+ "step": 1724
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5251702273556475e-05,
+ "loss": 0.817,
+ "step": 1725
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5246197909961659e-05,
+ "loss": 0.681,
+ "step": 1726
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5240691352403441e-05,
+ "loss": 0.7328,
+ "step": 1727
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5235182603184662e-05,
+ "loss": 0.7685,
+ "step": 1728
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5229671664609088e-05,
+ "loss": 0.8128,
+ "step": 1729
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.522415853898139e-05,
+ "loss": 0.7691,
+ "step": 1730
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.521864322860717e-05,
+ "loss": 0.7197,
+ "step": 1731
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.521312573579293e-05,
+ "loss": 0.7015,
+ "step": 1732
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5207606062846092e-05,
+ "loss": 0.7977,
+ "step": 1733
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.520208421207498e-05,
+ "loss": 0.8209,
+ "step": 1734
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5196560185788842e-05,
+ "loss": 0.8075,
+ "step": 1735
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.519103398629783e-05,
+ "loss": 0.6191,
+ "step": 1736
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5185505615912998e-05,
+ "loss": 0.6489,
+ "step": 1737
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.517997507694632e-05,
+ "loss": 0.7604,
+ "step": 1738
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.517444237171067e-05,
+ "loss": 0.8096,
+ "step": 1739
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5168907502519823e-05,
+ "loss": 0.7436,
+ "step": 1740
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5163370471688471e-05,
+ "loss": 0.8072,
+ "step": 1741
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5157831281532205e-05,
+ "loss": 0.7192,
+ "step": 1742
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5152289934367516e-05,
+ "loss": 0.6214,
+ "step": 1743
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5146746432511793e-05,
+ "loss": 0.7052,
+ "step": 1744
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5141200778283345e-05,
+ "loss": 0.5753,
+ "step": 1745
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5135652974001358e-05,
+ "loss": 0.7949,
+ "step": 1746
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5130103021985929e-05,
+ "loss": 0.7902,
+ "step": 1747
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5124550924558055e-05,
+ "loss": 0.8245,
+ "step": 1748
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5118996684039621e-05,
+ "loss": 0.7064,
+ "step": 1749
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5113440302753422e-05,
+ "loss": 0.7824,
+ "step": 1750
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5107881783023138e-05,
+ "loss": 0.7046,
+ "step": 1751
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5102321127173335e-05,
+ "loss": 0.6545,
+ "step": 1752
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.50967583375295e-05,
+ "loss": 0.7581,
+ "step": 1753
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5091193416417981e-05,
+ "loss": 0.7451,
+ "step": 1754
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5085626366166039e-05,
+ "loss": 0.8016,
+ "step": 1755
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.508005718910182e-05,
+ "loss": 0.7555,
+ "step": 1756
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5074485887554351e-05,
+ "loss": 0.6852,
+ "step": 1757
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5068912463853555e-05,
+ "loss": 0.6857,
+ "step": 1758
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5063336920330244e-05,
+ "loss": 0.7968,
+ "step": 1759
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5057759259316118e-05,
+ "loss": 0.677,
+ "step": 1760
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5052179483143752e-05,
+ "loss": 0.7693,
+ "step": 1761
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5046597594146612e-05,
+ "loss": 0.7695,
+ "step": 1762
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5041013594659051e-05,
+ "loss": 0.6936,
+ "step": 1763
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5035427487016298e-05,
+ "loss": 0.8426,
+ "step": 1764
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5029839273554469e-05,
+ "loss": 0.7339,
+ "step": 1765
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5024248956610561e-05,
+ "loss": 0.7443,
+ "step": 1766
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5018656538522442e-05,
+ "loss": 0.7881,
+ "step": 1767
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.501306202162887e-05,
+ "loss": 0.705,
+ "step": 1768
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5007465408269477e-05,
+ "loss": 0.6096,
+ "step": 1769
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5001866700784762e-05,
+ "loss": 0.6653,
+ "step": 1770
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4996265901516116e-05,
+ "loss": 0.6235,
+ "step": 1771
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4990663012805792e-05,
+ "loss": 0.7398,
+ "step": 1772
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4985058036996929e-05,
+ "loss": 0.7514,
+ "step": 1773
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4979450976433525e-05,
+ "loss": 0.799,
+ "step": 1774
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4973841833460457e-05,
+ "loss": 0.7502,
+ "step": 1775
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.496823061042347e-05,
+ "loss": 0.6976,
+ "step": 1776
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4962617309669192e-05,
+ "loss": 0.7587,
+ "step": 1777
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4957001933545096e-05,
+ "loss": 0.6908,
+ "step": 1778
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4951384484399542e-05,
+ "loss": 0.711,
+ "step": 1779
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4945764964581755e-05,
+ "loss": 0.734,
+ "step": 1780
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4940143376441812e-05,
+ "loss": 0.6212,
+ "step": 1781
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.493451972233067e-05,
+ "loss": 0.8047,
+ "step": 1782
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.492889400460015e-05,
+ "loss": 0.8197,
+ "step": 1783
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4923266225602925e-05,
+ "loss": 0.5608,
+ "step": 1784
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.491763638769253e-05,
+ "loss": 0.712,
+ "step": 1785
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4912004493223382e-05,
+ "loss": 0.742,
+ "step": 1786
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.490637054455073e-05,
+ "loss": 0.6844,
+ "step": 1787
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4900734544030701e-05,
+ "loss": 0.7583,
+ "step": 1788
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4895096494020274e-05,
+ "loss": 0.7334,
+ "step": 1789
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.488945639687728e-05,
+ "loss": 0.7061,
+ "step": 1790
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4883814254960418e-05,
+ "loss": 0.7575,
+ "step": 1791
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4878170070629235e-05,
+ "loss": 0.7007,
+ "step": 1792
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4872523846244127e-05,
+ "loss": 0.7918,
+ "step": 1793
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4866875584166352e-05,
+ "loss": 0.6549,
+ "step": 1794
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4861225286758018e-05,
+ "loss": 0.8163,
+ "step": 1795
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4855572956382082e-05,
+ "loss": 0.7962,
+ "step": 1796
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4849918595402353e-05,
+ "loss": 0.7634,
+ "step": 1797
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4844262206183488e-05,
+ "loss": 0.7166,
+ "step": 1798
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4838603791090989e-05,
+ "loss": 0.8057,
+ "step": 1799
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4832943352491213e-05,
+ "loss": 0.7036,
+ "step": 1800
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4827280892751364e-05,
+ "loss": 0.7223,
+ "step": 1801
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4821616414239477e-05,
+ "loss": 0.5729,
+ "step": 1802
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4815949919324443e-05,
+ "loss": 0.7474,
+ "step": 1803
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4810281410375997e-05,
+ "loss": 0.7284,
+ "step": 1804
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4804610889764712e-05,
+ "loss": 0.7954,
+ "step": 1805
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4798938359862002e-05,
+ "loss": 0.6919,
+ "step": 1806
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4793263823040127e-05,
+ "loss": 0.7628,
+ "step": 1807
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4787587281672174e-05,
+ "loss": 0.8151,
+ "step": 1808
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4781908738132085e-05,
+ "loss": 0.6063,
+ "step": 1809
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4776228194794624e-05,
+ "loss": 0.8041,
+ "step": 1810
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4770545654035403e-05,
+ "loss": 0.7582,
+ "step": 1811
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.476486111823086e-05,
+ "loss": 0.6529,
+ "step": 1812
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4759174589758272e-05,
+ "loss": 0.771,
+ "step": 1813
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4753486070995752e-05,
+ "loss": 0.6272,
+ "step": 1814
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4747795564322243e-05,
+ "loss": 0.6906,
+ "step": 1815
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4742103072117516e-05,
+ "loss": 0.8176,
+ "step": 1816
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.473640859676217e-05,
+ "loss": 0.6853,
+ "step": 1817
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4730712140637648e-05,
+ "loss": 0.694,
+ "step": 1818
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4725013706126207e-05,
+ "loss": 0.7544,
+ "step": 1819
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4719313295610935e-05,
+ "loss": 0.7372,
+ "step": 1820
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.471361091147575e-05,
+ "loss": 0.783,
+ "step": 1821
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4707906556105392e-05,
+ "loss": 0.7296,
+ "step": 1822
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4702200231885425e-05,
+ "loss": 0.8174,
+ "step": 1823
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.469649194120224e-05,
+ "loss": 0.7317,
+ "step": 1824
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4690781686443044e-05,
+ "loss": 0.6888,
+ "step": 1825
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.468506946999587e-05,
+ "loss": 0.6838,
+ "step": 1826
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4679355294249575e-05,
+ "loss": 0.7877,
+ "step": 1827
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4673639161593826e-05,
+ "loss": 0.6757,
+ "step": 1828
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4667921074419118e-05,
+ "loss": 0.7282,
+ "step": 1829
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4662201035116757e-05,
+ "loss": 0.8025,
+ "step": 1830
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4656479046078862e-05,
+ "loss": 0.7132,
+ "step": 1831
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4650755109698379e-05,
+ "loss": 0.7144,
+ "step": 1832
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4645029228369062e-05,
+ "loss": 0.7286,
+ "step": 1833
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4639301404485475e-05,
+ "loss": 0.7719,
+ "step": 1834
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4633571640442998e-05,
+ "loss": 0.7532,
+ "step": 1835
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4627839938637825e-05,
+ "loss": 0.7343,
+ "step": 1836
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4622106301466956e-05,
+ "loss": 0.7103,
+ "step": 1837
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.46163707313282e-05,
+ "loss": 0.7348,
+ "step": 1838
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4610633230620179e-05,
+ "loss": 0.757,
+ "step": 1839
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4604893801742316e-05,
+ "loss": 0.7478,
+ "step": 1840
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4599152447094846e-05,
+ "loss": 0.7004,
+ "step": 1841
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4593409169078811e-05,
+ "loss": 0.7153,
+ "step": 1842
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4587663970096048e-05,
+ "loss": 0.6499,
+ "step": 1843
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4581916852549206e-05,
+ "loss": 0.7289,
+ "step": 1844
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4576167818841732e-05,
+ "loss": 0.8305,
+ "step": 1845
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4570416871377874e-05,
+ "loss": 0.8731,
+ "step": 1846
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4564664012562686e-05,
+ "loss": 0.7812,
+ "step": 1847
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4558909244802015e-05,
+ "loss": 0.7641,
+ "step": 1848
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4553152570502506e-05,
+ "loss": 0.7623,
+ "step": 1849
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4547393992071612e-05,
+ "loss": 0.6948,
+ "step": 1850
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4541633511917567e-05,
+ "loss": 0.7485,
+ "step": 1851
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.453587113244941e-05,
+ "loss": 0.6613,
+ "step": 1852
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.453010685607697e-05,
+ "loss": 0.7053,
+ "step": 1853
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4524340685210874e-05,
+ "loss": 0.8147,
+ "step": 1854
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.451857262226254e-05,
+ "loss": 0.7468,
+ "step": 1855
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.451280266964417e-05,
+ "loss": 0.6444,
+ "step": 1856
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4507030829768765e-05,
+ "loss": 0.7882,
+ "step": 1857
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.450125710505011e-05,
+ "loss": 0.7396,
+ "step": 1858
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4495481497902789e-05,
+ "loss": 0.7315,
+ "step": 1859
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4489704010742154e-05,
+ "loss": 0.7927,
+ "step": 1860
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4483924645984359e-05,
+ "loss": 0.8066,
+ "step": 1861
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4478143406046337e-05,
+ "loss": 0.6507,
+ "step": 1862
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4472360293345807e-05,
+ "loss": 0.7613,
+ "step": 1863
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.446657531030127e-05,
+ "loss": 0.7986,
+ "step": 1864
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4460788459332011e-05,
+ "loss": 0.7187,
+ "step": 1865
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4454999742858093e-05,
+ "loss": 0.6992,
+ "step": 1866
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.444920916330036e-05,
+ "loss": 0.7254,
+ "step": 1867
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4443416723080441e-05,
+ "loss": 0.651,
+ "step": 1868
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4437622424620733e-05,
+ "loss": 0.7113,
+ "step": 1869
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.443182627034442e-05,
+ "loss": 0.7065,
+ "step": 1870
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4426028262675451e-05,
+ "loss": 0.7581,
+ "step": 1871
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4420228404038566e-05,
+ "loss": 0.7095,
+ "step": 1872
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.441442669685926e-05,
+ "loss": 0.7671,
+ "step": 1873
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.440862314356382e-05,
+ "loss": 0.7995,
+ "step": 1874
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.440281774657929e-05,
+ "loss": 0.6854,
+ "step": 1875
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.439701050833349e-05,
+ "loss": 0.666,
+ "step": 1876
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4391201431255014e-05,
+ "loss": 0.7484,
+ "step": 1877
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4385390517773216e-05,
+ "loss": 0.6946,
+ "step": 1878
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4379577770318234e-05,
+ "loss": 0.636,
+ "step": 1879
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4373763191320955e-05,
+ "loss": 0.7145,
+ "step": 1880
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.436794678321304e-05,
+ "loss": 0.6906,
+ "step": 1881
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.436212854842692e-05,
+ "loss": 0.7709,
+ "step": 1882
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4356308489395783e-05,
+ "loss": 0.7287,
+ "step": 1883
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4350486608553575e-05,
+ "loss": 0.7205,
+ "step": 1884
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4344662908335018e-05,
+ "loss": 0.7341,
+ "step": 1885
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4338837391175582e-05,
+ "loss": 0.7265,
+ "step": 1886
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4333010059511505e-05,
+ "loss": 0.7544,
+ "step": 1887
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4327180915779782e-05,
+ "loss": 0.7184,
+ "step": 1888
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4321349962418163e-05,
+ "loss": 0.699,
+ "step": 1889
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4315517201865155e-05,
+ "loss": 0.7518,
+ "step": 1890
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4309682636560027e-05,
+ "loss": 0.6806,
+ "step": 1891
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.430384626894279e-05,
+ "loss": 0.6866,
+ "step": 1892
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4298008101454226e-05,
+ "loss": 0.752,
+ "step": 1893
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4292168136535856e-05,
+ "loss": 0.7105,
+ "step": 1894
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4286326376629956e-05,
+ "loss": 0.6554,
+ "step": 1895
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4280482824179553e-05,
+ "loss": 0.8051,
+ "step": 1896
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4274637481628431e-05,
+ "loss": 0.7319,
+ "step": 1897
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4268790351421107e-05,
+ "loss": 0.7695,
+ "step": 1898
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4262941436002862e-05,
+ "loss": 0.7818,
+ "step": 1899
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4257090737819716e-05,
+ "loss": 0.7457,
+ "step": 1900
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.425123825931843e-05,
+ "loss": 0.7643,
+ "step": 1901
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4245384002946516e-05,
+ "loss": 0.7091,
+ "step": 1902
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.423952797115223e-05,
+ "loss": 0.7483,
+ "step": 1903
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4233670166384566e-05,
+ "loss": 0.6573,
+ "step": 1904
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4227810591093262e-05,
+ "loss": 0.6983,
+ "step": 1905
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4221949247728801e-05,
+ "loss": 0.6758,
+ "step": 1906
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4216086138742395e-05,
+ "loss": 0.8339,
+ "step": 1907
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4210221266585997e-05,
+ "loss": 0.6954,
+ "step": 1908
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4204354633712312e-05,
+ "loss": 0.6802,
+ "step": 1909
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.419848624257476e-05,
+ "loss": 0.6639,
+ "step": 1910
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4192616095627506e-05,
+ "loss": 0.6984,
+ "step": 1911
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4186744195325455e-05,
+ "loss": 0.7966,
+ "step": 1912
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4180870544124236e-05,
+ "loss": 0.7628,
+ "step": 1913
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4174995144480214e-05,
+ "loss": 0.7058,
+ "step": 1914
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.416911799885049e-05,
+ "loss": 0.7872,
+ "step": 1915
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4163239109692881e-05,
+ "loss": 0.7819,
+ "step": 1916
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4157358479465949e-05,
+ "loss": 0.799,
+ "step": 1917
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4151476110628972e-05,
+ "loss": 0.751,
+ "step": 1918
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4145592005641967e-05,
+ "loss": 0.8055,
+ "step": 1919
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4139706166965665e-05,
+ "loss": 0.7669,
+ "step": 1920
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.413381859706153e-05,
+ "loss": 0.6955,
+ "step": 1921
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4127929298391749e-05,
+ "loss": 0.6285,
+ "step": 1922
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.412203827341923e-05,
+ "loss": 0.7434,
+ "step": 1923
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4116145524607603e-05,
+ "loss": 0.7092,
+ "step": 1924
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4110251054421218e-05,
+ "loss": 0.6904,
+ "step": 1925
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4104354865325145e-05,
+ "loss": 0.6703,
+ "step": 1926
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4098456959785178e-05,
+ "loss": 0.7235,
+ "step": 1927
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4092557340267824e-05,
+ "loss": 0.7262,
+ "step": 1928
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4086656009240308e-05,
+ "loss": 0.6985,
+ "step": 1929
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4080752969170568e-05,
+ "loss": 0.6737,
+ "step": 1930
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4074848222527262e-05,
+ "loss": 0.7576,
+ "step": 1931
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4068941771779757e-05,
+ "loss": 0.7851,
+ "step": 1932
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4063033619398136e-05,
+ "loss": 0.8381,
+ "step": 1933
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4057123767853194e-05,
+ "loss": 0.7609,
+ "step": 1934
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4051212219616427e-05,
+ "loss": 0.712,
+ "step": 1935
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4045298977160058e-05,
+ "loss": 0.7459,
+ "step": 1936
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4039384042957003e-05,
+ "loss": 0.7596,
+ "step": 1937
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4033467419480897e-05,
+ "loss": 0.6932,
+ "step": 1938
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4027549109206064e-05,
+ "loss": 0.7911,
+ "step": 1939
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4021629114607558e-05,
+ "loss": 0.7871,
+ "step": 1940
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4015707438161122e-05,
+ "loss": 0.6614,
+ "step": 1941
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4009784082343199e-05,
+ "loss": 0.7054,
+ "step": 1942
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4003859049630943e-05,
+ "loss": 0.7018,
+ "step": 1943
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.399793234250221e-05,
+ "loss": 0.7203,
+ "step": 1944
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.399200396343555e-05,
+ "loss": 0.7139,
+ "step": 1945
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3986073914910215e-05,
+ "loss": 0.714,
+ "step": 1946
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3980142199406155e-05,
+ "loss": 0.6704,
+ "step": 1947
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3974208819404016e-05,
+ "loss": 0.697,
+ "step": 1948
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3968273777385146e-05,
+ "loss": 0.7572,
+ "step": 1949
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3962337075831583e-05,
+ "loss": 0.7306,
+ "step": 1950
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3956398717226055e-05,
+ "loss": 0.6538,
+ "step": 1951
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3950458704051986e-05,
+ "loss": 0.7644,
+ "step": 1952
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.39445170387935e-05,
+ "loss": 0.7049,
+ "step": 1953
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3938573723935403e-05,
+ "loss": 0.7035,
+ "step": 1954
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3932628761963192e-05,
+ "loss": 0.7539,
+ "step": 1955
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3926682155363053e-05,
+ "loss": 0.7505,
+ "step": 1956
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3920733906621861e-05,
+ "loss": 0.7742,
+ "step": 1957
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3914784018227186e-05,
+ "loss": 0.7337,
+ "step": 1958
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3908832492667261e-05,
+ "loss": 0.5887,
+ "step": 1959
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3902879332431027e-05,
+ "loss": 0.7003,
+ "step": 1960
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3896924540008098e-05,
+ "loss": 0.7892,
+ "step": 1961
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3890968117888772e-05,
+ "loss": 0.7659,
+ "step": 1962
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3885010068564026e-05,
+ "loss": 0.6942,
+ "step": 1963
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3879050394525525e-05,
+ "loss": 0.7118,
+ "step": 1964
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3873089098265606e-05,
+ "loss": 0.7484,
+ "step": 1965
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3867126182277285e-05,
+ "loss": 0.8038,
+ "step": 1966
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.386116164905426e-05,
+ "loss": 0.7846,
+ "step": 1967
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.38551955010909e-05,
+ "loss": 0.7913,
+ "step": 1968
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3849227740882257e-05,
+ "loss": 0.6584,
+ "step": 1969
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3843258370924046e-05,
+ "loss": 0.7246,
+ "step": 1970
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3837287393712666e-05,
+ "loss": 0.7925,
+ "step": 1971
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3831314811745185e-05,
+ "loss": 0.6766,
+ "step": 1972
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3825340627519337e-05,
+ "loss": 0.6874,
+ "step": 1973
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3819364843533527e-05,
+ "loss": 0.8054,
+ "step": 1974
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3813387462286839e-05,
+ "loss": 0.7218,
+ "step": 1975
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3807408486279012e-05,
+ "loss": 0.8085,
+ "step": 1976
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3801427918010461e-05,
+ "loss": 0.7866,
+ "step": 1977
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3795445759982262e-05,
+ "loss": 0.8057,
+ "step": 1978
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.378946201469616e-05,
+ "loss": 0.7698,
+ "step": 1979
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3783476684654555e-05,
+ "loss": 0.735,
+ "step": 1980
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3777489772360521e-05,
+ "loss": 0.7624,
+ "step": 1981
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3771501280317791e-05,
+ "loss": 0.751,
+ "step": 1982
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3765511211030753e-05,
+ "loss": 0.7781,
+ "step": 1983
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3759519567004454e-05,
+ "loss": 0.7455,
+ "step": 1984
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3753526350744611e-05,
+ "loss": 0.8627,
+ "step": 1985
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3747531564757584e-05,
+ "loss": 0.7955,
+ "step": 1986
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3741535211550402e-05,
+ "loss": 0.8166,
+ "step": 1987
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3735537293630741e-05,
+ "loss": 0.7756,
+ "step": 1988
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3729537813506932e-05,
+ "loss": 0.6415,
+ "step": 1989
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3723536773687966e-05,
+ "loss": 0.708,
+ "step": 1990
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3717534176683485e-05,
+ "loss": 0.7264,
+ "step": 1991
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3711530025003768e-05,
+ "loss": 0.6798,
+ "step": 1992
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3705524321159764e-05,
+ "loss": 0.7799,
+ "step": 1993
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3699517067663057e-05,
+ "loss": 0.8123,
+ "step": 1994
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.369350826702589e-05,
+ "loss": 0.7705,
+ "step": 1995
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.368749792176114e-05,
+ "loss": 0.6495,
+ "step": 1996
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3681486034382347e-05,
+ "loss": 0.7111,
+ "step": 1997
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3675472607403677e-05,
+ "loss": 0.7892,
+ "step": 1998
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3669457643339956e-05,
+ "loss": 0.65,
+ "step": 1999
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3663441144706642e-05,
+ "loss": 0.6031,
+ "step": 2000
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3657423114019837e-05,
+ "loss": 0.6994,
+ "step": 2001
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.365140355379629e-05,
+ "loss": 0.7785,
+ "step": 2002
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3645382466553381e-05,
+ "loss": 0.7484,
+ "step": 2003
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3639359854809135e-05,
+ "loss": 0.785,
+ "step": 2004
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3633335721082215e-05,
+ "loss": 0.6519,
+ "step": 2005
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3627310067891913e-05,
+ "loss": 0.7343,
+ "step": 2006
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.362128289775816e-05,
+ "loss": 0.7537,
+ "step": 2007
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3615254213201527e-05,
+ "loss": 0.6879,
+ "step": 2008
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3609224016743213e-05,
+ "loss": 0.7787,
+ "step": 2009
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3603192310905046e-05,
+ "loss": 0.7949,
+ "step": 2010
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3597159098209493e-05,
+ "loss": 0.7246,
+ "step": 2011
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3591124381179648e-05,
+ "loss": 0.7628,
+ "step": 2012
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3585088162339233e-05,
+ "loss": 0.8011,
+ "step": 2013
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3579050444212599e-05,
+ "loss": 0.713,
+ "step": 2014
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.357301122932472e-05,
+ "loss": 0.8059,
+ "step": 2015
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3566970520201202e-05,
+ "loss": 0.5912,
+ "step": 2016
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3560928319368278e-05,
+ "loss": 0.6646,
+ "step": 2017
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3554884629352794e-05,
+ "loss": 0.7063,
+ "step": 2018
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3548839452682225e-05,
+ "loss": 0.6775,
+ "step": 2019
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3542792791884675e-05,
+ "loss": 0.7512,
+ "step": 2020
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.353674464948885e-05,
+ "loss": 0.6781,
+ "step": 2021
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3530695028024099e-05,
+ "loss": 0.7358,
+ "step": 2022
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3524643930020374e-05,
+ "loss": 0.7219,
+ "step": 2023
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3518591358008243e-05,
+ "loss": 0.7061,
+ "step": 2024
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.35125373145189e-05,
+ "loss": 0.7552,
+ "step": 2025
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3506481802084151e-05,
+ "loss": 0.8161,
+ "step": 2026
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3500424823236413e-05,
+ "loss": 0.7647,
+ "step": 2027
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3494366380508717e-05,
+ "loss": 0.5838,
+ "step": 2028
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3488306476434714e-05,
+ "loss": 0.6847,
+ "step": 2029
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3482245113548649e-05,
+ "loss": 0.8177,
+ "step": 2030
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3476182294385398e-05,
+ "loss": 0.8101,
+ "step": 2031
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3470118021480435e-05,
+ "loss": 0.8294,
+ "step": 2032
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3464052297369838e-05,
+ "loss": 0.7677,
+ "step": 2033
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3457985124590298e-05,
+ "loss": 0.6888,
+ "step": 2034
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3451916505679115e-05,
+ "loss": 0.711,
+ "step": 2035
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3445846443174184e-05,
+ "loss": 0.6605,
+ "step": 2036
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.343977493961401e-05,
+ "loss": 0.808,
+ "step": 2037
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3433701997537704e-05,
+ "loss": 0.7093,
+ "step": 2038
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3427627619484968e-05,
+ "loss": 0.7053,
+ "step": 2039
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.342155180799612e-05,
+ "loss": 0.77,
+ "step": 2040
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3415474565612059e-05,
+ "loss": 0.7812,
+ "step": 2041
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3409395894874295e-05,
+ "loss": 0.6949,
+ "step": 2042
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3403315798324935e-05,
+ "loss": 0.7036,
+ "step": 2043
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3397234278506675e-05,
+ "loss": 0.5875,
+ "step": 2044
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3391151337962813e-05,
+ "loss": 0.7726,
+ "step": 2045
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3385066979237235e-05,
+ "loss": 0.6296,
+ "step": 2046
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3378981204874432e-05,
+ "loss": 0.6643,
+ "step": 2047
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.337289401741947e-05,
+ "loss": 0.6847,
+ "step": 2048
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3366805419418026e-05,
+ "loss": 0.6089,
+ "step": 2049
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3360715413416341e-05,
+ "loss": 0.7091,
+ "step": 2050
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3354624001961267e-05,
+ "loss": 0.7252,
+ "step": 2051
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.334853118760024e-05,
+ "loss": 0.6989,
+ "step": 2052
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3342436972881272e-05,
+ "loss": 0.6652,
+ "step": 2053
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3336341360352971e-05,
+ "loss": 0.6644,
+ "step": 2054
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3330244352564527e-05,
+ "loss": 0.7361,
+ "step": 2055
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3324145952065712e-05,
+ "loss": 0.7361,
+ "step": 2056
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3318046161406876e-05,
+ "loss": 0.6891,
+ "step": 2057
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3311944983138964e-05,
+ "loss": 0.6263,
+ "step": 2058
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3305842419813488e-05,
+ "loss": 0.7505,
+ "step": 2059
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3299738473982546e-05,
+ "loss": 0.7621,
+ "step": 2060
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3293633148198812e-05,
+ "loss": 0.649,
+ "step": 2061
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.328752644501553e-05,
+ "loss": 0.7048,
+ "step": 2062
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3281418366986535e-05,
+ "loss": 0.6321,
+ "step": 2063
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3275308916666235e-05,
+ "loss": 0.6873,
+ "step": 2064
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3269198096609592e-05,
+ "loss": 0.7415,
+ "step": 2065
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3263085909372162e-05,
+ "loss": 0.7304,
+ "step": 2066
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3256972357510068e-05,
+ "loss": 0.7557,
+ "step": 2067
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3250857443579997e-05,
+ "loss": 0.7876,
+ "step": 2068
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.324474117013921e-05,
+ "loss": 0.6398,
+ "step": 2069
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3238623539745544e-05,
+ "loss": 0.59,
+ "step": 2070
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3232504554957385e-05,
+ "loss": 0.7579,
+ "step": 2071
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3226384218333703e-05,
+ "loss": 0.7537,
+ "step": 2072
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.322026253243403e-05,
+ "loss": 0.7193,
+ "step": 2073
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.321413949981845e-05,
+ "loss": 0.7557,
+ "step": 2074
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3208015123047623e-05,
+ "loss": 0.6979,
+ "step": 2075
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.320188940468277e-05,
+ "loss": 0.7598,
+ "step": 2076
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.319576234728567e-05,
+ "loss": 0.6755,
+ "step": 2077
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3189633953418661e-05,
+ "loss": 0.6662,
+ "step": 2078
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3183504225644643e-05,
+ "loss": 0.7223,
+ "step": 2079
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3177373166527065e-05,
+ "loss": 0.7523,
+ "step": 2080
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3171240778629952e-05,
+ "loss": 0.7761,
+ "step": 2081
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3165107064517866e-05,
+ "loss": 0.675,
+ "step": 2082
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3158972026755927e-05,
+ "loss": 0.6947,
+ "step": 2083
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3152835667909815e-05,
+ "loss": 0.5929,
+ "step": 2084
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3146697990545762e-05,
+ "loss": 0.7178,
+ "step": 2085
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3140558997230547e-05,
+ "loss": 0.6845,
+ "step": 2086
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3134418690531498e-05,
+ "loss": 0.7324,
+ "step": 2087
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.31282770730165e-05,
+ "loss": 0.7265,
+ "step": 2088
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3122134147253975e-05,
+ "loss": 0.765,
+ "step": 2089
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.311598991581291e-05,
+ "loss": 0.8113,
+ "step": 2090
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3109844381262816e-05,
+ "loss": 0.7642,
+ "step": 2091
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.310369754617376e-05,
+ "loss": 0.679,
+ "step": 2092
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3097549413116358e-05,
+ "loss": 0.7718,
+ "step": 2093
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.309139998466176e-05,
+ "loss": 0.6791,
+ "step": 2094
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3085249263381661e-05,
+ "loss": 0.7474,
+ "step": 2095
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3079097251848301e-05,
+ "loss": 0.8297,
+ "step": 2096
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3072943952634446e-05,
+ "loss": 0.7944,
+ "step": 2097
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3066789368313417e-05,
+ "loss": 0.6128,
+ "step": 2098
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3060633501459065e-05,
+ "loss": 0.6402,
+ "step": 2099
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3054476354645778e-05,
+ "loss": 0.678,
+ "step": 2100
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3048317930448472e-05,
+ "loss": 0.7248,
+ "step": 2101
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3042158231442614e-05,
+ "loss": 0.7635,
+ "step": 2102
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3035997260204187e-05,
+ "loss": 0.768,
+ "step": 2103
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3029835019309713e-05,
+ "loss": 0.8443,
+ "step": 2104
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3023671511336253e-05,
+ "loss": 0.6445,
+ "step": 2105
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.301750673886138e-05,
+ "loss": 0.7216,
+ "step": 2106
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.301134070446321e-05,
+ "loss": 0.7845,
+ "step": 2107
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3005173410720389e-05,
+ "loss": 0.7216,
+ "step": 2108
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2999004860212071e-05,
+ "loss": 0.6273,
+ "step": 2109
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2992835055517959e-05,
+ "loss": 0.6979,
+ "step": 2110
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2986663999218264e-05,
+ "loss": 0.6969,
+ "step": 2111
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2980491693893721e-05,
+ "loss": 0.8033,
+ "step": 2112
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2974318142125604e-05,
+ "loss": 0.6297,
+ "step": 2113
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.296814334649569e-05,
+ "loss": 0.592,
+ "step": 2114
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2961967309586279e-05,
+ "loss": 0.811,
+ "step": 2115
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2955790033980199e-05,
+ "loss": 0.7711,
+ "step": 2116
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2949611522260788e-05,
+ "loss": 0.7263,
+ "step": 2117
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2943431777011903e-05,
+ "loss": 0.754,
+ "step": 2118
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2937250800817922e-05,
+ "loss": 0.6612,
+ "step": 2119
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2931068596263732e-05,
+ "loss": 0.6055,
+ "step": 2120
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2924885165934728e-05,
+ "loss": 0.6808,
+ "step": 2121
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2918700512416836e-05,
+ "loss": 0.5397,
+ "step": 2122
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2912514638296472e-05,
+ "loss": 0.7296,
+ "step": 2123
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2906327546160577e-05,
+ "loss": 0.7999,
+ "step": 2124
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2900139238596598e-05,
+ "loss": 0.7864,
+ "step": 2125
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.289394971819249e-05,
+ "loss": 0.7367,
+ "step": 2126
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.288775898753671e-05,
+ "loss": 0.7638,
+ "step": 2127
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2881567049218227e-05,
+ "loss": 0.8054,
+ "step": 2128
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.287537390582652e-05,
+ "loss": 0.6788,
+ "step": 2129
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2869179559951555e-05,
+ "loss": 0.7362,
+ "step": 2130
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.286298401418382e-05,
+ "loss": 0.6832,
+ "step": 2131
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2856787271114292e-05,
+ "loss": 0.6962,
+ "step": 2132
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2850589333334455e-05,
+ "loss": 0.59,
+ "step": 2133
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.284439020343629e-05,
+ "loss": 0.641,
+ "step": 2134
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2838189884012275e-05,
+ "loss": 0.7168,
+ "step": 2135
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2831988377655393e-05,
+ "loss": 0.7333,
+ "step": 2136
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2825785686959113e-05,
+ "loss": 0.7667,
+ "step": 2137
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2819581814517407e-05,
+ "loss": 0.7826,
+ "step": 2138
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2813376762924735e-05,
+ "loss": 0.6693,
+ "step": 2139
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2807170534776062e-05,
+ "loss": 0.7674,
+ "step": 2140
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2800963132666825e-05,
+ "loss": 0.6577,
+ "step": 2141
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2794754559192972e-05,
+ "loss": 0.7828,
+ "step": 2142
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.278854481695093e-05,
+ "loss": 0.7032,
+ "step": 2143
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2782333908537617e-05,
+ "loss": 0.6527,
+ "step": 2144
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.277612183655044e-05,
+ "loss": 0.6888,
+ "step": 2145
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2769908603587294e-05,
+ "loss": 0.7082,
+ "step": 2146
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.276369421224655e-05,
+ "loss": 0.7831,
+ "step": 2147
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2757478665127071e-05,
+ "loss": 0.7457,
+ "step": 2148
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2751261964828214e-05,
+ "loss": 0.8446,
+ "step": 2149
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2745044113949795e-05,
+ "loss": 0.7331,
+ "step": 2150
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.273882511509213e-05,
+ "loss": 0.5626,
+ "step": 2151
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2732604970856005e-05,
+ "loss": 0.7081,
+ "step": 2152
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.272638368384269e-05,
+ "loss": 0.7053,
+ "step": 2153
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2720161256653931e-05,
+ "loss": 0.7945,
+ "step": 2154
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2713937691891953e-05,
+ "loss": 0.7494,
+ "step": 2155
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.270771299215945e-05,
+ "loss": 0.6674,
+ "step": 2156
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2701487160059601e-05,
+ "loss": 0.753,
+ "step": 2157
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.269526019819605e-05,
+ "loss": 0.7457,
+ "step": 2158
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2689032109172914e-05,
+ "loss": 0.6698,
+ "step": 2159
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.268280289559479e-05,
+ "loss": 0.6449,
+ "step": 2160
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2676572560066736e-05,
+ "loss": 0.8134,
+ "step": 2161
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2670341105194282e-05,
+ "loss": 0.8067,
+ "step": 2162
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2664108533583428e-05,
+ "loss": 0.6773,
+ "step": 2163
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2657874847840639e-05,
+ "loss": 0.7046,
+ "step": 2164
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2651640050572845e-05,
+ "loss": 0.6981,
+ "step": 2165
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2645404144387444e-05,
+ "loss": 0.8143,
+ "step": 2166
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2639167131892294e-05,
+ "loss": 0.7862,
+ "step": 2167
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2632929015695719e-05,
+ "loss": 0.6082,
+ "step": 2168
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2626689798406506e-05,
+ "loss": 0.6971,
+ "step": 2169
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2620449482633894e-05,
+ "loss": 0.7255,
+ "step": 2170
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2614208070987592e-05,
+ "loss": 0.6933,
+ "step": 2171
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2607965566077764e-05,
+ "loss": 0.5531,
+ "step": 2172
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2601721970515024e-05,
+ "loss": 0.6882,
+ "step": 2173
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.259547728691045e-05,
+ "loss": 0.8107,
+ "step": 2174
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.258923151787557e-05,
+ "loss": 0.7222,
+ "step": 2175
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2582984666022375e-05,
+ "loss": 0.6573,
+ "step": 2176
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.25767367339633e-05,
+ "loss": 0.741,
+ "step": 2177
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2570487724311233e-05,
+ "loss": 0.8335,
+ "step": 2178
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2564237639679512e-05,
+ "loss": 0.742,
+ "step": 2179
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2557986482681929e-05,
+ "loss": 0.7385,
+ "step": 2180
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2551734255932728e-05,
+ "loss": 0.6047,
+ "step": 2181
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2545480962046583e-05,
+ "loss": 0.7767,
+ "step": 2182
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2539226603638633e-05,
+ "loss": 0.6946,
+ "step": 2183
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2532971183324451e-05,
+ "loss": 0.7623,
+ "step": 2184
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.252671470372006e-05,
+ "loss": 0.7435,
+ "step": 2185
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2520457167441922e-05,
+ "loss": 0.6657,
+ "step": 2186
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2514198577106948e-05,
+ "loss": 0.8047,
+ "step": 2187
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2507938935332478e-05,
+ "loss": 0.6522,
+ "step": 2188
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.25016782447363e-05,
+ "loss": 0.7137,
+ "step": 2189
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2495416507936642e-05,
+ "loss": 0.7507,
+ "step": 2190
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2489153727552164e-05,
+ "loss": 0.7567,
+ "step": 2191
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2482889906201965e-05,
+ "loss": 0.7874,
+ "step": 2192
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.247662504650558e-05,
+ "loss": 0.7565,
+ "step": 2193
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2470359151082974e-05,
+ "loss": 0.777,
+ "step": 2194
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2464092222554554e-05,
+ "loss": 0.661,
+ "step": 2195
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2457824263541152e-05,
+ "loss": 0.6562,
+ "step": 2196
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2451555276664032e-05,
+ "loss": 0.7081,
+ "step": 2197
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2445285264544884e-05,
+ "loss": 0.7159,
+ "step": 2198
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2439014229805836e-05,
+ "loss": 0.7269,
+ "step": 2199
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.243274217506944e-05,
+ "loss": 0.748,
+ "step": 2200
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2426469102958671e-05,
+ "loss": 0.7831,
+ "step": 2201
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2420195016096934e-05,
+ "loss": 0.7496,
+ "step": 2202
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2413919917108053e-05,
+ "loss": 0.6047,
+ "step": 2203
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.240764380861628e-05,
+ "loss": 0.7019,
+ "step": 2204
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2401366693246289e-05,
+ "loss": 0.5762,
+ "step": 2205
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2395088573623172e-05,
+ "loss": 0.7803,
+ "step": 2206
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2388809452372442e-05,
+ "loss": 0.7015,
+ "step": 2207
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2382529332120034e-05,
+ "loss": 0.6879,
+ "step": 2208
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2376248215492297e-05,
+ "loss": 0.7328,
+ "step": 2209
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2369966105115998e-05,
+ "loss": 0.7672,
+ "step": 2210
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2363683003618322e-05,
+ "loss": 0.744,
+ "step": 2211
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2357398913626861e-05,
+ "loss": 0.7612,
+ "step": 2212
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.235111383776963e-05,
+ "loss": 0.6657,
+ "step": 2213
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2344827778675052e-05,
+ "loss": 0.6987,
+ "step": 2214
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.233854073897196e-05,
+ "loss": 0.7879,
+ "step": 2215
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2332252721289594e-05,
+ "loss": 0.7182,
+ "step": 2216
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2325963728257616e-05,
+ "loss": 0.7618,
+ "step": 2217
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2319673762506079e-05,
+ "loss": 0.6691,
+ "step": 2218
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2313382826665456e-05,
+ "loss": 0.649,
+ "step": 2219
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2307090923366613e-05,
+ "loss": 0.6144,
+ "step": 2220
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2300798055240836e-05,
+ "loss": 0.6692,
+ "step": 2221
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2294504224919807e-05,
+ "loss": 0.7093,
+ "step": 2222
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2288209435035605e-05,
+ "loss": 0.7279,
+ "step": 2223
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2281913688220716e-05,
+ "loss": 0.7151,
+ "step": 2224
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2275616987108028e-05,
+ "loss": 0.7243,
+ "step": 2225
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2269319334330823e-05,
+ "loss": 0.6653,
+ "step": 2226
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2263020732522786e-05,
+ "loss": 0.7394,
+ "step": 2227
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2256721184317994e-05,
+ "loss": 0.6748,
+ "step": 2228
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2250420692350921e-05,
+ "loss": 0.6734,
+ "step": 2229
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2244119259256442e-05,
+ "loss": 0.6808,
+ "step": 2230
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2237816887669817e-05,
+ "loss": 0.7677,
+ "step": 2231
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2231513580226704e-05,
+ "loss": 0.7185,
+ "step": 2232
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2225209339563144e-05,
+ "loss": 0.5664,
+ "step": 2233
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2218904168315584e-05,
+ "loss": 0.7025,
+ "step": 2234
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2212598069120845e-05,
+ "loss": 0.6119,
+ "step": 2235
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2206291044616142e-05,
+ "loss": 0.7777,
+ "step": 2236
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2199983097439079e-05,
+ "loss": 0.593,
+ "step": 2237
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2193674230227638e-05,
+ "loss": 0.6921,
+ "step": 2238
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2187364445620203e-05,
+ "loss": 0.7207,
+ "step": 2239
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2181053746255518e-05,
+ "loss": 0.5799,
+ "step": 2240
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2174742134772726e-05,
+ "loss": 0.6795,
+ "step": 2241
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.216842961381135e-05,
+ "loss": 0.6774,
+ "step": 2242
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2162116186011285e-05,
+ "loss": 0.734,
+ "step": 2243
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2155801854012816e-05,
+ "loss": 0.7493,
+ "step": 2244
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2149486620456599e-05,
+ "loss": 0.6191,
+ "step": 2245
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2143170487983667e-05,
+ "loss": 0.7198,
+ "step": 2246
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2136853459235427e-05,
+ "loss": 0.6223,
+ "step": 2247
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2130535536853677e-05,
+ "loss": 0.6741,
+ "step": 2248
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2124216723480565e-05,
+ "loss": 0.7587,
+ "step": 2249
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2117897021758626e-05,
+ "loss": 0.6985,
+ "step": 2250
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2111576434330767e-05,
+ "loss": 0.7481,
+ "step": 2251
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2105254963840256e-05,
+ "loss": 0.6576,
+ "step": 2252
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.209893261293074e-05,
+ "loss": 0.6614,
+ "step": 2253
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.209260938424623e-05,
+ "loss": 0.6768,
+ "step": 2254
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2086285280431103e-05,
+ "loss": 0.7177,
+ "step": 2255
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.20799603041301e-05,
+ "loss": 0.6572,
+ "step": 2256
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2073634457988342e-05,
+ "loss": 0.7219,
+ "step": 2257
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2067307744651288e-05,
+ "loss": 0.7578,
+ "step": 2258
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2060980166764782e-05,
+ "loss": 0.7622,
+ "step": 2259
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2054651726975022e-05,
+ "loss": 0.5583,
+ "step": 2260
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2048322427928556e-05,
+ "loss": 0.7103,
+ "step": 2261
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2041992272272311e-05,
+ "loss": 0.7341,
+ "step": 2262
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2035661262653562e-05,
+ "loss": 0.6622,
+ "step": 2263
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2029329401719935e-05,
+ "loss": 0.6639,
+ "step": 2264
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2022996692119424e-05,
+ "loss": 0.8021,
+ "step": 2265
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2016663136500368e-05,
+ "loss": 0.7579,
+ "step": 2266
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2010328737511466e-05,
+ "loss": 0.6736,
+ "step": 2267
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.200399349780177e-05,
+ "loss": 0.6682,
+ "step": 2268
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1997657420020678e-05,
+ "loss": 0.7552,
+ "step": 2269
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1991320506817944e-05,
+ "loss": 0.7534,
+ "step": 2270
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1984982760843666e-05,
+ "loss": 0.787,
+ "step": 2271
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.19786441847483e-05,
+ "loss": 0.5931,
+ "step": 2272
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1972304781182636e-05,
+ "loss": 0.7957,
+ "step": 2273
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.196596455279782e-05,
+ "loss": 0.6103,
+ "step": 2274
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.195962350224534e-05,
+ "loss": 0.6045,
+ "step": 2275
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1953281632177025e-05,
+ "loss": 0.7442,
+ "step": 2276
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1946938945245052e-05,
+ "loss": 0.6243,
+ "step": 2277
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1940595444101938e-05,
+ "loss": 0.8077,
+ "step": 2278
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1934251131400532e-05,
+ "loss": 0.7286,
+ "step": 2279
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.192790600979404e-05,
+ "loss": 0.6568,
+ "step": 2280
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1921560081935986e-05,
+ "loss": 0.5841,
+ "step": 2281
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1915213350480248e-05,
+ "loss": 0.7518,
+ "step": 2282
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1908865818081032e-05,
+ "loss": 0.6341,
+ "step": 2283
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1902517487392878e-05,
+ "loss": 0.7714,
+ "step": 2284
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1896168361070664e-05,
+ "loss": 0.8227,
+ "step": 2285
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.18898184417696e-05,
+ "loss": 0.7662,
+ "step": 2286
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1883467732145224e-05,
+ "loss": 0.6511,
+ "step": 2287
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1877116234853402e-05,
+ "loss": 0.6578,
+ "step": 2288
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1870763952550347e-05,
+ "loss": 0.7405,
+ "step": 2289
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1864410887892578e-05,
+ "loss": 0.7544,
+ "step": 2290
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1858057043536955e-05,
+ "loss": 0.6959,
+ "step": 2291
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1851702422140656e-05,
+ "loss": 0.6857,
+ "step": 2292
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1845347026361192e-05,
+ "loss": 0.7152,
+ "step": 2293
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.183899085885639e-05,
+ "loss": 0.8357,
+ "step": 2294
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1832633922284407e-05,
+ "loss": 0.7542,
+ "step": 2295
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1826276219303715e-05,
+ "loss": 0.7021,
+ "step": 2296
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1819917752573109e-05,
+ "loss": 0.6663,
+ "step": 2297
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1813558524751708e-05,
+ "loss": 0.6311,
+ "step": 2298
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1807198538498944e-05,
+ "loss": 0.7515,
+ "step": 2299
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1800837796474562e-05,
+ "loss": 0.7194,
+ "step": 2300
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.179447630133864e-05,
+ "loss": 0.6793,
+ "step": 2301
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1788114055751546e-05,
+ "loss": 0.6986,
+ "step": 2302
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1781751062373982e-05,
+ "loss": 0.6435,
+ "step": 2303
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1775387323866961e-05,
+ "loss": 0.6593,
+ "step": 2304
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1769022842891794e-05,
+ "loss": 0.7491,
+ "step": 2305
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.176265762211011e-05,
+ "loss": 0.6714,
+ "step": 2306
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1756291664183858e-05,
+ "loss": 0.7577,
+ "step": 2307
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1749924971775279e-05,
+ "loss": 0.7047,
+ "step": 2308
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1743557547546928e-05,
+ "loss": 0.7561,
+ "step": 2309
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1737189394161673e-05,
+ "loss": 0.5751,
+ "step": 2310
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1730820514282669e-05,
+ "loss": 0.7222,
+ "step": 2311
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1724450910573392e-05,
+ "loss": 0.7364,
+ "step": 2312
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1718080585697617e-05,
+ "loss": 0.7853,
+ "step": 2313
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1711709542319412e-05,
+ "loss": 0.7393,
+ "step": 2314
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1705337783103155e-05,
+ "loss": 0.7658,
+ "step": 2315
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.169896531071352e-05,
+ "loss": 0.7302,
+ "step": 2316
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1692592127815475e-05,
+ "loss": 0.8297,
+ "step": 2317
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1686218237074294e-05,
+ "loss": 0.686,
+ "step": 2318
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1679843641155544e-05,
+ "loss": 0.7651,
+ "step": 2319
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1673468342725075e-05,
+ "loss": 0.6528,
+ "step": 2320
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1667092344449053e-05,
+ "loss": 0.6931,
+ "step": 2321
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.166071564899392e-05,
+ "loss": 0.7617,
+ "step": 2322
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1654338259026413e-05,
+ "loss": 0.7368,
+ "step": 2323
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.164796017721356e-05,
+ "loss": 0.6499,
+ "step": 2324
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1641581406222682e-05,
+ "loss": 0.6977,
+ "step": 2325
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1635201948721381e-05,
+ "loss": 0.6376,
+ "step": 2326
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1628821807377557e-05,
+ "loss": 0.7226,
+ "step": 2327
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1622440984859384e-05,
+ "loss": 0.8391,
+ "step": 2328
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1616059483835325e-05,
+ "loss": 0.6964,
+ "step": 2329
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1609677306974134e-05,
+ "loss": 0.6211,
+ "step": 2330
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1603294456944835e-05,
+ "loss": 0.6627,
+ "step": 2331
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1596910936416746e-05,
+ "loss": 0.7108,
+ "step": 2332
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1590526748059453e-05,
+ "loss": 0.7393,
+ "step": 2333
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1584141894542832e-05,
+ "loss": 0.7127,
+ "step": 2334
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1577756378537033e-05,
+ "loss": 0.6516,
+ "step": 2335
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1571370202712484e-05,
+ "loss": 0.669,
+ "step": 2336
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1564983369739884e-05,
+ "loss": 0.6698,
+ "step": 2337
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.155859588229021e-05,
+ "loss": 0.752,
+ "step": 2338
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.155220774303472e-05,
+ "loss": 0.6824,
+ "step": 2339
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1545818954644934e-05,
+ "loss": 0.5069,
+ "step": 2340
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1539429519792647e-05,
+ "loss": 0.6701,
+ "step": 2341
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1533039441149927e-05,
+ "loss": 0.6692,
+ "step": 2342
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1526648721389105e-05,
+ "loss": 0.8447,
+ "step": 2343
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1520257363182787e-05,
+ "loss": 0.7112,
+ "step": 2344
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1513865369203844e-05,
+ "loss": 0.6817,
+ "step": 2345
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1507472742125412e-05,
+ "loss": 0.6452,
+ "step": 2346
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1501079484620887e-05,
+ "loss": 0.7396,
+ "step": 2347
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1494685599363943e-05,
+ "loss": 0.747,
+ "step": 2348
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1488291089028501e-05,
+ "loss": 0.7884,
+ "step": 2349
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1481895956288746e-05,
+ "loss": 0.7886,
+ "step": 2350
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1475500203819135e-05,
+ "loss": 0.7296,
+ "step": 2351
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1469103834294366e-05,
+ "loss": 0.7319,
+ "step": 2352
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1462706850389413e-05,
+ "loss": 0.7127,
+ "step": 2353
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1456309254779498e-05,
+ "loss": 0.7289,
+ "step": 2354
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1449911050140097e-05,
+ "loss": 0.7606,
+ "step": 2355
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1443512239146941e-05,
+ "loss": 0.6147,
+ "step": 2356
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1437112824476025e-05,
+ "loss": 0.7865,
+ "step": 2357
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1430712808803579e-05,
+ "loss": 0.6812,
+ "step": 2358
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.14243121948061e-05,
+ "loss": 0.7423,
+ "step": 2359
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1417910985160331e-05,
+ "loss": 0.7279,
+ "step": 2360
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1411509182543253e-05,
+ "loss": 0.7178,
+ "step": 2361
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1405106789632115e-05,
+ "loss": 0.772,
+ "step": 2362
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1398703809104392e-05,
+ "loss": 0.6029,
+ "step": 2363
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1392300243637819e-05,
+ "loss": 0.7946,
+ "step": 2364
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1385896095910374e-05,
+ "loss": 0.7121,
+ "step": 2365
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.137949136860027e-05,
+ "loss": 0.7148,
+ "step": 2366
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1373086064385974e-05,
+ "loss": 0.7832,
+ "step": 2367
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1366680185946186e-05,
+ "loss": 0.7469,
+ "step": 2368
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1360273735959848e-05,
+ "loss": 0.7711,
+ "step": 2369
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1353866717106137e-05,
+ "loss": 0.7025,
+ "step": 2370
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1347459132064485e-05,
+ "loss": 0.6219,
+ "step": 2371
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1341050983514535e-05,
+ "loss": 0.6812,
+ "step": 2372
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1334642274136186e-05,
+ "loss": 0.7525,
+ "step": 2373
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1328233006609564e-05,
+ "loss": 0.7013,
+ "step": 2374
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1321823183615027e-05,
+ "loss": 0.7339,
+ "step": 2375
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1315412807833167e-05,
+ "loss": 0.7014,
+ "step": 2376
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.130900188194481e-05,
+ "loss": 0.7909,
+ "step": 2377
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1302590408631006e-05,
+ "loss": 0.8003,
+ "step": 2378
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1296178390573038e-05,
+ "loss": 0.7241,
+ "step": 2379
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1289765830452422e-05,
+ "loss": 0.7925,
+ "step": 2380
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1283352730950886e-05,
+ "loss": 0.7437,
+ "step": 2381
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.12769390947504e-05,
+ "loss": 0.6737,
+ "step": 2382
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1270524924533146e-05,
+ "loss": 0.7629,
+ "step": 2383
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1264110222981535e-05,
+ "loss": 0.6255,
+ "step": 2384
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1257694992778199e-05,
+ "loss": 0.7011,
+ "step": 2385
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1251279236605997e-05,
+ "loss": 0.6586,
+ "step": 2386
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1244862957147993e-05,
+ "loss": 0.6889,
+ "step": 2387
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1238446157087484e-05,
+ "loss": 0.7276,
+ "step": 2388
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1232028839107985e-05,
+ "loss": 0.7164,
+ "step": 2389
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1225611005893213e-05,
+ "loss": 0.7072,
+ "step": 2390
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1219192660127117e-05,
+ "loss": 0.6508,
+ "step": 2391
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.121277380449385e-05,
+ "loss": 0.7417,
+ "step": 2392
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1206354441677782e-05,
+ "loss": 0.675,
+ "step": 2393
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1199934574363498e-05,
+ "loss": 0.7788,
+ "step": 2394
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1193514205235789e-05,
+ "loss": 0.6521,
+ "step": 2395
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1187093336979655e-05,
+ "loss": 0.7267,
+ "step": 2396
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1180671972280311e-05,
+ "loss": 0.7326,
+ "step": 2397
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1174250113823173e-05,
+ "loss": 0.7502,
+ "step": 2398
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1167827764293867e-05,
+ "loss": 0.7371,
+ "step": 2399
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1161404926378224e-05,
+ "loss": 0.6499,
+ "step": 2400
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.115498160276228e-05,
+ "loss": 0.7553,
+ "step": 2401
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1148557796132272e-05,
+ "loss": 0.7049,
+ "step": 2402
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1142133509174644e-05,
+ "loss": 0.7138,
+ "step": 2403
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1135708744576031e-05,
+ "loss": 0.7302,
+ "step": 2404
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1129283505023273e-05,
+ "loss": 0.6944,
+ "step": 2405
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.112285779320342e-05,
+ "loss": 0.6787,
+ "step": 2406
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1116431611803696e-05,
+ "loss": 0.6746,
+ "step": 2407
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1110004963511543e-05,
+ "loss": 0.5877,
+ "step": 2408
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1103577851014587e-05,
+ "loss": 0.7257,
+ "step": 2409
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1097150277000647e-05,
+ "loss": 0.6553,
+ "step": 2410
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.109072224415774e-05,
+ "loss": 0.6866,
+ "step": 2411
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1084293755174083e-05,
+ "loss": 0.5847,
+ "step": 2412
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1077864812738061e-05,
+ "loss": 0.6509,
+ "step": 2413
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.107143541953827e-05,
+ "loss": 0.7417,
+ "step": 2414
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1065005578263483e-05,
+ "loss": 0.7605,
+ "step": 2415
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1058575291602665e-05,
+ "loss": 0.717,
+ "step": 2416
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1052144562244967e-05,
+ "loss": 0.7055,
+ "step": 2417
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1045713392879724e-05,
+ "loss": 0.7557,
+ "step": 2418
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1039281786196453e-05,
+ "loss": 0.6674,
+ "step": 2419
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1032849744884854e-05,
+ "loss": 0.723,
+ "step": 2420
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1026417271634821e-05,
+ "loss": 0.7662,
+ "step": 2421
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1019984369136409e-05,
+ "loss": 0.7087,
+ "step": 2422
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1013551040079866e-05,
+ "loss": 0.7268,
+ "step": 2423
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1007117287155612e-05,
+ "loss": 0.6792,
+ "step": 2424
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1000683113054251e-05,
+ "loss": 0.7293,
+ "step": 2425
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.0994248520466555e-05,
+ "loss": 0.7395,
+ "step": 2426
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.0987813512083476e-05,
+ "loss": 0.7925,
+ "step": 2427
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.098137809059614e-05,
+ "loss": 0.7754,
+ "step": 2428
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.0974942258695838e-05,
+ "loss": 0.6661,
+ "step": 2429
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0968506019074048e-05,
+ "loss": 0.7428,
+ "step": 2430
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0962069374422407e-05,
+ "loss": 0.659,
+ "step": 2431
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0955632327432718e-05,
+ "loss": 0.7619,
+ "step": 2432
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0949194880796967e-05,
+ "loss": 0.73,
+ "step": 2433
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0942757037207295e-05,
+ "loss": 0.6889,
+ "step": 2434
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0936318799356009e-05,
+ "loss": 0.7573,
+ "step": 2435
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0929880169935592e-05,
+ "loss": 0.6773,
+ "step": 2436
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0923441151638676e-05,
+ "loss": 0.7541,
+ "step": 2437
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0917001747158064e-05,
+ "loss": 0.7819,
+ "step": 2438
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.091056195918672e-05,
+ "loss": 0.7022,
+ "step": 2439
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0904121790417767e-05,
+ "loss": 0.7972,
+ "step": 2440
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0897681243544492e-05,
+ "loss": 0.7263,
+ "step": 2441
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0891240321260328e-05,
+ "loss": 0.7273,
+ "step": 2442
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0884799026258879e-05,
+ "loss": 0.7594,
+ "step": 2443
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.08783573612339e-05,
+ "loss": 0.629,
+ "step": 2444
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0871915328879294e-05,
+ "loss": 0.6852,
+ "step": 2445
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0865472931889128e-05,
+ "loss": 0.6888,
+ "step": 2446
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.085903017295761e-05,
+ "loss": 0.5645,
+ "step": 2447
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0852587054779113e-05,
+ "loss": 0.7981,
+ "step": 2448
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0846143580048152e-05,
+ "loss": 0.6324,
+ "step": 2449
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0839699751459392e-05,
+ "loss": 0.6149,
+ "step": 2450
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.083325557170764e-05,
+ "loss": 0.7752,
+ "step": 2451
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0826811043487865e-05,
+ "loss": 0.7803,
+ "step": 2452
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0820366169495171e-05,
+ "loss": 0.829,
+ "step": 2453
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0813920952424806e-05,
+ "loss": 0.6872,
+ "step": 2454
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0807475394972164e-05,
+ "loss": 0.7426,
+ "step": 2455
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0801029499832783e-05,
+ "loss": 0.8226,
+ "step": 2456
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0794583269702341e-05,
+ "loss": 0.7343,
+ "step": 2457
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0788136707276652e-05,
+ "loss": 0.7627,
+ "step": 2458
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0781689815251682e-05,
+ "loss": 0.851,
+ "step": 2459
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.077524259632351e-05,
+ "loss": 0.7885,
+ "step": 2460
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0768795053188379e-05,
+ "loss": 0.7048,
+ "step": 2461
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0762347188542657e-05,
+ "loss": 0.7273,
+ "step": 2462
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0755899005082837e-05,
+ "loss": 0.6684,
+ "step": 2463
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0749450505505557e-05,
+ "loss": 0.7254,
+ "step": 2464
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0743001692507586e-05,
+ "loss": 0.7142,
+ "step": 2465
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.073655256878582e-05,
+ "loss": 0.6842,
+ "step": 2466
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0730103137037284e-05,
+ "loss": 0.6433,
+ "step": 2467
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0723653399959142e-05,
+ "loss": 0.6671,
+ "step": 2468
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0717203360248669e-05,
+ "loss": 0.6106,
+ "step": 2469
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0710753020603284e-05,
+ "loss": 0.5917,
+ "step": 2470
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.070430238372052e-05,
+ "loss": 0.6794,
+ "step": 2471
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0697851452298035e-05,
+ "loss": 0.704,
+ "step": 2472
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0691400229033617e-05,
+ "loss": 0.6623,
+ "step": 2473
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.068494871662517e-05,
+ "loss": 0.6023,
+ "step": 2474
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.067849691777072e-05,
+ "loss": 0.7381,
+ "step": 2475
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0672044835168414e-05,
+ "loss": 0.7063,
+ "step": 2476
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0665592471516518e-05,
+ "loss": 0.6768,
+ "step": 2477
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0659139829513413e-05,
+ "loss": 0.7305,
+ "step": 2478
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0652686911857599e-05,
+ "loss": 0.8366,
+ "step": 2479
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0646233721247692e-05,
+ "loss": 0.7292,
+ "step": 2480
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0639780260382417e-05,
+ "loss": 0.6402,
+ "step": 2481
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.063332653196062e-05,
+ "loss": 0.73,
+ "step": 2482
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.062687253868125e-05,
+ "loss": 0.7199,
+ "step": 2483
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0620418283243374e-05,
+ "loss": 0.7149,
+ "step": 2484
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0613963768346167e-05,
+ "loss": 0.5912,
+ "step": 2485
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0607508996688909e-05,
+ "loss": 0.5675,
+ "step": 2486
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.060105397097099e-05,
+ "loss": 0.7335,
+ "step": 2487
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0594598693891904e-05,
+ "loss": 0.781,
+ "step": 2488
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0588143168151257e-05,
+ "loss": 0.7616,
+ "step": 2489
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.058168739644875e-05,
+ "loss": 0.6406,
+ "step": 2490
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0575231381484195e-05,
+ "loss": 0.6616,
+ "step": 2491
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0568775125957495e-05,
+ "loss": 0.6431,
+ "step": 2492
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0562318632568667e-05,
+ "loss": 0.7655,
+ "step": 2493
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0555861904017819e-05,
+ "loss": 0.6742,
+ "step": 2494
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0549404943005157e-05,
+ "loss": 0.6963,
+ "step": 2495
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0542947752230987e-05,
+ "loss": 0.7556,
+ "step": 2496
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0536490334395713e-05,
+ "loss": 0.7575,
+ "step": 2497
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0530032692199827e-05,
+ "loss": 0.6813,
+ "step": 2498
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0523574828343923e-05,
+ "loss": 0.6796,
+ "step": 2499
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0517116745528683e-05,
+ "loss": 0.7944,
+ "step": 2500
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.051065844645488e-05,
+ "loss": 0.6621,
+ "step": 2501
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0504199933823381e-05,
+ "loss": 0.6693,
+ "step": 2502
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.049774121033514e-05,
+ "loss": 0.6615,
+ "step": 2503
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0491282278691197e-05,
+ "loss": 0.8034,
+ "step": 2504
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0484823141592682e-05,
+ "loss": 0.6772,
+ "step": 2505
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0478363801740811e-05,
+ "loss": 0.6936,
+ "step": 2506
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0471904261836888e-05,
+ "loss": 0.6805,
+ "step": 2507
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0465444524582288e-05,
+ "loss": 0.6253,
+ "step": 2508
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.045898459267849e-05,
+ "loss": 0.7539,
+ "step": 2509
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0452524468827028e-05,
+ "loss": 0.6628,
+ "step": 2510
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0446064155729538e-05,
+ "loss": 0.7457,
+ "step": 2511
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0439603656087729e-05,
+ "loss": 0.6745,
+ "step": 2512
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0433142972603378e-05,
+ "loss": 0.6769,
+ "step": 2513
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0426682107978354e-05,
+ "loss": 0.7162,
+ "step": 2514
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0420221064914592e-05,
+ "loss": 0.7079,
+ "step": 2515
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0413759846114106e-05,
+ "loss": 0.7353,
+ "step": 2516
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0407298454278983e-05,
+ "loss": 0.7668,
+ "step": 2517
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0400836892111379e-05,
+ "loss": 0.7267,
+ "step": 2518
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0394375162313519e-05,
+ "loss": 0.7039,
+ "step": 2519
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0387913267587713e-05,
+ "loss": 0.6408,
+ "step": 2520
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0381451210636321e-05,
+ "loss": 0.6815,
+ "step": 2521
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0374988994161783e-05,
+ "loss": 0.756,
+ "step": 2522
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0368526620866604e-05,
+ "loss": 0.7464,
+ "step": 2523
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0362064093453348e-05,
+ "loss": 0.6808,
+ "step": 2524
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0355601414624653e-05,
+ "loss": 0.6814,
+ "step": 2525
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0349138587083213e-05,
+ "loss": 0.7902,
+ "step": 2526
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0342675613531785e-05,
+ "loss": 0.5937,
+ "step": 2527
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.033621249667319e-05,
+ "loss": 0.6521,
+ "step": 2528
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0329749239210311e-05,
+ "loss": 0.6942,
+ "step": 2529
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0323285843846085e-05,
+ "loss": 0.7745,
+ "step": 2530
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0316822313283504e-05,
+ "loss": 0.5454,
+ "step": 2531
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0310358650225627e-05,
+ "loss": 0.679,
+ "step": 2532
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0303894857375557e-05,
+ "loss": 0.7673,
+ "step": 2533
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0297430937436456e-05,
+ "loss": 0.7579,
+ "step": 2534
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0290966893111547e-05,
+ "loss": 0.7572,
+ "step": 2535
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0284502727104092e-05,
+ "loss": 0.653,
+ "step": 2536
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0278038442117405e-05,
+ "loss": 0.5723,
+ "step": 2537
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0271574040854863e-05,
+ "loss": 0.7241,
+ "step": 2538
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0265109526019878e-05,
+ "loss": 0.7303,
+ "step": 2539
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0258644900315918e-05,
+ "loss": 0.7841,
+ "step": 2540
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.025218016644649e-05,
+ "loss": 0.7552,
+ "step": 2541
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0245715327115146e-05,
+ "loss": 0.7244,
+ "step": 2542
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0239250385025496e-05,
+ "loss": 0.6333,
+ "step": 2543
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.023278534288118e-05,
+ "loss": 0.7093,
+ "step": 2544
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0226320203385877e-05,
+ "loss": 0.6701,
+ "step": 2545
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.021985496924332e-05,
+ "loss": 0.6559,
+ "step": 2546
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0213389643157268e-05,
+ "loss": 0.7275,
+ "step": 2547
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0206924227831527e-05,
+ "loss": 0.6888,
+ "step": 2548
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.020045872596994e-05,
+ "loss": 0.6912,
+ "step": 2549
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0193993140276381e-05,
+ "loss": 0.6994,
+ "step": 2550
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0187527473454764e-05,
+ "loss": 0.83,
+ "step": 2551
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0181061728209034e-05,
+ "loss": 0.6317,
+ "step": 2552
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0174595907243172e-05,
+ "loss": 0.7559,
+ "step": 2553
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0168130013261186e-05,
+ "loss": 0.6831,
+ "step": 2554
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.016166404896712e-05,
+ "loss": 0.4863,
+ "step": 2555
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0155198017065039e-05,
+ "loss": 0.6854,
+ "step": 2556
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.014873192025905e-05,
+ "loss": 0.8049,
+ "step": 2557
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0142265761253272e-05,
+ "loss": 0.7132,
+ "step": 2558
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0135799542751861e-05,
+ "loss": 0.7488,
+ "step": 2559
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.012933326745899e-05,
+ "loss": 0.6545,
+ "step": 2560
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0122866938078863e-05,
+ "loss": 0.7,
+ "step": 2561
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0116400557315702e-05,
+ "loss": 0.5921,
+ "step": 2562
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0109934127873751e-05,
+ "loss": 0.7088,
+ "step": 2563
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0103467652457276e-05,
+ "loss": 0.5653,
+ "step": 2564
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0097001133770561e-05,
+ "loss": 0.7278,
+ "step": 2565
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0090534574517907e-05,
+ "loss": 0.7838,
+ "step": 2566
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0084067977403637e-05,
+ "loss": 0.7439,
+ "step": 2567
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0077601345132081e-05,
+ "loss": 0.7574,
+ "step": 2568
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0071134680407593e-05,
+ "loss": 0.6399,
+ "step": 2569
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0064667985934538e-05,
+ "loss": 0.5516,
+ "step": 2570
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0058201264417285e-05,
+ "loss": 0.7704,
+ "step": 2571
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0051734518560225e-05,
+ "loss": 0.6673,
+ "step": 2572
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0045267751067758e-05,
+ "loss": 0.7448,
+ "step": 2573
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0038800964644285e-05,
+ "loss": 0.6523,
+ "step": 2574
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0032334161994225e-05,
+ "loss": 0.7499,
+ "step": 2575
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0025867345821998e-05,
+ "loss": 0.8383,
+ "step": 2576
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0019400518832028e-05,
+ "loss": 0.6591,
+ "step": 2577
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0012933683728745e-05,
+ "loss": 0.7283,
+ "step": 2578
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0006466843216593e-05,
+ "loss": 0.6512,
+ "step": 2579
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 1e-05,
+ "loss": 0.7267,
+ "step": 2580
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.993533156783409e-06,
+ "loss": 0.7142,
+ "step": 2581
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.987066316271257e-06,
+ "loss": 0.5637,
+ "step": 2582
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.980599481167977e-06,
+ "loss": 0.739,
+ "step": 2583
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.974132654178008e-06,
+ "loss": 0.6915,
+ "step": 2584
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.967665838005777e-06,
+ "loss": 0.7767,
+ "step": 2585
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.961199035355717e-06,
+ "loss": 0.664,
+ "step": 2586
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.954732248932243e-06,
+ "loss": 0.721,
+ "step": 2587
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.948265481439778e-06,
+ "loss": 0.6673,
+ "step": 2588
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.941798735582718e-06,
+ "loss": 0.6951,
+ "step": 2589
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.935332014065467e-06,
+ "loss": 0.8158,
+ "step": 2590
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.92886531959241e-06,
+ "loss": 0.7022,
+ "step": 2591
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.922398654867922e-06,
+ "loss": 0.6818,
+ "step": 2592
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.915932022596368e-06,
+ "loss": 0.6545,
+ "step": 2593
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.909465425482093e-06,
+ "loss": 0.6641,
+ "step": 2594
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.90299886622944e-06,
+ "loss": 0.6121,
+ "step": 2595
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.896532347542728e-06,
+ "loss": 0.6588,
+ "step": 2596
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.890065872126252e-06,
+ "loss": 0.6945,
+ "step": 2597
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.883599442684301e-06,
+ "loss": 0.7603,
+ "step": 2598
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.877133061921138e-06,
+ "loss": 0.6892,
+ "step": 2599
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.870666732541014e-06,
+ "loss": 0.7209,
+ "step": 2600
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.864200457248144e-06,
+ "loss": 0.6827,
+ "step": 2601
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.857734238746731e-06,
+ "loss": 0.5941,
+ "step": 2602
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.851268079740951e-06,
+ "loss": 0.6995,
+ "step": 2603
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.84480198293496e-06,
+ "loss": 0.671,
+ "step": 2604
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.838335951032883e-06,
+ "loss": 0.689,
+ "step": 2605
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.831869986738816e-06,
+ "loss": 0.7205,
+ "step": 2606
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.82540409275683e-06,
+ "loss": 0.6568,
+ "step": 2607
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.81893827179097e-06,
+ "loss": 0.6036,
+ "step": 2608
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.81247252654524e-06,
+ "loss": 0.6996,
+ "step": 2609
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.806006859723624e-06,
+ "loss": 0.6527,
+ "step": 2610
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.799541274030065e-06,
+ "loss": 0.7136,
+ "step": 2611
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.793075772168475e-06,
+ "loss": 0.6823,
+ "step": 2612
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.786610356842734e-06,
+ "loss": 0.6106,
+ "step": 2613
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.780145030756684e-06,
+ "loss": 0.6928,
+ "step": 2614
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.773679796614124e-06,
+ "loss": 0.6371,
+ "step": 2615
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.767214657118823e-06,
+ "loss": 0.6797,
+ "step": 2616
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.760749614974506e-06,
+ "loss": 0.7142,
+ "step": 2617
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.754284672884856e-06,
+ "loss": 0.6496,
+ "step": 2618
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.747819833553516e-06,
+ "loss": 0.6297,
+ "step": 2619
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.741355099684087e-06,
+ "loss": 0.6274,
+ "step": 2620
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.734890473980122e-06,
+ "loss": 0.6958,
+ "step": 2621
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.728425959145139e-06,
+ "loss": 0.6608,
+ "step": 2622
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.721961557882597e-06,
+ "loss": 0.6732,
+ "step": 2623
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.715497272895913e-06,
+ "loss": 0.6615,
+ "step": 2624
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.709033106888456e-06,
+ "loss": 0.6497,
+ "step": 2625
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.702569062563545e-06,
+ "loss": 0.77,
+ "step": 2626
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.696105142624448e-06,
+ "loss": 0.8284,
+ "step": 2627
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.689641349774378e-06,
+ "loss": 0.6442,
+ "step": 2628
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.683177686716501e-06,
+ "loss": 0.7367,
+ "step": 2629
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.676714156153917e-06,
+ "loss": 0.6955,
+ "step": 2630
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.670250760789692e-06,
+ "loss": 0.6742,
+ "step": 2631
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.663787503326811e-06,
+ "loss": 0.6622,
+ "step": 2632
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.657324386468217e-06,
+ "loss": 0.8391,
+ "step": 2633
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.650861412916789e-06,
+ "loss": 0.7613,
+ "step": 2634
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.644398585375352e-06,
+ "loss": 0.7279,
+ "step": 2635
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.637935906546655e-06,
+ "loss": 0.8207,
+ "step": 2636
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.6314733791334e-06,
+ "loss": 0.713,
+ "step": 2637
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.62501100583822e-06,
+ "loss": 0.6702,
+ "step": 2638
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.61854878936368e-06,
+ "loss": 0.7885,
+ "step": 2639
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.61208673241229e-06,
+ "loss": 0.6378,
+ "step": 2640
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.605624837686483e-06,
+ "loss": 0.762,
+ "step": 2641
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.599163107888624e-06,
+ "loss": 0.7558,
+ "step": 2642
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.592701545721022e-06,
+ "loss": 0.6436,
+ "step": 2643
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.586240153885896e-06,
+ "loss": 0.6649,
+ "step": 2644
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.57977893508541e-06,
+ "loss": 0.6321,
+ "step": 2645
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.573317892021649e-06,
+ "loss": 0.7392,
+ "step": 2646
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.566857027396627e-06,
+ "loss": 0.6628,
+ "step": 2647
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.560396343912273e-06,
+ "loss": 0.5832,
+ "step": 2648
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.553935844270464e-06,
+ "loss": 0.7607,
+ "step": 2649
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.547475531172973e-06,
+ "loss": 0.7433,
+ "step": 2650
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.541015407321514e-06,
+ "loss": 0.746,
+ "step": 2651
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.534555475417713e-06,
+ "loss": 0.716,
+ "step": 2652
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.528095738163117e-06,
+ "loss": 0.7171,
+ "step": 2653
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.52163619825919e-06,
+ "loss": 0.6367,
+ "step": 2654
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.515176858407323e-06,
+ "loss": 0.6696,
+ "step": 2655
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.508717721308808e-06,
+ "loss": 0.6868,
+ "step": 2656
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.502258789664865e-06,
+ "loss": 0.6342,
+ "step": 2657
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.49580006617662e-06,
+ "loss": 0.751,
+ "step": 2658
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.489341553545121e-06,
+ "loss": 0.7304,
+ "step": 2659
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.482883254471319e-06,
+ "loss": 0.7574,
+ "step": 2660
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.47642517165608e-06,
+ "loss": 0.6878,
+ "step": 2661
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.469967307800176e-06,
+ "loss": 0.7206,
+ "step": 2662
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.46350966560429e-06,
+ "loss": 0.6046,
+ "step": 2663
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.457052247769018e-06,
+ "loss": 0.6267,
+ "step": 2664
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.450595056994848e-06,
+ "loss": 0.6148,
+ "step": 2665
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.444138095982186e-06,
+ "loss": 0.6714,
+ "step": 2666
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.437681367431335e-06,
+ "loss": 0.5613,
+ "step": 2667
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.431224874042506e-06,
+ "loss": 0.6845,
+ "step": 2668
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.424768618515809e-06,
+ "loss": 0.7024,
+ "step": 2669
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.418312603551253e-06,
+ "loss": 0.7157,
+ "step": 2670
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.411856831848745e-06,
+ "loss": 0.7006,
+ "step": 2671
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.4054013061081e-06,
+ "loss": 0.7497,
+ "step": 2672
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.398946029029015e-06,
+ "loss": 0.8383,
+ "step": 2673
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.392491003311097e-06,
+ "loss": 0.7002,
+ "step": 2674
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.386036231653836e-06,
+ "loss": 0.663,
+ "step": 2675
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.379581716756626e-06,
+ "loss": 0.7783,
+ "step": 2676
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.37312746131875e-06,
+ "loss": 0.7413,
+ "step": 2677
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.366673468039384e-06,
+ "loss": 0.7302,
+ "step": 2678
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.360219739617585e-06,
+ "loss": 0.6281,
+ "step": 2679
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.353766278752311e-06,
+ "loss": 0.6667,
+ "step": 2680
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.347313088142403e-06,
+ "loss": 0.5947,
+ "step": 2681
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.340860170486592e-06,
+ "loss": 0.5705,
+ "step": 2682
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.334407528483487e-06,
+ "loss": 0.6961,
+ "step": 2683
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.32795516483159e-06,
+ "loss": 0.7794,
+ "step": 2684
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.321503082229281e-06,
+ "loss": 0.6041,
+ "step": 2685
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.315051283374831e-06,
+ "loss": 0.781,
+ "step": 2686
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.308599770966385e-06,
+ "loss": 0.6969,
+ "step": 2687
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.302148547701966e-06,
+ "loss": 0.7034,
+ "step": 2688
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.295697616279483e-06,
+ "loss": 0.768,
+ "step": 2689
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.289246979396719e-06,
+ "loss": 0.6534,
+ "step": 2690
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.282796639751333e-06,
+ "loss": 0.6794,
+ "step": 2691
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.276346600040863e-06,
+ "loss": 0.739,
+ "step": 2692
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.269896862962718e-06,
+ "loss": 0.6498,
+ "step": 2693
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.263447431214182e-06,
+ "loss": 0.7846,
+ "step": 2694
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.256998307492415e-06,
+ "loss": 0.6312,
+ "step": 2695
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.250549494494444e-06,
+ "loss": 0.7946,
+ "step": 2696
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.244100994917167e-06,
+ "loss": 0.693,
+ "step": 2697
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.237652811457347e-06,
+ "loss": 0.758,
+ "step": 2698
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.231204946811624e-06,
+ "loss": 0.5519,
+ "step": 2699
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.224757403676493e-06,
+ "loss": 0.6072,
+ "step": 2700
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.218310184748325e-06,
+ "loss": 0.659,
+ "step": 2701
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.21186329272335e-06,
+ "loss": 0.598,
+ "step": 2702
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.20541673029766e-06,
+ "loss": 0.5944,
+ "step": 2703
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.198970500167218e-06,
+ "loss": 0.6332,
+ "step": 2704
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.192524605027838e-06,
+ "loss": 0.7523,
+ "step": 2705
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.186079047575197e-06,
+ "loss": 0.6739,
+ "step": 2706
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.179633830504832e-06,
+ "loss": 0.7886,
+ "step": 2707
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.173188956512138e-06,
+ "loss": 0.7171,
+ "step": 2708
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.166744428292362e-06,
+ "loss": 0.6623,
+ "step": 2709
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.160300248540613e-06,
+ "loss": 0.8386,
+ "step": 2710
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.153856419951854e-06,
+ "loss": 0.7787,
+ "step": 2711
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.147412945220887e-06,
+ "loss": 0.7845,
+ "step": 2712
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.140969827042392e-06,
+ "loss": 0.7379,
+ "step": 2713
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.134527068110877e-06,
+ "loss": 0.7715,
+ "step": 2714
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.128084671120709e-06,
+ "loss": 0.686,
+ "step": 2715
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.121642638766103e-06,
+ "loss": 0.7067,
+ "step": 2716
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.115200973741123e-06,
+ "loss": 0.7905,
+ "step": 2717
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.108759678739675e-06,
+ "loss": 0.6369,
+ "step": 2718
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.102318756455513e-06,
+ "loss": 0.8133,
+ "step": 2719
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.095878209582237e-06,
+ "loss": 0.7028,
+ "step": 2720
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.089438040813282e-06,
+ "loss": 0.6511,
+ "step": 2721
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.082998252841939e-06,
+ "loss": 0.7028,
+ "step": 2722
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.076558848361328e-06,
+ "loss": 0.6879,
+ "step": 2723
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.07011983006441e-06,
+ "loss": 0.7518,
+ "step": 2724
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.063681200643993e-06,
+ "loss": 0.6518,
+ "step": 2725
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.057242962792709e-06,
+ "loss": 0.7482,
+ "step": 2726
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.050805119203035e-06,
+ "loss": 0.7234,
+ "step": 2727
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.044367672567283e-06,
+ "loss": 0.6976,
+ "step": 2728
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.037930625577598e-06,
+ "loss": 0.6273,
+ "step": 2729
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 9.031493980925951e-06,
+ "loss": 0.7574,
+ "step": 2730
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 9.025057741304164e-06,
+ "loss": 0.5332,
+ "step": 2731
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 9.018621909403864e-06,
+ "loss": 0.7281,
+ "step": 2732
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 9.012186487916526e-06,
+ "loss": 0.7358,
+ "step": 2733
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 9.00575147953345e-06,
+ "loss": 0.678,
+ "step": 2734
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.999316886945752e-06,
+ "loss": 0.6839,
+ "step": 2735
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.99288271284439e-06,
+ "loss": 0.7702,
+ "step": 2736
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.986448959920137e-06,
+ "loss": 0.5721,
+ "step": 2737
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.980015630863595e-06,
+ "loss": 0.7195,
+ "step": 2738
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.97358272836518e-06,
+ "loss": 0.5454,
+ "step": 2739
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.967150255115144e-06,
+ "loss": 0.7546,
+ "step": 2740
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.96071821380355e-06,
+ "loss": 0.781,
+ "step": 2741
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.954286607120279e-06,
+ "loss": 0.6779,
+ "step": 2742
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.947855437755036e-06,
+ "loss": 0.6448,
+ "step": 2743
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.941424708397336e-06,
+ "loss": 0.7002,
+ "step": 2744
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.934994421736519e-06,
+ "loss": 0.6421,
+ "step": 2745
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.928564580461736e-06,
+ "loss": 0.726,
+ "step": 2746
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.922135187261944e-06,
+ "loss": 0.7028,
+ "step": 2747
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.91570624482592e-06,
+ "loss": 0.7391,
+ "step": 2748
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.90927775584226e-06,
+ "loss": 0.7155,
+ "step": 2749
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.902849722999356e-06,
+ "loss": 0.6991,
+ "step": 2750
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.896422148985418e-06,
+ "loss": 0.7394,
+ "step": 2751
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.88999503648846e-06,
+ "loss": 0.683,
+ "step": 2752
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.883568388196308e-06,
+ "loss": 0.6972,
+ "step": 2753
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.877142206796585e-06,
+ "loss": 0.6792,
+ "step": 2754
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.87071649497673e-06,
+ "loss": 0.7524,
+ "step": 2755
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.864291255423976e-06,
+ "loss": 0.5589,
+ "step": 2756
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.85786649082536e-06,
+ "loss": 0.7137,
+ "step": 2757
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.851442203867728e-06,
+ "loss": 0.675,
+ "step": 2758
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.845018397237722e-06,
+ "loss": 0.8424,
+ "step": 2759
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.838595073621778e-06,
+ "loss": 0.7306,
+ "step": 2760
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.832172235706137e-06,
+ "loss": 0.8027,
+ "step": 2761
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.825749886176832e-06,
+ "loss": 0.7358,
+ "step": 2762
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.819328027719692e-06,
+ "loss": 0.6168,
+ "step": 2763
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.812906663020348e-06,
+ "loss": 0.8429,
+ "step": 2764
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.806485794764215e-06,
+ "loss": 0.7004,
+ "step": 2765
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.800065425636502e-06,
+ "loss": 0.7432,
+ "step": 2766
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.793645558322218e-06,
+ "loss": 0.7194,
+ "step": 2767
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.787226195506152e-06,
+ "loss": 0.7045,
+ "step": 2768
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.780807339872886e-06,
+ "loss": 0.7373,
+ "step": 2769
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.774388994106789e-06,
+ "loss": 0.8012,
+ "step": 2770
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.76797116089202e-06,
+ "loss": 0.7311,
+ "step": 2771
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.761553842912519e-06,
+ "loss": 0.6812,
+ "step": 2772
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.755137042852012e-06,
+ "loss": 0.7259,
+ "step": 2773
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.74872076339401e-06,
+ "loss": 0.6457,
+ "step": 2774
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.742305007221801e-06,
+ "loss": 0.5565,
+ "step": 2775
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.735889777018466e-06,
+ "loss": 0.7094,
+ "step": 2776
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.729475075466856e-06,
+ "loss": 0.7607,
+ "step": 2777
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.723060905249603e-06,
+ "loss": 0.7231,
+ "step": 2778
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.716647269049115e-06,
+ "loss": 0.7249,
+ "step": 2779
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.71023416954758e-06,
+ "loss": 0.7157,
+ "step": 2780
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.703821609426963e-06,
+ "loss": 0.6274,
+ "step": 2781
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.697409591368997e-06,
+ "loss": 0.6954,
+ "step": 2782
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.690998118055193e-06,
+ "loss": 0.7452,
+ "step": 2783
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.684587192166835e-06,
+ "loss": 0.652,
+ "step": 2784
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.678176816384975e-06,
+ "loss": 0.7341,
+ "step": 2785
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.67176699339044e-06,
+ "loss": 0.723,
+ "step": 2786
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.665357725863816e-06,
+ "loss": 0.7781,
+ "step": 2787
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.658949016485467e-06,
+ "loss": 0.7021,
+ "step": 2788
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.65254086793552e-06,
+ "loss": 0.6681,
+ "step": 2789
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.646133282893865e-06,
+ "loss": 0.7315,
+ "step": 2790
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.639726264040159e-06,
+ "loss": 0.7019,
+ "step": 2791
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.633319814053819e-06,
+ "loss": 0.7439,
+ "step": 2792
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.626913935614026e-06,
+ "loss": 0.6421,
+ "step": 2793
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.62050863139973e-06,
+ "loss": 0.558,
+ "step": 2794
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.61410390408963e-06,
+ "loss": 0.7204,
+ "step": 2795
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.607699756362183e-06,
+ "loss": 0.7973,
+ "step": 2796
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.601296190895611e-06,
+ "loss": 0.631,
+ "step": 2797
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.594893210367889e-06,
+ "loss": 0.7566,
+ "step": 2798
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.588490817456749e-06,
+ "loss": 0.7897,
+ "step": 2799
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.582089014839674e-06,
+ "loss": 0.6943,
+ "step": 2800
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.575687805193902e-06,
+ "loss": 0.779,
+ "step": 2801
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.569287191196426e-06,
+ "loss": 0.6671,
+ "step": 2802
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.562887175523977e-06,
+ "loss": 0.7088,
+ "step": 2803
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.55648776085306e-06,
+ "loss": 0.7109,
+ "step": 2804
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.550088949859906e-06,
+ "loss": 0.7174,
+ "step": 2805
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.543690745220505e-06,
+ "loss": 0.7587,
+ "step": 2806
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.53729314961059e-06,
+ "loss": 0.5587,
+ "step": 2807
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.530896165705637e-06,
+ "loss": 0.6228,
+ "step": 2808
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.52449979618087e-06,
+ "loss": 0.682,
+ "step": 2809
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.518104043711257e-06,
+ "loss": 0.6929,
+ "step": 2810
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.511708910971506e-06,
+ "loss": 0.7115,
+ "step": 2811
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.505314400636057e-06,
+ "loss": 0.7074,
+ "step": 2812
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.498920515379112e-06,
+ "loss": 0.7647,
+ "step": 2813
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.492527257874589e-06,
+ "loss": 0.7607,
+ "step": 2814
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.486134630796157e-06,
+ "loss": 0.6397,
+ "step": 2815
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.479742636817216e-06,
+ "loss": 0.7436,
+ "step": 2816
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.4733512786109e-06,
+ "loss": 0.665,
+ "step": 2817
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.466960558850078e-06,
+ "loss": 0.6297,
+ "step": 2818
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.460570480207358e-06,
+ "loss": 0.6316,
+ "step": 2819
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.454181045355071e-06,
+ "loss": 0.7432,
+ "step": 2820
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.447792256965282e-06,
+ "loss": 0.7235,
+ "step": 2821
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.441404117709791e-06,
+ "loss": 0.5809,
+ "step": 2822
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.43501663026012e-06,
+ "loss": 0.7387,
+ "step": 2823
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.428629797287519e-06,
+ "loss": 0.6454,
+ "step": 2824
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.42224362146297e-06,
+ "loss": 0.7263,
+ "step": 2825
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.415858105457171e-06,
+ "loss": 0.6805,
+ "step": 2826
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.409473251940549e-06,
+ "loss": 0.6942,
+ "step": 2827
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.40308906358326e-06,
+ "loss": 0.6139,
+ "step": 2828
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.39670554305517e-06,
+ "loss": 0.6581,
+ "step": 2829
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.39032269302587e-06,
+ "loss": 0.75,
+ "step": 2830
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.383940516164677e-06,
+ "loss": 0.6194,
+ "step": 2831
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.37755901514062e-06,
+ "loss": 0.7525,
+ "step": 2832
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.371178192622445e-06,
+ "loss": 0.7378,
+ "step": 2833
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.36479805127862e-06,
+ "loss": 0.7187,
+ "step": 2834
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.358418593777322e-06,
+ "loss": 0.7238,
+ "step": 2835
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.352039822786443e-06,
+ "loss": 0.6152,
+ "step": 2836
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.345661740973592e-06,
+ "loss": 0.7192,
+ "step": 2837
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.339284351006086e-06,
+ "loss": 0.693,
+ "step": 2838
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.332907655550948e-06,
+ "loss": 0.6545,
+ "step": 2839
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.326531657274926e-06,
+ "loss": 0.6718,
+ "step": 2840
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.32015635884446e-06,
+ "loss": 0.6303,
+ "step": 2841
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.31378176292571e-06,
+ "loss": 0.6955,
+ "step": 2842
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.307407872184527e-06,
+ "loss": 0.6407,
+ "step": 2843
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.301034689286485e-06,
+ "loss": 0.6884,
+ "step": 2844
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.294662216896848e-06,
+ "loss": 0.6987,
+ "step": 2845
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.288290457680593e-06,
+ "loss": 0.5967,
+ "step": 2846
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.281919414302388e-06,
+ "loss": 0.5829,
+ "step": 2847
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.275549089426608e-06,
+ "loss": 0.6625,
+ "step": 2848
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.269179485717331e-06,
+ "loss": 0.7214,
+ "step": 2849
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.262810605838329e-06,
+ "loss": 0.721,
+ "step": 2850
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.256442452453073e-06,
+ "loss": 0.6028,
+ "step": 2851
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.250075028224723e-06,
+ "loss": 0.6863,
+ "step": 2852
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.243708335816145e-06,
+ "loss": 0.6745,
+ "step": 2853
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.237342377889893e-06,
+ "loss": 0.7504,
+ "step": 2854
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.230977157108213e-06,
+ "loss": 0.6141,
+ "step": 2855
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.224612676133046e-06,
+ "loss": 0.6527,
+ "step": 2856
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.218248937626018e-06,
+ "loss": 0.7025,
+ "step": 2857
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.211885944248456e-06,
+ "loss": 0.7586,
+ "step": 2858
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.205523698661365e-06,
+ "loss": 0.7052,
+ "step": 2859
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.19916220352544e-06,
+ "loss": 0.7196,
+ "step": 2860
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.19280146150106e-06,
+ "loss": 0.6953,
+ "step": 2861
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.186441475248295e-06,
+ "loss": 0.5742,
+ "step": 2862
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.180082247426895e-06,
+ "loss": 0.7686,
+ "step": 2863
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.17372378069629e-06,
+ "loss": 0.6455,
+ "step": 2864
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.167366077715598e-06,
+ "loss": 0.6779,
+ "step": 2865
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.161009141143612e-06,
+ "loss": 0.7242,
+ "step": 2866
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.15465297363881e-06,
+ "loss": 0.77,
+ "step": 2867
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.148297577859348e-06,
+ "loss": 0.6886,
+ "step": 2868
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.141942956463049e-06,
+ "loss": 0.6392,
+ "step": 2869
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.135589112107423e-06,
+ "loss": 0.7733,
+ "step": 2870
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.129236047449654e-06,
+ "loss": 0.6244,
+ "step": 2871
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.1228837651466e-06,
+ "loss": 0.7681,
+ "step": 2872
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.116532267854781e-06,
+ "loss": 0.6015,
+ "step": 2873
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.110181558230405e-06,
+ "loss": 0.6517,
+ "step": 2874
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.103831638929337e-06,
+ "loss": 0.7023,
+ "step": 2875
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.097482512607122e-06,
+ "loss": 0.7672,
+ "step": 2876
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.091134181918971e-06,
+ "loss": 0.5861,
+ "step": 2877
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.084786649519754e-06,
+ "loss": 0.7113,
+ "step": 2878
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.078439918064016e-06,
+ "loss": 0.7946,
+ "step": 2879
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.072093990205963e-06,
+ "loss": 0.7276,
+ "step": 2880
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.065748868599471e-06,
+ "loss": 0.5855,
+ "step": 2881
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.059404555898068e-06,
+ "loss": 0.845,
+ "step": 2882
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.053061054754952e-06,
+ "loss": 0.6599,
+ "step": 2883
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.046718367822976e-06,
+ "loss": 0.6109,
+ "step": 2884
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.04037649775466e-06,
+ "loss": 0.678,
+ "step": 2885
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.034035447202182e-06,
+ "loss": 0.7608,
+ "step": 2886
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.027695218817366e-06,
+ "loss": 0.6596,
+ "step": 2887
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.021355815251703e-06,
+ "loss": 0.6624,
+ "step": 2888
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.015017239156338e-06,
+ "loss": 0.8112,
+ "step": 2889
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.008679493182061e-06,
+ "loss": 0.5755,
+ "step": 2890
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.002342579979327e-06,
+ "loss": 0.6669,
+ "step": 2891
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.996006502198234e-06,
+ "loss": 0.703,
+ "step": 2892
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.989671262488534e-06,
+ "loss": 0.7542,
+ "step": 2893
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.983336863499634e-06,
+ "loss": 0.6516,
+ "step": 2894
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.977003307880579e-06,
+ "loss": 0.709,
+ "step": 2895
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.970670598280066e-06,
+ "loss": 0.6982,
+ "step": 2896
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.964338737346441e-06,
+ "loss": 0.682,
+ "step": 2897
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.95800772772769e-06,
+ "loss": 0.7245,
+ "step": 2898
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.951677572071446e-06,
+ "loss": 0.7783,
+ "step": 2899
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.945348273024985e-06,
+ "loss": 0.7308,
+ "step": 2900
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.939019833235223e-06,
+ "loss": 0.6894,
+ "step": 2901
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.932692255348711e-06,
+ "loss": 0.6547,
+ "step": 2902
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.926365542011662e-06,
+ "loss": 0.7124,
+ "step": 2903
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.9200396958699e-06,
+ "loss": 0.6974,
+ "step": 2904
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.9137147195689e-06,
+ "loss": 0.6864,
+ "step": 2905
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.907390615753771e-06,
+ "loss": 0.7092,
+ "step": 2906
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.901067387069264e-06,
+ "loss": 0.7943,
+ "step": 2907
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.894745036159747e-06,
+ "loss": 0.5598,
+ "step": 2908
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.888423565669236e-06,
+ "loss": 0.6385,
+ "step": 2909
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.882102978241377e-06,
+ "loss": 0.7457,
+ "step": 2910
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.875783276519435e-06,
+ "loss": 0.6871,
+ "step": 2911
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.869464463146326e-06,
+ "loss": 0.7629,
+ "step": 2912
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.863146540764572e-06,
+ "loss": 0.711,
+ "step": 2913
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.856829512016336e-06,
+ "loss": 0.7309,
+ "step": 2914
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.850513379543405e-06,
+ "loss": 0.7435,
+ "step": 2915
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.844198145987187e-06,
+ "loss": 0.5796,
+ "step": 2916
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.837883813988717e-06,
+ "loss": 0.5493,
+ "step": 2917
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.831570386188654e-06,
+ "loss": 0.6895,
+ "step": 2918
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.825257865227278e-06,
+ "loss": 0.6742,
+ "step": 2919
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.818946253744484e-06,
+ "loss": 0.6679,
+ "step": 2920
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.812635554379802e-06,
+ "loss": 0.7683,
+ "step": 2921
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.806325769772362e-06,
+ "loss": 0.6986,
+ "step": 2922
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.800016902560924e-06,
+ "loss": 0.6461,
+ "step": 2923
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.793708955383862e-06,
+ "loss": 0.805,
+ "step": 2924
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.787401930879159e-06,
+ "loss": 0.6857,
+ "step": 2925
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.78109583168442e-06,
+ "loss": 0.548,
+ "step": 2926
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.774790660436857e-06,
+ "loss": 0.72,
+ "step": 2927
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.768486419773301e-06,
+ "loss": 0.6133,
+ "step": 2928
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.762183112330182e-06,
+ "loss": 0.739,
+ "step": 2929
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.75588074074356e-06,
+ "loss": 0.6247,
+ "step": 2930
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.74957930764908e-06,
+ "loss": 0.6965,
+ "step": 2931
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.743278815682009e-06,
+ "loss": 0.7856,
+ "step": 2932
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.736979267477218e-06,
+ "loss": 0.7917,
+ "step": 2933
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.73068066566918e-06,
+ "loss": 0.7544,
+ "step": 2934
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.724383012891976e-06,
+ "loss": 0.6948,
+ "step": 2935
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.718086311779289e-06,
+ "loss": 0.666,
+ "step": 2936
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.7117905649644e-06,
+ "loss": 0.611,
+ "step": 2937
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.705495775080198e-06,
+ "loss": 0.6999,
+ "step": 2938
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.699201944759164e-06,
+ "loss": 0.5833,
+ "step": 2939
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.692909076633387e-06,
+ "loss": 0.7616,
+ "step": 2940
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.686617173334547e-06,
+ "loss": 0.7329,
+ "step": 2941
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.680326237493925e-06,
+ "loss": 0.7632,
+ "step": 2942
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.674036271742388e-06,
+ "loss": 0.7169,
+ "step": 2943
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.667747278710407e-06,
+ "loss": 0.621,
+ "step": 2944
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.661459261028045e-06,
+ "loss": 0.6987,
+ "step": 2945
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.655172221324953e-06,
+ "loss": 0.8262,
+ "step": 2946
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.648886162230373e-06,
+ "loss": 0.6441,
+ "step": 2947
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.64260108637314e-06,
+ "loss": 0.6532,
+ "step": 2948
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.63631699638168e-06,
+ "loss": 0.6824,
+ "step": 2949
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.630033894884005e-06,
+ "loss": 0.7688,
+ "step": 2950
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.623751784507707e-06,
+ "loss": 0.7049,
+ "step": 2951
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.617470667879969e-06,
+ "loss": 0.6407,
+ "step": 2952
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.6111905476275606e-06,
+ "loss": 0.628,
+ "step": 2953
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.6049114263768335e-06,
+ "loss": 0.7446,
+ "step": 2954
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.598633306753715e-06,
+ "loss": 0.6844,
+ "step": 2955
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.592356191383723e-06,
+ "loss": 0.7539,
+ "step": 2956
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.586080082891949e-06,
+ "loss": 0.6904,
+ "step": 2957
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.579804983903067e-06,
+ "loss": 0.6833,
+ "step": 2958
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.57353089704133e-06,
+ "loss": 0.6979,
+ "step": 2959
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.567257824930562e-06,
+ "loss": 0.6402,
+ "step": 2960
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.5609857701941645e-06,
+ "loss": 0.7013,
+ "step": 2961
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.554714735455119e-06,
+ "loss": 0.7024,
+ "step": 2962
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.548444723335975e-06,
+ "loss": 0.7173,
+ "step": 2963
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.542175736458852e-06,
+ "loss": 0.6403,
+ "step": 2964
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.535907777445449e-06,
+ "loss": 0.6548,
+ "step": 2965
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.529640848917026e-06,
+ "loss": 0.661,
+ "step": 2966
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.523374953494422e-06,
+ "loss": 0.6892,
+ "step": 2967
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.517110093798039e-06,
+ "loss": 0.6447,
+ "step": 2968
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.510846272447839e-06,
+ "loss": 0.6281,
+ "step": 2969
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.5045834920633606e-06,
+ "loss": 0.7364,
+ "step": 2970
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.498321755263704e-06,
+ "loss": 0.6219,
+ "step": 2971
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.492061064667526e-06,
+ "loss": 0.6544,
+ "step": 2972
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.485801422893057e-06,
+ "loss": 0.6915,
+ "step": 2973
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.47954283255808e-06,
+ "loss": 0.6703,
+ "step": 2974
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.473285296279942e-06,
+ "loss": 0.6829,
+ "step": 2975
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.46702881667555e-06,
+ "loss": 0.6788,
+ "step": 2976
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.460773396361371e-06,
+ "loss": 0.7622,
+ "step": 2977
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.454519037953419e-06,
+ "loss": 0.6909,
+ "step": 2978
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.448265744067275e-06,
+ "loss": 0.7138,
+ "step": 2979
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.442013517318072e-06,
+ "loss": 0.7626,
+ "step": 2980
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.435762360320491e-06,
+ "loss": 0.6719,
+ "step": 2981
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.42951227568877e-06,
+ "loss": 0.6893,
+ "step": 2982
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.423263266036705e-06,
+ "loss": 0.7445,
+ "step": 2983
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.417015333977624e-06,
+ "loss": 0.7744,
+ "step": 2984
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.410768482124431e-06,
+ "loss": 0.587,
+ "step": 2985
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.404522713089555e-06,
+ "loss": 0.7575,
+ "step": 2986
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.3982780294849795e-06,
+ "loss": 0.7754,
+ "step": 2987
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.39203443392224e-06,
+ "loss": 0.5608,
+ "step": 2988
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.38579192901241e-06,
+ "loss": 0.7447,
+ "step": 2989
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.3795505173661075e-06,
+ "loss": 0.6973,
+ "step": 2990
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.373310201593498e-06,
+ "loss": 0.6882,
+ "step": 2991
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.3670709843042855e-06,
+ "loss": 0.7688,
+ "step": 2992
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.360832868107708e-06,
+ "loss": 0.8132,
+ "step": 2993
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.35459585561256e-06,
+ "loss": 0.7184,
+ "step": 2994
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.348359949427159e-06,
+ "loss": 0.6204,
+ "step": 2995
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.342125152159365e-06,
+ "loss": 0.7479,
+ "step": 2996
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.335891466416574e-06,
+ "loss": 0.4718,
+ "step": 2997
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.329658894805722e-06,
+ "loss": 0.6391,
+ "step": 2998
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.323427439933269e-06,
+ "loss": 0.5965,
+ "step": 2999
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.3171971044052135e-06,
+ "loss": 0.7266,
+ "step": 3000
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.31096789082709e-06,
+ "loss": 0.7275,
+ "step": 3001
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.304739801803952e-06,
+ "loss": 0.7472,
+ "step": 3002
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.2985128399404016e-06,
+ "loss": 0.7529,
+ "step": 3003
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.292287007840552e-06,
+ "loss": 0.6855,
+ "step": 3004
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.2860623081080506e-06,
+ "loss": 0.5767,
+ "step": 3005
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.279838743346073e-06,
+ "loss": 0.6622,
+ "step": 3006
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.273616316157312e-06,
+ "loss": 0.7,
+ "step": 3007
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.267395029143998e-06,
+ "loss": 0.7082,
+ "step": 3008
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.261174884907874e-06,
+ "loss": 0.7552,
+ "step": 3009
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.254955886050209e-06,
+ "loss": 0.6344,
+ "step": 3010
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.248738035171787e-06,
+ "loss": 0.6887,
+ "step": 3011
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.242521334872929e-06,
+ "loss": 0.7406,
+ "step": 3012
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.236305787753454e-06,
+ "loss": 0.6268,
+ "step": 3013
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.230091396412711e-06,
+ "loss": 0.6633,
+ "step": 3014
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.223878163449563e-06,
+ "loss": 0.6445,
+ "step": 3015
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.217666091462385e-06,
+ "loss": 0.6431,
+ "step": 3016
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.211455183049072e-06,
+ "loss": 0.8163,
+ "step": 3017
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.205245440807032e-06,
+ "loss": 0.7747,
+ "step": 3018
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.199036867333179e-06,
+ "loss": 0.7395,
+ "step": 3019
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.192829465223942e-06,
+ "loss": 0.7499,
+ "step": 3020
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.186623237075265e-06,
+ "loss": 0.6988,
+ "step": 3021
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.180418185482596e-06,
+ "loss": 0.6694,
+ "step": 3022
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.174214313040888e-06,
+ "loss": 0.7329,
+ "step": 3023
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.168011622344611e-06,
+ "loss": 0.7823,
+ "step": 3024
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.161810115987727e-06,
+ "loss": 0.7288,
+ "step": 3025
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.155609796563714e-06,
+ "loss": 0.7135,
+ "step": 3026
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.14941066666555e-06,
+ "loss": 0.6766,
+ "step": 3027
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.143212728885714e-06,
+ "loss": 0.6966,
+ "step": 3028
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.137015985816183e-06,
+ "loss": 0.6362,
+ "step": 3029
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.130820440048447e-06,
+ "loss": 0.7779,
+ "step": 3030
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.124626094173484e-06,
+ "loss": 0.6072,
+ "step": 3031
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.118432950781775e-06,
+ "loss": 0.6813,
+ "step": 3032
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.1122410124632935e-06,
+ "loss": 0.5997,
+ "step": 3033
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.106050281807514e-06,
+ "loss": 0.5254,
+ "step": 3034
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.0998607614034035e-06,
+ "loss": 0.6175,
+ "step": 3035
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.093672453839426e-06,
+ "loss": 0.7116,
+ "step": 3036
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.087485361703532e-06,
+ "loss": 0.6595,
+ "step": 3037
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.081299487583168e-06,
+ "loss": 0.6786,
+ "step": 3038
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.075114834065272e-06,
+ "loss": 0.7031,
+ "step": 3039
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.068931403736271e-06,
+ "loss": 0.7382,
+ "step": 3040
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.06274919918208e-06,
+ "loss": 0.6289,
+ "step": 3041
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.056568222988098e-06,
+ "loss": 0.6457,
+ "step": 3042
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.0503884777392166e-06,
+ "loss": 0.7722,
+ "step": 3043
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.044209966019805e-06,
+ "loss": 0.6952,
+ "step": 3044
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.0380326904137265e-06,
+ "loss": 0.5913,
+ "step": 3045
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.031856653504316e-06,
+ "loss": 0.7158,
+ "step": 3046
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.025681857874398e-06,
+ "loss": 0.6875,
+ "step": 3047
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.019508306106278e-06,
+ "loss": 0.6631,
+ "step": 3048
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.013336000781739e-06,
+ "loss": 0.6741,
+ "step": 3049
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.007164944482044e-06,
+ "loss": 0.7259,
+ "step": 3050
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.000995139787929e-06,
+ "loss": 0.6544,
+ "step": 3051
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.994826589279615e-06,
+ "loss": 0.7191,
+ "step": 3052
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.988659295536792e-06,
+ "loss": 0.6857,
+ "step": 3053
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.982493261138623e-06,
+ "loss": 0.6403,
+ "step": 3054
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.9763284886637516e-06,
+ "loss": 0.7613,
+ "step": 3055
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.970164980690286e-06,
+ "loss": 0.6996,
+ "step": 3056
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.964002739795814e-06,
+ "loss": 0.6493,
+ "step": 3057
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.957841768557387e-06,
+ "loss": 0.665,
+ "step": 3058
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.9516820695515285e-06,
+ "loss": 0.6135,
+ "step": 3059
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.945523645354227e-06,
+ "loss": 0.6932,
+ "step": 3060
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.939366498540936e-06,
+ "loss": 0.753,
+ "step": 3061
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.933210631686586e-06,
+ "loss": 0.7555,
+ "step": 3062
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.927056047365558e-06,
+ "loss": 0.6632,
+ "step": 3063
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.920902748151706e-06,
+ "loss": 0.811,
+ "step": 3064
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.914750736618341e-06,
+ "loss": 0.6235,
+ "step": 3065
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.908600015338242e-06,
+ "loss": 0.7539,
+ "step": 3066
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.9024505868836465e-06,
+ "loss": 0.7451,
+ "step": 3067
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.896302453826242e-06,
+ "loss": 0.6685,
+ "step": 3068
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.890155618737189e-06,
+ "loss": 0.6528,
+ "step": 3069
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.884010084187094e-06,
+ "loss": 0.7006,
+ "step": 3070
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.877865852746026e-06,
+ "loss": 0.6588,
+ "step": 3071
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.871722926983504e-06,
+ "loss": 0.666,
+ "step": 3072
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.865581309468506e-06,
+ "loss": 0.6096,
+ "step": 3073
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.859441002769455e-06,
+ "loss": 0.7095,
+ "step": 3074
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.853302009454239e-06,
+ "loss": 0.7392,
+ "step": 3075
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.847164332090186e-06,
+ "loss": 0.7366,
+ "step": 3076
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.841027973244077e-06,
+ "loss": 0.5985,
+ "step": 3077
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.834892935482138e-06,
+ "loss": 0.7293,
+ "step": 3078
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.828759221370049e-06,
+ "loss": 0.642,
+ "step": 3079
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.8226268334729366e-06,
+ "loss": 0.707,
+ "step": 3080
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.816495774355362e-06,
+ "loss": 0.7681,
+ "step": 3081
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.810366046581342e-06,
+ "loss": 0.5634,
+ "step": 3082
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.804237652714334e-06,
+ "loss": 0.6877,
+ "step": 3083
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.798110595317229e-06,
+ "loss": 0.7043,
+ "step": 3084
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.7919848769523786e-06,
+ "loss": 0.6854,
+ "step": 3085
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.785860500181554e-06,
+ "loss": 0.7095,
+ "step": 3086
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.779737467565974e-06,
+ "loss": 0.7266,
+ "step": 3087
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.773615781666301e-06,
+ "loss": 0.7544,
+ "step": 3088
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.767495445042618e-06,
+ "loss": 0.6253,
+ "step": 3089
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.7613764602544605e-06,
+ "loss": 0.6572,
+ "step": 3090
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.755258829860791e-06,
+ "loss": 0.7376,
+ "step": 3091
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.749142556420009e-06,
+ "loss": 0.5584,
+ "step": 3092
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.743027642489935e-06,
+ "loss": 0.723,
+ "step": 3093
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.736914090627839e-06,
+ "loss": 0.6508,
+ "step": 3094
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.730801903390412e-06,
+ "loss": 0.6287,
+ "step": 3095
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.72469108333377e-06,
+ "loss": 0.7346,
+ "step": 3096
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.718581633013465e-06,
+ "loss": 0.6399,
+ "step": 3097
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.712473554984472e-06,
+ "loss": 0.7351,
+ "step": 3098
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.706366851801194e-06,
+ "loss": 0.6486,
+ "step": 3099
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.7002615260174595e-06,
+ "loss": 0.6742,
+ "step": 3100
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.694157580186516e-06,
+ "loss": 0.5793,
+ "step": 3101
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.688055016861037e-06,
+ "loss": 0.6816,
+ "step": 3102
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.681953838593125e-06,
+ "loss": 0.6853,
+ "step": 3103
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.6758540479342915e-06,
+ "loss": 0.6774,
+ "step": 3104
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.669755647435474e-06,
+ "loss": 0.6728,
+ "step": 3105
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.663658639647033e-06,
+ "loss": 0.6027,
+ "step": 3106
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.657563027118732e-06,
+ "loss": 0.5667,
+ "step": 3107
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.651468812399764e-06,
+ "loss": 0.7092,
+ "step": 3108
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.645375998038737e-06,
+ "loss": 0.7491,
+ "step": 3109
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.639284586583664e-06,
+ "loss": 0.7153,
+ "step": 3110
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.633194580581978e-06,
+ "loss": 0.741,
+ "step": 3111
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.627105982580529e-06,
+ "loss": 0.6633,
+ "step": 3112
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.621018795125569e-06,
+ "loss": 0.6624,
+ "step": 3113
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.614933020762766e-06,
+ "loss": 0.6012,
+ "step": 3114
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.6088486620371905e-06,
+ "loss": 0.7797,
+ "step": 3115
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.602765721493328e-06,
+ "loss": 0.7931,
+ "step": 3116
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.596684201675069e-06,
+ "loss": 0.6425,
+ "step": 3117
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.590604105125708e-06,
+ "loss": 0.7854,
+ "step": 3118
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.584525434387945e-06,
+ "loss": 0.6347,
+ "step": 3119
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.5784481920038835e-06,
+ "loss": 0.728,
+ "step": 3120
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.572372380515031e-06,
+ "loss": 0.7242,
+ "step": 3121
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.5662980024622965e-06,
+ "loss": 0.7781,
+ "step": 3122
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.560225060385992e-06,
+ "loss": 0.7203,
+ "step": 3123
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.554153556825819e-06,
+ "loss": 0.6578,
+ "step": 3124
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.54808349432089e-06,
+ "loss": 0.6709,
+ "step": 3125
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.542014875409703e-06,
+ "loss": 0.6787,
+ "step": 3126
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.535947702630168e-06,
+ "loss": 0.7009,
+ "step": 3127
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.52988197851957e-06,
+ "loss": 0.7655,
+ "step": 3128
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.523817705614602e-06,
+ "loss": 0.5602,
+ "step": 3129
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.5177548864513505e-06,
+ "loss": 0.573,
+ "step": 3130
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.51169352356529e-06,
+ "loss": 0.6818,
+ "step": 3131
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.505633619491285e-06,
+ "loss": 0.6818,
+ "step": 3132
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.499575176763591e-06,
+ "loss": 0.6984,
+ "step": 3133
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.493518197915851e-06,
+ "loss": 0.5686,
+ "step": 3134
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.487462685481103e-06,
+ "loss": 0.6243,
+ "step": 3135
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.481408641991761e-06,
+ "loss": 0.6716,
+ "step": 3136
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.4753560699796305e-06,
+ "loss": 0.6906,
+ "step": 3137
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.4693049719759e-06,
+ "loss": 0.7078,
+ "step": 3138
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.4632553505111484e-06,
+ "loss": 0.6893,
+ "step": 3139
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.457207208115328e-06,
+ "loss": 0.6873,
+ "step": 3140
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.451160547317777e-06,
+ "loss": 0.7764,
+ "step": 3141
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.44511537064721e-06,
+ "loss": 0.6806,
+ "step": 3142
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.439071680631725e-06,
+ "loss": 0.6746,
+ "step": 3143
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.4330294797988e-06,
+ "loss": 0.72,
+ "step": 3144
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.4269887706752845e-06,
+ "loss": 0.6599,
+ "step": 3145
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.420949555787406e-06,
+ "loss": 0.8032,
+ "step": 3146
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.414911837660768e-06,
+ "loss": 0.7566,
+ "step": 3147
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.4088756188203514e-06,
+ "loss": 0.6585,
+ "step": 3148
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.402840901790508e-06,
+ "loss": 0.6598,
+ "step": 3149
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.396807689094956e-06,
+ "loss": 0.7289,
+ "step": 3150
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.39077598325679e-06,
+ "loss": 0.6692,
+ "step": 3151
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.384745786798474e-06,
+ "loss": 0.7031,
+ "step": 3152
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.378717102241842e-06,
+ "loss": 0.6975,
+ "step": 3153
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.372689932108092e-06,
+ "loss": 0.6825,
+ "step": 3154
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.366664278917789e-06,
+ "loss": 0.7796,
+ "step": 3155
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.360640145190864e-06,
+ "loss": 0.7875,
+ "step": 3156
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.3546175334466185e-06,
+ "loss": 0.7237,
+ "step": 3157
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.348596446203713e-06,
+ "loss": 0.7102,
+ "step": 3158
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.342576885980165e-06,
+ "loss": 0.6879,
+ "step": 3159
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.336558855293362e-06,
+ "loss": 0.7182,
+ "step": 3160
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.330542356660047e-06,
+ "loss": 0.6168,
+ "step": 3161
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.324527392596326e-06,
+ "loss": 0.6429,
+ "step": 3162
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.318513965617657e-06,
+ "loss": 0.737,
+ "step": 3163
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.312502078238861e-06,
+ "loss": 0.606,
+ "step": 3164
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.306491732974111e-06,
+ "loss": 0.6956,
+ "step": 3165
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.300482932336943e-06,
+ "loss": 0.6158,
+ "step": 3166
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.2944756788402394e-06,
+ "loss": 0.6839,
+ "step": 3167
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.288469974996234e-06,
+ "loss": 0.6592,
+ "step": 3168
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.282465823316519e-06,
+ "loss": 0.7499,
+ "step": 3169
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.276463226312036e-06,
+ "loss": 0.6276,
+ "step": 3170
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.270462186493071e-06,
+ "loss": 0.7104,
+ "step": 3171
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.264462706369263e-06,
+ "loss": 0.585,
+ "step": 3172
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.2584647884496034e-06,
+ "loss": 0.6484,
+ "step": 3173
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.252468435242417e-06,
+ "loss": 0.7295,
+ "step": 3174
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.246473649255391e-06,
+ "loss": 0.6965,
+ "step": 3175
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.240480432995548e-06,
+ "loss": 0.767,
+ "step": 3176
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.234488788969251e-06,
+ "loss": 0.619,
+ "step": 3177
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.22849871968221e-06,
+ "loss": 0.7124,
+ "step": 3178
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.22251022763948e-06,
+ "loss": 0.6922,
+ "step": 3179
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.216523315345448e-06,
+ "loss": 0.752,
+ "step": 3180
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.2105379853038436e-06,
+ "loss": 0.6829,
+ "step": 3181
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.204554240017742e-06,
+ "loss": 0.7119,
+ "step": 3182
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.19857208198954e-06,
+ "loss": 0.5867,
+ "step": 3183
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.19259151372099e-06,
+ "loss": 0.6547,
+ "step": 3184
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.186612537713164e-06,
+ "loss": 0.8061,
+ "step": 3185
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.180635156466474e-06,
+ "loss": 0.6876,
+ "step": 3186
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.1746593724806666e-06,
+ "loss": 0.7373,
+ "step": 3187
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.16868518825482e-06,
+ "loss": 0.5928,
+ "step": 3188
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.162712606287335e-06,
+ "loss": 0.6865,
+ "step": 3189
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.156741629075955e-06,
+ "loss": 0.6081,
+ "step": 3190
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.150772259117748e-06,
+ "loss": 0.7193,
+ "step": 3191
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.1448044989091e-06,
+ "loss": 0.6594,
+ "step": 3192
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.138838350945744e-06,
+ "loss": 0.7573,
+ "step": 3193
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.132873817722718e-06,
+ "loss": 0.6159,
+ "step": 3194
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.126910901734397e-06,
+ "loss": 0.5966,
+ "step": 3195
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.120949605474479e-06,
+ "loss": 0.7459,
+ "step": 3196
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.114989931435976e-06,
+ "loss": 0.6607,
+ "step": 3197
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.109031882111231e-06,
+ "loss": 0.7765,
+ "step": 3198
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.103075459991905e-06,
+ "loss": 0.6839,
+ "step": 3199
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.097120667568977e-06,
+ "loss": 0.7679,
+ "step": 3200
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.0911675073327385e-06,
+ "loss": 0.7381,
+ "step": 3201
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.0852159817728185e-06,
+ "loss": 0.7269,
+ "step": 3202
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.079266093378138e-06,
+ "loss": 0.6603,
+ "step": 3203
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.0733178446369475e-06,
+ "loss": 0.6784,
+ "step": 3204
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.067371238036812e-06,
+ "loss": 0.6604,
+ "step": 3205
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.061426276064602e-06,
+ "loss": 0.7601,
+ "step": 3206
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.055482961206503e-06,
+ "loss": 0.7864,
+ "step": 3207
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.049541295948016e-06,
+ "loss": 0.7194,
+ "step": 3208
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.043601282773952e-06,
+ "loss": 0.7382,
+ "step": 3209
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.037662924168419e-06,
+ "loss": 0.7318,
+ "step": 3210
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.031726222614855e-06,
+ "loss": 0.6675,
+ "step": 3211
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.025791180595983e-06,
+ "loss": 0.6388,
+ "step": 3212
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.019857800593847e-06,
+ "loss": 0.7155,
+ "step": 3213
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.013926085089789e-06,
+ "loss": 0.6233,
+ "step": 3214
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.007996036564454e-06,
+ "loss": 0.6206,
+ "step": 3215
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.002067657497792e-06,
+ "loss": 0.6673,
+ "step": 3216
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.9961409503690605e-06,
+ "loss": 0.6831,
+ "step": 3217
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.990215917656805e-06,
+ "loss": 0.692,
+ "step": 3218
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.984292561838881e-06,
+ "loss": 0.7634,
+ "step": 3219
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.978370885392442e-06,
+ "loss": 0.7455,
+ "step": 3220
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.9724508907939345e-06,
+ "loss": 0.7021,
+ "step": 3221
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.966532580519106e-06,
+ "loss": 0.6015,
+ "step": 3222
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.960615957042999e-06,
+ "loss": 0.6533,
+ "step": 3223
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.954701022839944e-06,
+ "loss": 0.6149,
+ "step": 3224
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.948787780383574e-06,
+ "loss": 0.577,
+ "step": 3225
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.942876232146812e-06,
+ "loss": 0.6591,
+ "step": 3226
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.936966380601867e-06,
+ "loss": 0.6271,
+ "step": 3227
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.931058228220246e-06,
+ "loss": 0.7577,
+ "step": 3228
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.92515177747274e-06,
+ "loss": 0.7077,
+ "step": 3229
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.919247030829432e-06,
+ "loss": 0.5902,
+ "step": 3230
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.913343990759695e-06,
+ "loss": 0.7142,
+ "step": 3231
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.907442659732179e-06,
+ "loss": 0.794,
+ "step": 3232
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.901543040214823e-06,
+ "loss": 0.7069,
+ "step": 3233
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.895645134674857e-06,
+ "loss": 0.6991,
+ "step": 3234
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.889748945578788e-06,
+ "loss": 0.6981,
+ "step": 3235
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.883854475392402e-06,
+ "loss": 0.7632,
+ "step": 3236
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.877961726580773e-06,
+ "loss": 0.6847,
+ "step": 3237
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.872070701608251e-06,
+ "loss": 0.6595,
+ "step": 3238
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.866181402938469e-06,
+ "loss": 0.6846,
+ "step": 3239
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.860293833034338e-06,
+ "loss": 0.663,
+ "step": 3240
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.854407994358037e-06,
+ "loss": 0.6369,
+ "step": 3241
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.84852388937103e-06,
+ "loss": 0.7313,
+ "step": 3242
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.842641520534055e-06,
+ "loss": 0.7103,
+ "step": 3243
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.836760890307121e-06,
+ "loss": 0.7124,
+ "step": 3244
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.830882001149517e-06,
+ "loss": 0.6731,
+ "step": 3245
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.825004855519789e-06,
+ "loss": 0.7229,
+ "step": 3246
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.819129455875764e-06,
+ "loss": 0.7121,
+ "step": 3247
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.813255804674545e-06,
+ "loss": 0.647,
+ "step": 3248
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.8073839043724935e-06,
+ "loss": 0.7038,
+ "step": 3249
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.801513757425241e-06,
+ "loss": 0.6189,
+ "step": 3250
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.7956453662876935e-06,
+ "loss": 0.6412,
+ "step": 3251
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.789778733414004e-06,
+ "loss": 0.6886,
+ "step": 3252
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.783913861257611e-06,
+ "loss": 0.7155,
+ "step": 3253
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.778050752271203e-06,
+ "loss": 0.6398,
+ "step": 3254
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.772189408906739e-06,
+ "loss": 0.6649,
+ "step": 3255
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.766329833615436e-06,
+ "loss": 0.6514,
+ "step": 3256
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.760472028847773e-06,
+ "loss": 0.6842,
+ "step": 3257
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.754615997053486e-06,
+ "loss": 0.7281,
+ "step": 3258
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.748761740681573e-06,
+ "loss": 0.7458,
+ "step": 3259
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.742909262180287e-06,
+ "loss": 0.6548,
+ "step": 3260
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.737058563997138e-06,
+ "loss": 0.6011,
+ "step": 3261
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.7312096485788925e-06,
+ "loss": 0.6651,
+ "step": 3262
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.725362518371576e-06,
+ "loss": 0.6288,
+ "step": 3263
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.719517175820451e-06,
+ "loss": 0.608,
+ "step": 3264
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.713673623370046e-06,
+ "loss": 0.7602,
+ "step": 3265
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.707831863464146e-06,
+ "loss": 0.7079,
+ "step": 3266
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.7019918985457745e-06,
+ "loss": 0.6867,
+ "step": 3267
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.696153731057213e-06,
+ "loss": 0.7123,
+ "step": 3268
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.6903173634399785e-06,
+ "loss": 0.6895,
+ "step": 3269
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.684482798134847e-06,
+ "loss": 0.7333,
+ "step": 3270
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.678650037581841e-06,
+ "loss": 0.6501,
+ "step": 3271
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.6728190842202204e-06,
+ "loss": 0.5719,
+ "step": 3272
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.666989940488496e-06,
+ "loss": 0.657,
+ "step": 3273
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.66116260882442e-06,
+ "loss": 0.7108,
+ "step": 3274
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.655337091664985e-06,
+ "loss": 0.6424,
+ "step": 3275
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.649513391446426e-06,
+ "loss": 0.6905,
+ "step": 3276
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.643691510604221e-06,
+ "loss": 0.6556,
+ "step": 3277
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.63787145157308e-06,
+ "loss": 0.6148,
+ "step": 3278
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.632053216786959e-06,
+ "loss": 0.6268,
+ "step": 3279
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.626236808679051e-06,
+ "loss": 0.7046,
+ "step": 3280
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.620422229681771e-06,
+ "loss": 0.7003,
+ "step": 3281
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.614609482226787e-06,
+ "loss": 0.6452,
+ "step": 3282
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.608798568744989e-06,
+ "loss": 0.7027,
+ "step": 3283
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.602989491666512e-06,
+ "loss": 0.6228,
+ "step": 3284
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.597182253420712e-06,
+ "loss": 0.5854,
+ "step": 3285
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.591376856436183e-06,
+ "loss": 0.6727,
+ "step": 3286
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.585573303140741e-06,
+ "loss": 0.6425,
+ "step": 3287
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.579771595961438e-06,
+ "loss": 0.6842,
+ "step": 3288
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.573971737324551e-06,
+ "loss": 0.6905,
+ "step": 3289
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.568173729655583e-06,
+ "loss": 0.6835,
+ "step": 3290
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.56237757537927e-06,
+ "loss": 0.6406,
+ "step": 3291
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.556583276919563e-06,
+ "loss": 0.6045,
+ "step": 3292
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.550790836699642e-06,
+ "loss": 0.685,
+ "step": 3293
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.54500025714191e-06,
+ "loss": 0.5527,
+ "step": 3294
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.539211540667992e-06,
+ "loss": 0.6867,
+ "step": 3295
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.533424689698732e-06,
+ "loss": 0.6263,
+ "step": 3296
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.527639706654195e-06,
+ "loss": 0.6216,
+ "step": 3297
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.5218565939536685e-06,
+ "loss": 0.7872,
+ "step": 3298
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.516075354015646e-06,
+ "loss": 0.7621,
+ "step": 3299
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.510295989257851e-06,
+ "loss": 0.6636,
+ "step": 3300
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.504518502097212e-06,
+ "loss": 0.6907,
+ "step": 3301
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.498742894949887e-06,
+ "loss": 0.745,
+ "step": 3302
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.4929691702312395e-06,
+ "loss": 0.6664,
+ "step": 3303
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.4871973303558335e-06,
+ "loss": 0.6562,
+ "step": 3304
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.481427377737465e-06,
+ "loss": 0.602,
+ "step": 3305
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.475659314789128e-06,
+ "loss": 0.6788,
+ "step": 3306
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.4698931439230314e-06,
+ "loss": 0.7045,
+ "step": 3307
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.464128867550593e-06,
+ "loss": 0.6787,
+ "step": 3308
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.458366488082436e-06,
+ "loss": 0.6719,
+ "step": 3309
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.452606007928389e-06,
+ "loss": 0.7282,
+ "step": 3310
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.446847429497494e-06,
+ "loss": 0.6452,
+ "step": 3311
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.441090755197986e-06,
+ "loss": 0.7099,
+ "step": 3312
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.435335987437316e-06,
+ "loss": 0.7681,
+ "step": 3313
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.4295831286221265e-06,
+ "loss": 0.7147,
+ "step": 3314
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.423832181158274e-06,
+ "loss": 0.6692,
+ "step": 3315
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.418083147450799e-06,
+ "loss": 0.6784,
+ "step": 3316
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.412336029903956e-06,
+ "loss": 0.7353,
+ "step": 3317
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.406590830921192e-06,
+ "loss": 0.7152,
+ "step": 3318
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.400847552905152e-06,
+ "loss": 0.6989,
+ "step": 3319
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.395106198257687e-06,
+ "loss": 0.72,
+ "step": 3320
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.3893667693798245e-06,
+ "loss": 0.6676,
+ "step": 3321
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.383629268671804e-06,
+ "loss": 0.7923,
+ "step": 3322
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.377893698533048e-06,
+ "loss": 0.7103,
+ "step": 3323
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.372160061362177e-06,
+ "loss": 0.7113,
+ "step": 3324
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.366428359557003e-06,
+ "loss": 0.6249,
+ "step": 3325
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.360698595514529e-06,
+ "loss": 0.7095,
+ "step": 3326
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.354970771630945e-06,
+ "loss": 0.6828,
+ "step": 3327
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.349244890301623e-06,
+ "loss": 0.6671,
+ "step": 3328
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.34352095392114e-06,
+ "loss": 0.6419,
+ "step": 3329
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.337798964883247e-06,
+ "loss": 0.5878,
+ "step": 3330
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.332078925580883e-06,
+ "loss": 0.6087,
+ "step": 3331
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.326360838406175e-06,
+ "loss": 0.6692,
+ "step": 3332
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.320644705750431e-06,
+ "loss": 0.6409,
+ "step": 3333
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.3149305300041345e-06,
+ "loss": 0.777,
+ "step": 3334
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.309218313556961e-06,
+ "loss": 0.678,
+ "step": 3335
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.3035080587977665e-06,
+ "loss": 0.6691,
+ "step": 3336
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.297799768114575e-06,
+ "loss": 0.7,
+ "step": 3337
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.292093443894611e-06,
+ "loss": 0.6436,
+ "step": 3338
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.286389088524253e-06,
+ "loss": 0.6987,
+ "step": 3339
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.280686704389066e-06,
+ "loss": 0.5633,
+ "step": 3340
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.274986293873796e-06,
+ "loss": 0.7091,
+ "step": 3341
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.2692878593623546e-06,
+ "loss": 0.6573,
+ "step": 3342
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.263591403237831e-06,
+ "loss": 0.649,
+ "step": 3343
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.2578969278824885e-06,
+ "loss": 0.5675,
+ "step": 3344
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.252204435677763e-06,
+ "loss": 0.5642,
+ "step": 3345
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.246513929004249e-06,
+ "loss": 0.7047,
+ "step": 3346
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.2408254102417275e-06,
+ "loss": 0.757,
+ "step": 3347
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.2351388817691415e-06,
+ "loss": 0.6717,
+ "step": 3348
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.2294543459646e-06,
+ "loss": 0.6457,
+ "step": 3349
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.22377180520538e-06,
+ "loss": 0.6219,
+ "step": 3350
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.2180912618679195e-06,
+ "loss": 0.5515,
+ "step": 3351
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.212412718327828e-06,
+ "loss": 0.6348,
+ "step": 3352
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.206736176959877e-06,
+ "loss": 0.6431,
+ "step": 3353
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.201061640138e-06,
+ "loss": 0.6615,
+ "step": 3354
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.19538911023529e-06,
+ "loss": 0.7028,
+ "step": 3355
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.189718589624004e-06,
+ "loss": 0.7167,
+ "step": 3356
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.184050080675558e-06,
+ "loss": 0.7833,
+ "step": 3357
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.178383585760526e-06,
+ "loss": 0.6497,
+ "step": 3358
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.1727191072486385e-06,
+ "loss": 0.6938,
+ "step": 3359
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.167056647508786e-06,
+ "loss": 0.5596,
+ "step": 3360
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.161396208909012e-06,
+ "loss": 0.5762,
+ "step": 3361
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.155737793816518e-06,
+ "loss": 0.7337,
+ "step": 3362
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.150081404597654e-06,
+ "loss": 0.7245,
+ "step": 3363
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.144427043617919e-06,
+ "loss": 0.5806,
+ "step": 3364
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.138774713241983e-06,
+ "loss": 0.7245,
+ "step": 3365
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.133124415833648e-06,
+ "loss": 0.6182,
+ "step": 3366
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.127476153755873e-06,
+ "loss": 0.6613,
+ "step": 3367
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.1218299293707695e-06,
+ "loss": 0.5847,
+ "step": 3368
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.116185745039585e-06,
+ "loss": 0.7631,
+ "step": 3369
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.110543603122722e-06,
+ "loss": 0.6429,
+ "step": 3370
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.10490350597973e-06,
+ "loss": 0.6546,
+ "step": 3371
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.099265455969302e-06,
+ "loss": 0.5668,
+ "step": 3372
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.093629455449273e-06,
+ "loss": 0.5837,
+ "step": 3373
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.0879955067766215e-06,
+ "loss": 0.7366,
+ "step": 3374
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.082363612307471e-06,
+ "loss": 0.7259,
+ "step": 3375
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.0767337743970805e-06,
+ "loss": 0.7689,
+ "step": 3376
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.071105995399852e-06,
+ "loss": 0.6395,
+ "step": 3377
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.06548027766933e-06,
+ "loss": 0.569,
+ "step": 3378
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.059856623558189e-06,
+ "loss": 0.7308,
+ "step": 3379
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.054235035418252e-06,
+ "loss": 0.6615,
+ "step": 3380
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.0486155156004615e-06,
+ "loss": 0.6919,
+ "step": 3381
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.042998066454909e-06,
+ "loss": 0.6022,
+ "step": 3382
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.0373826903308095e-06,
+ "loss": 0.7068,
+ "step": 3383
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.031769389576528e-06,
+ "loss": 0.6742,
+ "step": 3384
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.026158166539548e-06,
+ "loss": 0.7867,
+ "step": 3385
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.02054902356648e-06,
+ "loss": 0.6035,
+ "step": 3386
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.014941963003075e-06,
+ "loss": 0.7024,
+ "step": 3387
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.009336987194209e-06,
+ "loss": 0.609,
+ "step": 3388
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.003734098483887e-06,
+ "loss": 0.623,
+ "step": 3389
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.99813329921524e-06,
+ "loss": 0.7637,
+ "step": 3390
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.992534591730527e-06,
+ "loss": 0.5932,
+ "step": 3391
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.986937978371132e-06,
+ "loss": 0.7868,
+ "step": 3392
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.981343461477559e-06,
+ "loss": 0.7687,
+ "step": 3393
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.975751043389441e-06,
+ "loss": 0.6989,
+ "step": 3394
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.970160726445531e-06,
+ "loss": 0.7239,
+ "step": 3395
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.964572512983703e-06,
+ "loss": 0.6473,
+ "step": 3396
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.9589864053409545e-06,
+ "loss": 0.6753,
+ "step": 3397
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.9534024058533925e-06,
+ "loss": 0.6885,
+ "step": 3398
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.9478205168562535e-06,
+ "loss": 0.7446,
+ "step": 3399
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.942240740683886e-06,
+ "loss": 0.766,
+ "step": 3400
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.936663079669753e-06,
+ "loss": 0.5977,
+ "step": 3401
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.9310875361464475e-06,
+ "loss": 0.6844,
+ "step": 3402
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.925514112445653e-06,
+ "loss": 0.7095,
+ "step": 3403
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.919942810898184e-06,
+ "loss": 0.7624,
+ "step": 3404
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.914373633833962e-06,
+ "loss": 0.751,
+ "step": 3405
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.908806583582021e-06,
+ "loss": 0.6091,
+ "step": 3406
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.9032416624705036e-06,
+ "loss": 0.6868,
+ "step": 3407
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.897678872826665e-06,
+ "loss": 0.7345,
+ "step": 3408
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.892118216976871e-06,
+ "loss": 0.6814,
+ "step": 3409
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.886559697246579e-06,
+ "loss": 0.6134,
+ "step": 3410
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.881003315960378e-06,
+ "loss": 0.7528,
+ "step": 3411
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.875449075441947e-06,
+ "loss": 0.6858,
+ "step": 3412
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.869896978014071e-06,
+ "loss": 0.7497,
+ "step": 3413
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.864347025998643e-06,
+ "loss": 0.5751,
+ "step": 3414
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.858799221716659e-06,
+ "loss": 0.6644,
+ "step": 3415
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.853253567488207e-06,
+ "loss": 0.7223,
+ "step": 3416
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.847710065632489e-06,
+ "loss": 0.5934,
+ "step": 3417
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.842168718467798e-06,
+ "loss": 0.6421,
+ "step": 3418
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.836629528311527e-06,
+ "loss": 0.7837,
+ "step": 3419
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.83109249748018e-06,
+ "loss": 0.6456,
+ "step": 3420
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.8255576282893355e-06,
+ "loss": 0.6292,
+ "step": 3421
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.8200249230536836e-06,
+ "loss": 0.7188,
+ "step": 3422
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.814494384087004e-06,
+ "loss": 0.7746,
+ "step": 3423
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.808966013702173e-06,
+ "loss": 0.6331,
+ "step": 3424
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.803439814211158e-06,
+ "loss": 0.694,
+ "step": 3425
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.79791578792502e-06,
+ "loss": 0.6155,
+ "step": 3426
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.792393937153914e-06,
+ "loss": 0.7019,
+ "step": 3427
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.78687426420707e-06,
+ "loss": 0.6695,
+ "step": 3428
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.78135677139283e-06,
+ "loss": 0.6954,
+ "step": 3429
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.775841461018609e-06,
+ "loss": 0.7337,
+ "step": 3430
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.770328335390915e-06,
+ "loss": 0.7118,
+ "step": 3431
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.7648173968153434e-06,
+ "loss": 0.7869,
+ "step": 3432
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.759308647596564e-06,
+ "loss": 0.6644,
+ "step": 3433
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.753802090038344e-06,
+ "loss": 0.717,
+ "step": 3434
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.7482977264435305e-06,
+ "loss": 0.7364,
+ "step": 3435
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.74279555911405e-06,
+ "loss": 0.7479,
+ "step": 3436
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.737295590350913e-06,
+ "loss": 0.625,
+ "step": 3437
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.731797822454211e-06,
+ "loss": 0.6684,
+ "step": 3438
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.726302257723113e-06,
+ "loss": 0.6869,
+ "step": 3439
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.7208088984558685e-06,
+ "loss": 0.8054,
+ "step": 3440
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.7153177469498045e-06,
+ "loss": 0.7373,
+ "step": 3441
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.709828805501323e-06,
+ "loss": 0.7396,
+ "step": 3442
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.704342076405902e-06,
+ "loss": 0.7583,
+ "step": 3443
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.698857561958102e-06,
+ "loss": 0.6946,
+ "step": 3444
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.6933752644515404e-06,
+ "loss": 0.6979,
+ "step": 3445
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.687895186178919e-06,
+ "loss": 0.7772,
+ "step": 3446
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.682417329432016e-06,
+ "loss": 0.657,
+ "step": 3447
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.676941696501674e-06,
+ "loss": 0.6356,
+ "step": 3448
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.671468289677802e-06,
+ "loss": 0.6438,
+ "step": 3449
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.6659971112493895e-06,
+ "loss": 0.7334,
+ "step": 3450
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.660528163504479e-06,
+ "loss": 0.7819,
+ "step": 3451
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.65506144873019e-06,
+ "loss": 0.7254,
+ "step": 3452
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.6495969692127084e-06,
+ "loss": 0.6356,
+ "step": 3453
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.644134727237281e-06,
+ "loss": 0.701,
+ "step": 3454
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.6386747250882226e-06,
+ "loss": 0.7409,
+ "step": 3455
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.633216965048909e-06,
+ "loss": 0.6054,
+ "step": 3456
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.6277614494017784e-06,
+ "loss": 0.6536,
+ "step": 3457
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.622308180428332e-06,
+ "loss": 0.6697,
+ "step": 3458
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.616857160409131e-06,
+ "loss": 0.7346,
+ "step": 3459
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.611408391623794e-06,
+ "loss": 0.6438,
+ "step": 3460
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.605961876351001e-06,
+ "loss": 0.6724,
+ "step": 3461
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.600517616868491e-06,
+ "loss": 0.6396,
+ "step": 3462
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.595075615453049e-06,
+ "loss": 0.7534,
+ "step": 3463
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.589635874380525e-06,
+ "loss": 0.6747,
+ "step": 3464
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.584198395925829e-06,
+ "loss": 0.7329,
+ "step": 3465
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.578763182362915e-06,
+ "loss": 0.7282,
+ "step": 3466
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.5733302359647955e-06,
+ "loss": 0.7403,
+ "step": 3467
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.567899559003526e-06,
+ "loss": 0.7272,
+ "step": 3468
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.562471153750221e-06,
+ "loss": 0.7387,
+ "step": 3469
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.5570450224750455e-06,
+ "loss": 0.643,
+ "step": 3470
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.55162116744721e-06,
+ "loss": 0.7071,
+ "step": 3471
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.546199590934973e-06,
+ "loss": 0.7697,
+ "step": 3472
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.5407802952056435e-06,
+ "loss": 0.7614,
+ "step": 3473
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.535363282525575e-06,
+ "loss": 0.6633,
+ "step": 3474
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.529948555160162e-06,
+ "loss": 0.7022,
+ "step": 3475
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.52453611537385e-06,
+ "loss": 0.7307,
+ "step": 3476
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.519125965430125e-06,
+ "loss": 0.6954,
+ "step": 3477
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.513718107591514e-06,
+ "loss": 0.6134,
+ "step": 3478
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.508312544119592e-06,
+ "loss": 0.7087,
+ "step": 3479
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.50290927727496e-06,
+ "loss": 0.7753,
+ "step": 3480
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.497508309317271e-06,
+ "loss": 0.6923,
+ "step": 3481
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.4921096425052126e-06,
+ "loss": 0.7145,
+ "step": 3482
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.486713279096515e-06,
+ "loss": 0.7511,
+ "step": 3483
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.4813192213479424e-06,
+ "loss": 0.6406,
+ "step": 3484
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.475927471515287e-06,
+ "loss": 0.6602,
+ "step": 3485
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.470538031853383e-06,
+ "loss": 0.6737,
+ "step": 3486
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.465150904616101e-06,
+ "loss": 0.5953,
+ "step": 3487
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.459766092056338e-06,
+ "loss": 0.6354,
+ "step": 3488
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.454383596426028e-06,
+ "loss": 0.6891,
+ "step": 3489
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.449003419976133e-06,
+ "loss": 0.6054,
+ "step": 3490
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.443625564956648e-06,
+ "loss": 0.7061,
+ "step": 3491
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.438250033616595e-06,
+ "loss": 0.6898,
+ "step": 3492
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.432876828204022e-06,
+ "loss": 0.7531,
+ "step": 3493
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.427505950966012e-06,
+ "loss": 0.7034,
+ "step": 3494
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.4221374041486654e-06,
+ "loss": 0.7035,
+ "step": 3495
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.416771189997112e-06,
+ "loss": 0.6969,
+ "step": 3496
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.411407310755513e-06,
+ "loss": 0.7818,
+ "step": 3497
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.406045768667036e-06,
+ "loss": 0.7156,
+ "step": 3498
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.400686565973886e-06,
+ "loss": 0.6324,
+ "step": 3499
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.395329704917282e-06,
+ "loss": 0.6056,
+ "step": 3500
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.389975187737472e-06,
+ "loss": 0.718,
+ "step": 3501
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.3846230166737215e-06,
+ "loss": 0.6587,
+ "step": 3502
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.3792731939643015e-06,
+ "loss": 0.7435,
+ "step": 3503
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.373925721846519e-06,
+ "loss": 0.6185,
+ "step": 3504
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.368580602556687e-06,
+ "loss": 0.6832,
+ "step": 3505
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.363237838330139e-06,
+ "loss": 0.6392,
+ "step": 3506
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.3578974314012225e-06,
+ "loss": 0.6798,
+ "step": 3507
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.352559384003299e-06,
+ "loss": 0.6969,
+ "step": 3508
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.347223698368748e-06,
+ "loss": 0.7017,
+ "step": 3509
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.3418903767289435e-06,
+ "loss": 0.7517,
+ "step": 3510
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.336559421314298e-06,
+ "loss": 0.6485,
+ "step": 3511
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.331230834354214e-06,
+ "loss": 0.6088,
+ "step": 3512
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.325904618077114e-06,
+ "loss": 0.6121,
+ "step": 3513
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.320580774710425e-06,
+ "loss": 0.7198,
+ "step": 3514
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.315259306480575e-06,
+ "loss": 0.683,
+ "step": 3515
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.3099402156130095e-06,
+ "loss": 0.7693,
+ "step": 3516
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.304623504332178e-06,
+ "loss": 0.7516,
+ "step": 3517
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.29930917486153e-06,
+ "loss": 0.7249,
+ "step": 3518
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.293997229423523e-06,
+ "loss": 0.5759,
+ "step": 3519
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.288687670239615e-06,
+ "loss": 0.7233,
+ "step": 3520
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.283380499530269e-06,
+ "loss": 0.775,
+ "step": 3521
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.278075719514944e-06,
+ "loss": 0.6732,
+ "step": 3522
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.2727733324121055e-06,
+ "loss": 0.63,
+ "step": 3523
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.267473340439213e-06,
+ "loss": 0.6501,
+ "step": 3524
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.262175745812729e-06,
+ "loss": 0.6944,
+ "step": 3525
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.256880550748114e-06,
+ "loss": 0.652,
+ "step": 3526
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.251587757459813e-06,
+ "loss": 0.6741,
+ "step": 3527
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.246297368161278e-06,
+ "loss": 0.6685,
+ "step": 3528
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.241009385064958e-06,
+ "loss": 0.792,
+ "step": 3529
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.235723810382289e-06,
+ "loss": 0.7186,
+ "step": 3530
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.2304406463237026e-06,
+ "loss": 0.7017,
+ "step": 3531
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.225159895098623e-06,
+ "loss": 0.6268,
+ "step": 3532
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.219881558915458e-06,
+ "loss": 0.7276,
+ "step": 3533
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.214605639981615e-06,
+ "loss": 0.6856,
+ "step": 3534
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.209332140503486e-06,
+ "loss": 0.72,
+ "step": 3535
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.204061062686452e-06,
+ "loss": 0.65,
+ "step": 3536
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.198792408734882e-06,
+ "loss": 0.6612,
+ "step": 3537
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.193526180852129e-06,
+ "loss": 0.6342,
+ "step": 3538
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.1882623812405345e-06,
+ "loss": 0.7011,
+ "step": 3539
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.183001012101423e-06,
+ "loss": 0.585,
+ "step": 3540
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.1777420756351e-06,
+ "loss": 0.5931,
+ "step": 3541
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.17248557404086e-06,
+ "loss": 0.7368,
+ "step": 3542
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.1672315095169725e-06,
+ "loss": 0.6498,
+ "step": 3543
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.161979884260696e-06,
+ "loss": 0.6741,
+ "step": 3544
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.1567307004682546e-06,
+ "loss": 0.6182,
+ "step": 3545
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.151483960334862e-06,
+ "loss": 0.6032,
+ "step": 3546
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.146239666054716e-06,
+ "loss": 0.6509,
+ "step": 3547
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.140997819820979e-06,
+ "loss": 0.7124,
+ "step": 3548
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.135758423825798e-06,
+ "loss": 0.7412,
+ "step": 3549
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.130521480260285e-06,
+ "loss": 0.6857,
+ "step": 3550
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.125286991314538e-06,
+ "loss": 0.6227,
+ "step": 3551
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.1200549591776215e-06,
+ "loss": 0.689,
+ "step": 3552
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.1148253860375764e-06,
+ "loss": 0.74,
+ "step": 3553
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.109598274081413e-06,
+ "loss": 0.6973,
+ "step": 3554
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.104373625495114e-06,
+ "loss": 0.5934,
+ "step": 3555
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.099151442463629e-06,
+ "loss": 0.6991,
+ "step": 3556
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.09393172717088e-06,
+ "loss": 0.6268,
+ "step": 3557
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.088714481799755e-06,
+ "loss": 0.6335,
+ "step": 3558
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.083499708532108e-06,
+ "loss": 0.7248,
+ "step": 3559
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.078287409548764e-06,
+ "loss": 0.6703,
+ "step": 3560
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.073077587029509e-06,
+ "loss": 0.6513,
+ "step": 3561
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.06787024315309e-06,
+ "loss": 0.6274,
+ "step": 3562
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.0626653800972236e-06,
+ "loss": 0.6904,
+ "step": 3563
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.057463000038586e-06,
+ "loss": 0.7078,
+ "step": 3564
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.0522631051528205e-06,
+ "loss": 0.6418,
+ "step": 3565
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.047065697614526e-06,
+ "loss": 0.7108,
+ "step": 3566
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.0418707795972575e-06,
+ "loss": 0.6311,
+ "step": 3567
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.036678353273535e-06,
+ "loss": 0.6603,
+ "step": 3568
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.031488420814834e-06,
+ "loss": 0.7043,
+ "step": 3569
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.026300984391589e-06,
+ "loss": 0.7342,
+ "step": 3570
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.021116046173189e-06,
+ "loss": 0.6971,
+ "step": 3571
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.015933608327978e-06,
+ "loss": 0.6524,
+ "step": 3572
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.010753673023254e-06,
+ "loss": 0.5689,
+ "step": 3573
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.005576242425272e-06,
+ "loss": 0.5879,
+ "step": 3574
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.000401318699236e-06,
+ "loss": 0.7282,
+ "step": 3575
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 3.9952289040093e-06,
+ "loss": 0.6692,
+ "step": 3576
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 3.9900590005185735e-06,
+ "loss": 0.6458,
+ "step": 3577
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 3.984891610389113e-06,
+ "loss": 0.7011,
+ "step": 3578
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 3.9797267357819294e-06,
+ "loss": 0.686,
+ "step": 3579
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 3.974564378856968e-06,
+ "loss": 0.7502,
+ "step": 3580
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 3.969404541773132e-06,
+ "loss": 0.6571,
+ "step": 3581
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.964247226688268e-06,
+ "loss": 0.6826,
+ "step": 3582
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.959092435759174e-06,
+ "loss": 0.7421,
+ "step": 3583
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.953940171141588e-06,
+ "loss": 0.6458,
+ "step": 3584
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.9487904349901815e-06,
+ "loss": 0.6405,
+ "step": 3585
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.9436432294585835e-06,
+ "loss": 0.7136,
+ "step": 3586
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.938498556699357e-06,
+ "loss": 0.7458,
+ "step": 3587
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.933356418864008e-06,
+ "loss": 0.5202,
+ "step": 3588
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.928216818102982e-06,
+ "loss": 0.7142,
+ "step": 3589
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.923079756565664e-06,
+ "loss": 0.7112,
+ "step": 3590
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.917945236400374e-06,
+ "loss": 0.6465,
+ "step": 3591
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.912813259754375e-06,
+ "loss": 0.782,
+ "step": 3592
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.907683828773861e-06,
+ "loss": 0.7172,
+ "step": 3593
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.9025569456039655e-06,
+ "loss": 0.6622,
+ "step": 3594
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.897432612388752e-06,
+ "loss": 0.6953,
+ "step": 3595
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.892310831271223e-06,
+ "loss": 0.6886,
+ "step": 3596
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.8871916043933065e-06,
+ "loss": 0.625,
+ "step": 3597
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.882074933895869e-06,
+ "loss": 0.6056,
+ "step": 3598
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.876960821918704e-06,
+ "loss": 0.6973,
+ "step": 3599
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.871849270600534e-06,
+ "loss": 0.6198,
+ "step": 3600
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.866740282079023e-06,
+ "loss": 0.6827,
+ "step": 3601
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.861633858490746e-06,
+ "loss": 0.6989,
+ "step": 3602
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.8565300019712115e-06,
+ "loss": 0.6177,
+ "step": 3603
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.851428714654858e-06,
+ "loss": 0.7616,
+ "step": 3604
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.846329998675048e-06,
+ "loss": 0.7217,
+ "step": 3605
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.841233856164066e-06,
+ "loss": 0.6865,
+ "step": 3606
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.836140289253124e-06,
+ "loss": 0.6427,
+ "step": 3607
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.831049300072357e-06,
+ "loss": 0.532,
+ "step": 3608
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.8259608907508105e-06,
+ "loss": 0.6157,
+ "step": 3609
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.82087506341647e-06,
+ "loss": 0.6255,
+ "step": 3610
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.81579182019623e-06,
+ "loss": 0.7575,
+ "step": 3611
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.810711163215904e-06,
+ "loss": 0.7369,
+ "step": 3612
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.8056330946002283e-06,
+ "loss": 0.7422,
+ "step": 3613
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.8005576164728562e-06,
+ "loss": 0.7267,
+ "step": 3614
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7954847309563504e-06,
+ "loss": 0.7339,
+ "step": 3615
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7904144401721976e-06,
+ "loss": 0.6555,
+ "step": 3616
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7853467462407966e-06,
+ "loss": 0.626,
+ "step": 3617
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7802816512814577e-06,
+ "loss": 0.6423,
+ "step": 3618
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7752191574124176e-06,
+ "loss": 0.672,
+ "step": 3619
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7701592667508026e-06,
+ "loss": 0.675,
+ "step": 3620
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7651019814126656e-06,
+ "loss": 0.7499,
+ "step": 3621
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.760047303512968e-06,
+ "loss": 0.5804,
+ "step": 3622
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7549952351655795e-06,
+ "loss": 0.6886,
+ "step": 3623
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.749945778483278e-06,
+ "loss": 0.6801,
+ "step": 3624
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7448989355777497e-06,
+ "loss": 0.6183,
+ "step": 3625
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7398547085595906e-06,
+ "loss": 0.7237,
+ "step": 3626
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.734813099538289e-06,
+ "loss": 0.6777,
+ "step": 3627
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7297741106222606e-06,
+ "loss": 0.6891,
+ "step": 3628
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.724737743918809e-06,
+ "loss": 0.685,
+ "step": 3629
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.719704001534149e-06,
+ "loss": 0.6472,
+ "step": 3630
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7146728855733947e-06,
+ "loss": 0.7425,
+ "step": 3631
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.7096443981405574e-06,
+ "loss": 0.6287,
+ "step": 3632
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.7046185413385573e-06,
+ "loss": 0.609,
+ "step": 3633
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.69959531726921e-06,
+ "loss": 0.7116,
+ "step": 3634
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.694574728033231e-06,
+ "loss": 0.6878,
+ "step": 3635
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6895567757302354e-06,
+ "loss": 0.7494,
+ "step": 3636
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6845414624587326e-06,
+ "loss": 0.5985,
+ "step": 3637
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6795287903161315e-06,
+ "loss": 0.719,
+ "step": 3638
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.674518761398733e-06,
+ "loss": 0.5979,
+ "step": 3639
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6695113778017364e-06,
+ "loss": 0.6121,
+ "step": 3640
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.664506641619232e-06,
+ "loss": 0.6205,
+ "step": 3641
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6595045549442034e-06,
+ "loss": 0.7504,
+ "step": 3642
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.65450511986853e-06,
+ "loss": 0.7174,
+ "step": 3643
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.649508338482972e-06,
+ "loss": 0.5828,
+ "step": 3644
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.644514212877187e-06,
+ "loss": 0.6456,
+ "step": 3645
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6395227451397285e-06,
+ "loss": 0.6812,
+ "step": 3646
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6345339373580267e-06,
+ "loss": 0.6717,
+ "step": 3647
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6295477916184107e-06,
+ "loss": 0.6971,
+ "step": 3648
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.62456431000608e-06,
+ "loss": 0.7057,
+ "step": 3649
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6195834946051357e-06,
+ "loss": 0.6997,
+ "step": 3650
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6146053474985565e-06,
+ "loss": 0.7337,
+ "step": 3651
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6096298707682075e-06,
+ "loss": 0.7044,
+ "step": 3652
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6046570664948357e-06,
+ "loss": 0.6965,
+ "step": 3653
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5996869367580713e-06,
+ "loss": 0.7635,
+ "step": 3654
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5947194836364264e-06,
+ "loss": 0.724,
+ "step": 3655
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5897547092072925e-06,
+ "loss": 0.6956,
+ "step": 3656
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5847926155469427e-06,
+ "loss": 0.6929,
+ "step": 3657
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.579833204730525e-06,
+ "loss": 0.7324,
+ "step": 3658
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.574876478832072e-06,
+ "loss": 0.5621,
+ "step": 3659
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.569922439924487e-06,
+ "loss": 0.6204,
+ "step": 3660
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5649710900795564e-06,
+ "loss": 0.6626,
+ "step": 3661
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.560022431367932e-06,
+ "loss": 0.7005,
+ "step": 3662
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5550764658591486e-06,
+ "loss": 0.7684,
+ "step": 3663
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.550133195621609e-06,
+ "loss": 0.6382,
+ "step": 3664
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5451926227225997e-06,
+ "loss": 0.6692,
+ "step": 3665
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.54025474922827e-06,
+ "loss": 0.5672,
+ "step": 3666
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5353195772036352e-06,
+ "loss": 0.7442,
+ "step": 3667
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.530387108712593e-06,
+ "loss": 0.6556,
+ "step": 3668
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.525457345817902e-06,
+ "loss": 0.443,
+ "step": 3669
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5205302905811946e-06,
+ "loss": 0.7258,
+ "step": 3670
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5156059450629677e-06,
+ "loss": 0.7379,
+ "step": 3671
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5106843113225854e-06,
+ "loss": 0.6211,
+ "step": 3672
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5057653914182787e-06,
+ "loss": 0.5749,
+ "step": 3673
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5008491874071427e-06,
+ "loss": 0.7207,
+ "step": 3674
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.4959357013451368e-06,
+ "loss": 0.6317,
+ "step": 3675
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.491024935287086e-06,
+ "loss": 0.7668,
+ "step": 3676
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.486116891286674e-06,
+ "loss": 0.7509,
+ "step": 3677
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.481211571396451e-06,
+ "loss": 0.6749,
+ "step": 3678
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.4763089776678206e-06,
+ "loss": 0.6174,
+ "step": 3679
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.4714091121510517e-06,
+ "loss": 0.6535,
+ "step": 3680
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.4665119768952736e-06,
+ "loss": 0.6835,
+ "step": 3681
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.4616175739484657e-06,
+ "loss": 0.7109,
+ "step": 3682
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.456725905357482e-06,
+ "loss": 0.6707,
+ "step": 3683
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.4518369731680114e-06,
+ "loss": 0.6231,
+ "step": 3684
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.4469507794246105e-06,
+ "loss": 0.6753,
+ "step": 3685
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.4420673261706906e-06,
+ "loss": 0.5957,
+ "step": 3686
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.4371866154485147e-06,
+ "loss": 0.7307,
+ "step": 3687
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.4323086492991975e-06,
+ "loss": 0.6061,
+ "step": 3688
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.42743342976271e-06,
+ "loss": 0.6809,
+ "step": 3689
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.422560958877874e-06,
+ "loss": 0.7144,
+ "step": 3690
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.417691238682351e-06,
+ "loss": 0.5982,
+ "step": 3691
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.412824271212671e-06,
+ "loss": 0.6608,
+ "step": 3692
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.4079600585041993e-06,
+ "loss": 0.7344,
+ "step": 3693
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.403098602591152e-06,
+ "loss": 0.6075,
+ "step": 3694
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3982399055065953e-06,
+ "loss": 0.738,
+ "step": 3695
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.39338396928244e-06,
+ "loss": 0.6573,
+ "step": 3696
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3885307959494374e-06,
+ "loss": 0.6915,
+ "step": 3697
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3836803875371904e-06,
+ "loss": 0.6187,
+ "step": 3698
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.378832746074143e-06,
+ "loss": 0.6544,
+ "step": 3699
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3739878735875797e-06,
+ "loss": 0.7129,
+ "step": 3700
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.369145772103637e-06,
+ "loss": 0.6516,
+ "step": 3701
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.364306443647277e-06,
+ "loss": 0.7338,
+ "step": 3702
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.359469890242313e-06,
+ "loss": 0.6754,
+ "step": 3703
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3546361139113935e-06,
+ "loss": 0.5449,
+ "step": 3704
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.349805116676008e-06,
+ "loss": 0.6361,
+ "step": 3705
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3449769005564835e-06,
+ "loss": 0.6758,
+ "step": 3706
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3401514675719815e-06,
+ "loss": 0.6567,
+ "step": 3707
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3353288197405055e-06,
+ "loss": 0.5752,
+ "step": 3708
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3305089590788796e-06,
+ "loss": 0.7251,
+ "step": 3709
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3256918876027835e-06,
+ "loss": 0.7317,
+ "step": 3710
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3208776073267146e-06,
+ "loss": 0.6885,
+ "step": 3711
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.31606612026401e-06,
+ "loss": 0.6267,
+ "step": 3712
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3112574284268395e-06,
+ "loss": 0.6313,
+ "step": 3713
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3064515338261937e-06,
+ "loss": 0.635,
+ "step": 3714
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.301648438471905e-06,
+ "loss": 0.6605,
+ "step": 3715
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2968481443726306e-06,
+ "loss": 0.6465,
+ "step": 3716
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2920506535358566e-06,
+ "loss": 0.6341,
+ "step": 3717
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2872559679678972e-06,
+ "loss": 0.672,
+ "step": 3718
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2824640896738935e-06,
+ "loss": 0.7277,
+ "step": 3719
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2776750206578113e-06,
+ "loss": 0.6277,
+ "step": 3720
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2728887629224415e-06,
+ "loss": 0.7548,
+ "step": 3721
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.268105318469402e-06,
+ "loss": 0.665,
+ "step": 3722
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.263324689299132e-06,
+ "loss": 0.6222,
+ "step": 3723
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2585468774108943e-06,
+ "loss": 0.7394,
+ "step": 3724
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2537718848027744e-06,
+ "loss": 0.6136,
+ "step": 3725
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2489997134716723e-06,
+ "loss": 0.6787,
+ "step": 3726
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2442303654133124e-06,
+ "loss": 0.5751,
+ "step": 3727
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.239463842622247e-06,
+ "loss": 0.6387,
+ "step": 3728
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.234700147091834e-06,
+ "loss": 0.524,
+ "step": 3729
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.229939280814257e-06,
+ "loss": 0.6619,
+ "step": 3730
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.22518124578051e-06,
+ "loss": 0.752,
+ "step": 3731
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.220426043980407e-06,
+ "loss": 0.6895,
+ "step": 3732
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.2156736774025766e-06,
+ "loss": 0.7207,
+ "step": 3733
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.2109241480344624e-06,
+ "loss": 0.684,
+ "step": 3734
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.206177457862321e-06,
+ "loss": 0.6942,
+ "step": 3735
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.2014336088712194e-06,
+ "loss": 0.6971,
+ "step": 3736
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1966926030450408e-06,
+ "loss": 0.7369,
+ "step": 3737
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.191954442366475e-06,
+ "loss": 0.7226,
+ "step": 3738
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.187219128817025e-06,
+ "loss": 0.7359,
+ "step": 3739
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1824866643770024e-06,
+ "loss": 0.6251,
+ "step": 3740
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1777570510255262e-06,
+ "loss": 0.6854,
+ "step": 3741
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.173030290740524e-06,
+ "loss": 0.7542,
+ "step": 3742
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.168306385498733e-06,
+ "loss": 0.6272,
+ "step": 3743
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1635853372756874e-06,
+ "loss": 0.6857,
+ "step": 3744
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1588671480457324e-06,
+ "loss": 0.7213,
+ "step": 3745
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.154151819782024e-06,
+ "loss": 0.7613,
+ "step": 3746
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.149439354456514e-06,
+ "loss": 0.554,
+ "step": 3747
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.144729754039959e-06,
+ "loss": 0.649,
+ "step": 3748
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1400230205019124e-06,
+ "loss": 0.6398,
+ "step": 3749
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1353191558107367e-06,
+ "loss": 0.7006,
+ "step": 3750
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1306181619335894e-06,
+ "loss": 0.674,
+ "step": 3751
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.125920040836431e-06,
+ "loss": 0.7252,
+ "step": 3752
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.121224794484019e-06,
+ "loss": 0.6414,
+ "step": 3753
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1165324248399065e-06,
+ "loss": 0.6689,
+ "step": 3754
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1118429338664467e-06,
+ "loss": 0.6468,
+ "step": 3755
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1071563235247883e-06,
+ "loss": 0.5905,
+ "step": 3756
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.102472595774875e-06,
+ "loss": 0.6123,
+ "step": 3757
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0977917525754432e-06,
+ "loss": 0.5846,
+ "step": 3758
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.093113795884026e-06,
+ "loss": 0.5958,
+ "step": 3759
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0884387276569506e-06,
+ "loss": 0.6389,
+ "step": 3760
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0837665498493273e-06,
+ "loss": 0.6704,
+ "step": 3761
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0790972644150674e-06,
+ "loss": 0.7076,
+ "step": 3762
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.074430873306865e-06,
+ "loss": 0.6335,
+ "step": 3763
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.069767378476216e-06,
+ "loss": 0.5954,
+ "step": 3764
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0651067818733948e-06,
+ "loss": 0.712,
+ "step": 3765
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.060449085447462e-06,
+ "loss": 0.7111,
+ "step": 3766
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.05579429114627e-06,
+ "loss": 0.6879,
+ "step": 3767
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.051142400916459e-06,
+ "loss": 0.7081,
+ "step": 3768
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.046493416703452e-06,
+ "loss": 0.6373,
+ "step": 3769
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.041847340451456e-06,
+ "loss": 0.6978,
+ "step": 3770
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0372041741034642e-06,
+ "loss": 0.6218,
+ "step": 3771
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0325639196012524e-06,
+ "loss": 0.5594,
+ "step": 3772
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0279265788853763e-06,
+ "loss": 0.7576,
+ "step": 3773
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0232921538951763e-06,
+ "loss": 0.5581,
+ "step": 3774
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0186606465687705e-06,
+ "loss": 0.6653,
+ "step": 3775
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0140320588430607e-06,
+ "loss": 0.6348,
+ "step": 3776
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0094063926537233e-06,
+ "loss": 0.6008,
+ "step": 3777
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.004783649935219e-06,
+ "loss": 0.7476,
+ "step": 3778
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.000163832620775e-06,
+ "loss": 0.59,
+ "step": 3779
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 2.9955469426424056e-06,
+ "loss": 0.6162,
+ "step": 3780
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 2.990932981930893e-06,
+ "loss": 0.5865,
+ "step": 3781
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.986321952415806e-06,
+ "loss": 0.5959,
+ "step": 3782
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.98171385602548e-06,
+ "loss": 0.5887,
+ "step": 3783
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9771086946870177e-06,
+ "loss": 0.65,
+ "step": 3784
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.972506470326303e-06,
+ "loss": 0.6387,
+ "step": 3785
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9679071848679896e-06,
+ "loss": 0.5422,
+ "step": 3786
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9633108402355027e-06,
+ "loss": 0.6905,
+ "step": 3787
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9587174383510343e-06,
+ "loss": 0.6709,
+ "step": 3788
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.95412698113555e-06,
+ "loss": 0.7551,
+ "step": 3789
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.949539470508782e-06,
+ "loss": 0.7038,
+ "step": 3790
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.944954908389229e-06,
+ "loss": 0.6984,
+ "step": 3791
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.94037329669416e-06,
+ "loss": 0.7535,
+ "step": 3792
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.935794637339605e-06,
+ "loss": 0.7002,
+ "step": 3793
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.931218932240365e-06,
+ "loss": 0.6734,
+ "step": 3794
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9266461833100047e-06,
+ "loss": 0.7254,
+ "step": 3795
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9220763924608442e-06,
+ "loss": 0.7101,
+ "step": 3796
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.917509561603977e-06,
+ "loss": 0.6957,
+ "step": 3797
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.912945692649255e-06,
+ "loss": 0.7679,
+ "step": 3798
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9083847875052894e-06,
+ "loss": 0.6927,
+ "step": 3799
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9038268480794564e-06,
+ "loss": 0.6566,
+ "step": 3800
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.899271876277887e-06,
+ "loss": 0.6614,
+ "step": 3801
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.894719874005475e-06,
+ "loss": 0.6577,
+ "step": 3802
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.890170843165868e-06,
+ "loss": 0.634,
+ "step": 3803
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8856247856614772e-06,
+ "loss": 0.6428,
+ "step": 3804
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.881081703393466e-06,
+ "loss": 0.7297,
+ "step": 3805
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8765415982617516e-06,
+ "loss": 0.6638,
+ "step": 3806
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8720044721650155e-06,
+ "loss": 0.7079,
+ "step": 3807
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8674703270006786e-06,
+ "loss": 0.6813,
+ "step": 3808
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.862939164664924e-06,
+ "loss": 0.7017,
+ "step": 3809
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8584109870526932e-06,
+ "loss": 0.6923,
+ "step": 3810
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8538857960576694e-06,
+ "loss": 0.6374,
+ "step": 3811
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.849363593572293e-06,
+ "loss": 0.544,
+ "step": 3812
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.844844381487748e-06,
+ "loss": 0.6486,
+ "step": 3813
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8403281616939725e-06,
+ "loss": 0.6714,
+ "step": 3814
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.835814936079655e-06,
+ "loss": 0.7819,
+ "step": 3815
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8313047065322275e-06,
+ "loss": 0.6837,
+ "step": 3816
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8267974749378714e-06,
+ "loss": 0.6032,
+ "step": 3817
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.822293243181513e-06,
+ "loss": 0.5896,
+ "step": 3818
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8177920131468273e-06,
+ "loss": 0.6502,
+ "step": 3819
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8132937867162302e-06,
+ "loss": 0.6117,
+ "step": 3820
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.808798565770883e-06,
+ "loss": 0.6947,
+ "step": 3821
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8043063521906897e-06,
+ "loss": 0.6853,
+ "step": 3822
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.799817147854299e-06,
+ "loss": 0.598,
+ "step": 3823
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.795330954639096e-06,
+ "loss": 0.6782,
+ "step": 3824
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.790847774421215e-06,
+ "loss": 0.6019,
+ "step": 3825
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.7863676090755175e-06,
+ "loss": 0.6272,
+ "step": 3826
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.7818904604756125e-06,
+ "loss": 0.717,
+ "step": 3827
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.7774163304938516e-06,
+ "loss": 0.6711,
+ "step": 3828
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.7729452210013173e-06,
+ "loss": 0.7064,
+ "step": 3829
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.7684771338678316e-06,
+ "loss": 0.6361,
+ "step": 3830
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.764012070961948e-06,
+ "loss": 0.6747,
+ "step": 3831
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7595500341509595e-06,
+ "loss": 0.6612,
+ "step": 3832
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7550910253008933e-06,
+ "loss": 0.7042,
+ "step": 3833
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7506350462765095e-06,
+ "loss": 0.6556,
+ "step": 3834
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7461820989413036e-06,
+ "loss": 0.6534,
+ "step": 3835
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7417321851574987e-06,
+ "loss": 0.6936,
+ "step": 3836
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.737285306786053e-06,
+ "loss": 0.7639,
+ "step": 3837
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7328414656866518e-06,
+ "loss": 0.7268,
+ "step": 3838
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7284006637177164e-06,
+ "loss": 0.6867,
+ "step": 3839
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.723962902736389e-06,
+ "loss": 0.7122,
+ "step": 3840
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7195281845985467e-06,
+ "loss": 0.6609,
+ "step": 3841
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7150965111587935e-06,
+ "loss": 0.6359,
+ "step": 3842
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7106678842704514e-06,
+ "loss": 0.7093,
+ "step": 3843
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7062423057855804e-06,
+ "loss": 0.665,
+ "step": 3844
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.701819777554956e-06,
+ "loss": 0.676,
+ "step": 3845
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.697400301428087e-06,
+ "loss": 0.5424,
+ "step": 3846
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6929838792532035e-06,
+ "loss": 0.638,
+ "step": 3847
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.688570512877249e-06,
+ "loss": 0.679,
+ "step": 3848
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.684160204145899e-06,
+ "loss": 0.6204,
+ "step": 3849
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6797529549035482e-06,
+ "loss": 0.6271,
+ "step": 3850
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.675348766993311e-06,
+ "loss": 0.6341,
+ "step": 3851
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6709476422570215e-06,
+ "loss": 0.6929,
+ "step": 3852
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.666549582535233e-06,
+ "loss": 0.6577,
+ "step": 3853
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.662154589667217e-06,
+ "loss": 0.7158,
+ "step": 3854
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6577626654909617e-06,
+ "loss": 0.6958,
+ "step": 3855
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.653373811843174e-06,
+ "loss": 0.6389,
+ "step": 3856
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.648988030559274e-06,
+ "loss": 0.6628,
+ "step": 3857
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6446053234734004e-06,
+ "loss": 0.69,
+ "step": 3858
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.640225692418401e-06,
+ "loss": 0.639,
+ "step": 3859
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6358491392258455e-06,
+ "loss": 0.7679,
+ "step": 3860
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6314756657260053e-06,
+ "loss": 0.6304,
+ "step": 3861
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.627105273747873e-06,
+ "loss": 0.7285,
+ "step": 3862
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.622737965119145e-06,
+ "loss": 0.6471,
+ "step": 3863
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6183737416662403e-06,
+ "loss": 0.7019,
+ "step": 3864
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6140126052142788e-06,
+ "loss": 0.7547,
+ "step": 3865
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6096545575870845e-06,
+ "loss": 0.747,
+ "step": 3866
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6052996006072008e-06,
+ "loss": 0.6179,
+ "step": 3867
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6009477360958713e-06,
+ "loss": 0.4432,
+ "step": 3868
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5965989658730485e-06,
+ "loss": 0.6201,
+ "step": 3869
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.592253291757393e-06,
+ "loss": 0.6743,
+ "step": 3870
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5879107155662676e-06,
+ "loss": 0.7009,
+ "step": 3871
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5835712391157395e-06,
+ "loss": 0.6286,
+ "step": 3872
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5792348642205823e-06,
+ "loss": 0.6524,
+ "step": 3873
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.574901592694269e-06,
+ "loss": 0.5609,
+ "step": 3874
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.570571426348978e-06,
+ "loss": 0.6385,
+ "step": 3875
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.566244366995587e-06,
+ "loss": 0.6847,
+ "step": 3876
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5619204164436796e-06,
+ "loss": 0.6633,
+ "step": 3877
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.557599576501526e-06,
+ "loss": 0.6185,
+ "step": 3878
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.553281848976109e-06,
+ "loss": 0.7413,
+ "step": 3879
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5489672356731057e-06,
+ "loss": 0.7192,
+ "step": 3880
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.544655738396885e-06,
+ "loss": 0.5788,
+ "step": 3881
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.540347358950529e-06,
+ "loss": 0.6707,
+ "step": 3882
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.536042099135795e-06,
+ "loss": 0.7384,
+ "step": 3883
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.531739960753149e-06,
+ "loss": 0.584,
+ "step": 3884
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.527440945601747e-06,
+ "loss": 0.6654,
+ "step": 3885
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.523145055479441e-06,
+ "loss": 0.7092,
+ "step": 3886
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.518852292182774e-06,
+ "loss": 0.5679,
+ "step": 3887
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.5145626575069835e-06,
+ "loss": 0.5896,
+ "step": 3888
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.5102761532460005e-06,
+ "loss": 0.611,
+ "step": 3889
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.5059927811924346e-06,
+ "loss": 0.701,
+ "step": 3890
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.501712543137602e-06,
+ "loss": 0.7507,
+ "step": 3891
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.497435440871502e-06,
+ "loss": 0.6286,
+ "step": 3892
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4931614761828182e-06,
+ "loss": 0.6995,
+ "step": 3893
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4888906508589293e-06,
+ "loss": 0.7279,
+ "step": 3894
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.484622966685892e-06,
+ "loss": 0.6674,
+ "step": 3895
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.480358425448457e-06,
+ "loss": 0.6292,
+ "step": 3896
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.476097028930058e-06,
+ "loss": 0.6651,
+ "step": 3897
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4718387789128153e-06,
+ "loss": 0.5386,
+ "step": 3898
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4675836771775265e-06,
+ "loss": 0.6127,
+ "step": 3899
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4633317255036894e-06,
+ "loss": 0.6311,
+ "step": 3900
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4590829256694615e-06,
+ "loss": 0.6963,
+ "step": 3901
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4548372794516985e-06,
+ "loss": 0.6129,
+ "step": 3902
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.450594788625932e-06,
+ "loss": 0.7165,
+ "step": 3903
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.446355454966374e-06,
+ "loss": 0.6666,
+ "step": 3904
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.442119280245917e-06,
+ "loss": 0.6554,
+ "step": 3905
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4378862662361324e-06,
+ "loss": 0.6318,
+ "step": 3906
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.433656414707272e-06,
+ "loss": 0.602,
+ "step": 3907
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4294297274282517e-06,
+ "loss": 0.6218,
+ "step": 3908
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.425206206166688e-06,
+ "loss": 0.6812,
+ "step": 3909
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.420985852688854e-06,
+ "loss": 0.7445,
+ "step": 3910
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.416768668759705e-06,
+ "loss": 0.6121,
+ "step": 3911
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4125546561428735e-06,
+ "loss": 0.6608,
+ "step": 3912
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.408343816600657e-06,
+ "loss": 0.6164,
+ "step": 3913
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.404136151894032e-06,
+ "loss": 0.672,
+ "step": 3914
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3999316637826486e-06,
+ "loss": 0.7307,
+ "step": 3915
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3957303540248265e-06,
+ "loss": 0.7199,
+ "step": 3916
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3915322243775564e-06,
+ "loss": 0.7631,
+ "step": 3917
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3873372765964964e-06,
+ "loss": 0.6412,
+ "step": 3918
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3831455124359793e-06,
+ "loss": 0.6269,
+ "step": 3919
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3789569336490025e-06,
+ "loss": 0.6488,
+ "step": 3920
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.374771541987232e-06,
+ "loss": 0.7651,
+ "step": 3921
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.370589339201003e-06,
+ "loss": 0.6518,
+ "step": 3922
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3664103270393123e-06,
+ "loss": 0.6648,
+ "step": 3923
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.362234507249832e-06,
+ "loss": 0.7085,
+ "step": 3924
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.358061881578885e-06,
+ "loss": 0.6401,
+ "step": 3925
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3538924517714656e-06,
+ "loss": 0.631,
+ "step": 3926
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.349726219571239e-06,
+ "loss": 0.6718,
+ "step": 3927
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.345563186720522e-06,
+ "loss": 0.5603,
+ "step": 3928
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3414033549603022e-06,
+ "loss": 0.6554,
+ "step": 3929
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.337246726030218e-06,
+ "loss": 0.6282,
+ "step": 3930
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3330933016685753e-06,
+ "loss": 0.6345,
+ "step": 3931
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3289430836123417e-06,
+ "loss": 0.6566,
+ "step": 3932
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.3247960735971385e-06,
+ "loss": 0.648,
+ "step": 3933
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.32065227335725e-06,
+ "loss": 0.5856,
+ "step": 3934
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.3165116846256154e-06,
+ "loss": 0.6334,
+ "step": 3935
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.312374309133831e-06,
+ "loss": 0.6797,
+ "step": 3936
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.308240148612152e-06,
+ "loss": 0.6509,
+ "step": 3937
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.3041092047894843e-06,
+ "loss": 0.594,
+ "step": 3938
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.299981479393393e-06,
+ "loss": 0.7166,
+ "step": 3939
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2958569741500946e-06,
+ "loss": 0.693,
+ "step": 3940
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.291735690784461e-06,
+ "loss": 0.683,
+ "step": 3941
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2876176310200184e-06,
+ "loss": 0.6204,
+ "step": 3942
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2835027965789345e-06,
+ "loss": 0.6681,
+ "step": 3943
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.279391189182042e-06,
+ "loss": 0.556,
+ "step": 3944
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.275282810548811e-06,
+ "loss": 0.6693,
+ "step": 3945
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.271177662397377e-06,
+ "loss": 0.6704,
+ "step": 3946
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2670757464445146e-06,
+ "loss": 0.5308,
+ "step": 3947
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2629770644056415e-06,
+ "loss": 0.6359,
+ "step": 3948
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2588816179948324e-06,
+ "loss": 0.6078,
+ "step": 3949
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2547894089248055e-06,
+ "loss": 0.6277,
+ "step": 3950
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2507004389069267e-06,
+ "loss": 0.6537,
+ "step": 3951
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2466147096512037e-06,
+ "loss": 0.6892,
+ "step": 3952
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2425322228662914e-06,
+ "loss": 0.6203,
+ "step": 3953
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.238452980259491e-06,
+ "loss": 0.6119,
+ "step": 3954
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2343769835367425e-06,
+ "loss": 0.7082,
+ "step": 3955
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2303042344026303e-06,
+ "loss": 0.6245,
+ "step": 3956
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.226234734560381e-06,
+ "loss": 0.6998,
+ "step": 3957
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.222168485711863e-06,
+ "loss": 0.7296,
+ "step": 3958
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2181054895575847e-06,
+ "loss": 0.6549,
+ "step": 3959
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2140457477966916e-06,
+ "loss": 0.7525,
+ "step": 3960
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2099892621269705e-06,
+ "loss": 0.6748,
+ "step": 3961
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2059360342448467e-06,
+ "loss": 0.6892,
+ "step": 3962
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.20188606584538e-06,
+ "loss": 0.7374,
+ "step": 3963
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.197839358622279e-06,
+ "loss": 0.4352,
+ "step": 3964
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.193795914267871e-06,
+ "loss": 0.6691,
+ "step": 3965
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1897557344731292e-06,
+ "loss": 0.6175,
+ "step": 3966
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1857188209276605e-06,
+ "loss": 0.6014,
+ "step": 3967
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1816851753197023e-06,
+ "loss": 0.7403,
+ "step": 3968
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1776547993361306e-06,
+ "loss": 0.7162,
+ "step": 3969
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.17362769466245e-06,
+ "loss": 0.7025,
+ "step": 3970
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.169603862982801e-06,
+ "loss": 0.6135,
+ "step": 3971
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1655833059799447e-06,
+ "loss": 0.6059,
+ "step": 3972
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1615660253352888e-06,
+ "loss": 0.7306,
+ "step": 3973
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1575520227288605e-06,
+ "loss": 0.6302,
+ "step": 3974
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.153541299839318e-06,
+ "loss": 0.7424,
+ "step": 3975
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1495338583439497e-06,
+ "loss": 0.7586,
+ "step": 3976
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.145529699918666e-06,
+ "loss": 0.5375,
+ "step": 3977
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.141528826238012e-06,
+ "loss": 0.6889,
+ "step": 3978
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1375312389751546e-06,
+ "loss": 0.6006,
+ "step": 3979
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.133536939801888e-06,
+ "loss": 0.675,
+ "step": 3980
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.129545930388627e-06,
+ "loss": 0.5835,
+ "step": 3981
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.125558212404424e-06,
+ "loss": 0.6543,
+ "step": 3982
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.121573787516936e-06,
+ "loss": 0.5867,
+ "step": 3983
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.1175926573924543e-06,
+ "loss": 0.7383,
+ "step": 3984
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.113614823695892e-06,
+ "loss": 0.5716,
+ "step": 3985
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.109640288090781e-06,
+ "loss": 0.6049,
+ "step": 3986
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.105669052239274e-06,
+ "loss": 0.6559,
+ "step": 3987
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.101701117802145e-06,
+ "loss": 0.7165,
+ "step": 3988
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0977364864387896e-06,
+ "loss": 0.6994,
+ "step": 3989
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.093775159807211e-06,
+ "loss": 0.75,
+ "step": 3990
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0898171395640463e-06,
+ "loss": 0.6715,
+ "step": 3991
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.08586242736454e-06,
+ "loss": 0.7675,
+ "step": 3992
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.081911024862554e-06,
+ "loss": 0.5701,
+ "step": 3993
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.077962933710572e-06,
+ "loss": 0.6573,
+ "step": 3994
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.074018155559683e-06,
+ "loss": 0.6481,
+ "step": 3995
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0700766920595973e-06,
+ "loss": 0.5685,
+ "step": 3996
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.066138544858637e-06,
+ "loss": 0.6148,
+ "step": 3997
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0622037156037388e-06,
+ "loss": 0.6811,
+ "step": 3998
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0582722059404515e-06,
+ "loss": 0.6523,
+ "step": 3999
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.054344017512934e-06,
+ "loss": 0.6871,
+ "step": 4000
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.050419151963957e-06,
+ "loss": 0.7068,
+ "step": 4001
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.046497610934903e-06,
+ "loss": 0.7241,
+ "step": 4002
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0425793960657623e-06,
+ "loss": 0.6584,
+ "step": 4003
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.038664508995135e-06,
+ "loss": 0.6605,
+ "step": 4004
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0347529513602305e-06,
+ "loss": 0.6036,
+ "step": 4005
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0308447247968654e-06,
+ "loss": 0.6259,
+ "step": 4006
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0269398309394585e-06,
+ "loss": 0.6315,
+ "step": 4007
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0230382714210384e-06,
+ "loss": 0.6904,
+ "step": 4008
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.019140047873246e-06,
+ "loss": 0.7089,
+ "step": 4009
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0152451619263182e-06,
+ "loss": 0.6314,
+ "step": 4010
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0113536152091006e-06,
+ "loss": 0.6677,
+ "step": 4011
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0074654093490365e-06,
+ "loss": 0.6122,
+ "step": 4012
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0035805459721768e-06,
+ "loss": 0.5877,
+ "step": 4013
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.999699026703177e-06,
+ "loss": 0.6042,
+ "step": 4014
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9958208531652876e-06,
+ "loss": 0.6507,
+ "step": 4015
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.991946026980366e-06,
+ "loss": 0.6329,
+ "step": 4016
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9880745497688658e-06,
+ "loss": 0.7108,
+ "step": 4017
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9842064231498415e-06,
+ "loss": 0.7526,
+ "step": 4018
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9803416487409465e-06,
+ "loss": 0.7669,
+ "step": 4019
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9764802281584315e-06,
+ "loss": 0.6029,
+ "step": 4020
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.972622163017145e-06,
+ "loss": 0.6891,
+ "step": 4021
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9687674549305335e-06,
+ "loss": 0.5707,
+ "step": 4022
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.964916105510637e-06,
+ "loss": 0.6515,
+ "step": 4023
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9610681163680957e-06,
+ "loss": 0.5911,
+ "step": 4024
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9572234891121354e-06,
+ "loss": 0.6366,
+ "step": 4025
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.953382225350583e-06,
+ "loss": 0.6561,
+ "step": 4026
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9495443266898615e-06,
+ "loss": 0.7092,
+ "step": 4027
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9457097947349802e-06,
+ "loss": 0.7049,
+ "step": 4028
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9418786310895467e-06,
+ "loss": 0.6964,
+ "step": 4029
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9380508373557496e-06,
+ "loss": 0.659,
+ "step": 4030
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.934226415134377e-06,
+ "loss": 0.7126,
+ "step": 4031
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9304053660248066e-06,
+ "loss": 0.587,
+ "step": 4032
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.9265876916250026e-06,
+ "loss": 0.688,
+ "step": 4033
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.9227733935315187e-06,
+ "loss": 0.7444,
+ "step": 4034
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.918962473339496e-06,
+ "loss": 0.553,
+ "step": 4035
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.9151549326426654e-06,
+ "loss": 0.6676,
+ "step": 4036
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.9113507730333435e-06,
+ "loss": 0.5779,
+ "step": 4037
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.90754999610243e-06,
+ "loss": 0.7099,
+ "step": 4038
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.9037526034394149e-06,
+ "loss": 0.4888,
+ "step": 4039
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8999585966323674e-06,
+ "loss": 0.7722,
+ "step": 4040
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8961679772679486e-06,
+ "loss": 0.6735,
+ "step": 4041
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8923807469313927e-06,
+ "loss": 0.7517,
+ "step": 4042
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8885969072065224e-06,
+ "loss": 0.6839,
+ "step": 4043
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8848164596757413e-06,
+ "loss": 0.6297,
+ "step": 4044
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.88103940592004e-06,
+ "loss": 0.6963,
+ "step": 4045
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8772657475189849e-06,
+ "loss": 0.7048,
+ "step": 4046
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.873495486050716e-06,
+ "loss": 0.6251,
+ "step": 4047
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.869728623091963e-06,
+ "loss": 0.5572,
+ "step": 4048
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8659651602180295e-06,
+ "loss": 0.6946,
+ "step": 4049
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8622050990027996e-06,
+ "loss": 0.6384,
+ "step": 4050
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.858448441018732e-06,
+ "loss": 0.7228,
+ "step": 4051
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8546951878368635e-06,
+ "loss": 0.764,
+ "step": 4052
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8509453410268086e-06,
+ "loss": 0.6942,
+ "step": 4053
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8471989021567537e-06,
+ "loss": 0.6169,
+ "step": 4054
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8434558727934636e-06,
+ "loss": 0.6467,
+ "step": 4055
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8397162545022751e-06,
+ "loss": 0.7627,
+ "step": 4056
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.835980048847098e-06,
+ "loss": 0.6896,
+ "step": 4057
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8322472573904204e-06,
+ "loss": 0.5712,
+ "step": 4058
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8285178816932913e-06,
+ "loss": 0.7457,
+ "step": 4059
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.824791923315341e-06,
+ "loss": 0.617,
+ "step": 4060
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8210693838147686e-06,
+ "loss": 0.7346,
+ "step": 4061
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8173502647483398e-06,
+ "loss": 0.5456,
+ "step": 4062
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8136345676713962e-06,
+ "loss": 0.4922,
+ "step": 4063
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.809922294137847e-06,
+ "loss": 0.6961,
+ "step": 4064
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8062134457001612e-06,
+ "loss": 0.6664,
+ "step": 4065
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.802508023909384e-06,
+ "loss": 0.71,
+ "step": 4066
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7988060303151256e-06,
+ "loss": 0.6334,
+ "step": 4067
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.795107466465562e-06,
+ "loss": 0.6336,
+ "step": 4068
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7914123339074351e-06,
+ "loss": 0.6112,
+ "step": 4069
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.787720634186052e-06,
+ "loss": 0.6666,
+ "step": 4070
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7840323688452833e-06,
+ "loss": 0.7445,
+ "step": 4071
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7803475394275637e-06,
+ "loss": 0.7056,
+ "step": 4072
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7766661474738933e-06,
+ "loss": 0.6424,
+ "step": 4073
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7729881945238303e-06,
+ "loss": 0.6876,
+ "step": 4074
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7693136821154989e-06,
+ "loss": 0.7316,
+ "step": 4075
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.765642611785584e-06,
+ "loss": 0.5921,
+ "step": 4076
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7619749850693235e-06,
+ "loss": 0.6396,
+ "step": 4077
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7583108035005269e-06,
+ "loss": 0.6257,
+ "step": 4078
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7546500686115542e-06,
+ "loss": 0.7564,
+ "step": 4079
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7509927819333272e-06,
+ "loss": 0.6802,
+ "step": 4080
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7473389449953304e-06,
+ "loss": 0.709,
+ "step": 4081
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.743688559325596e-06,
+ "loss": 0.6859,
+ "step": 4082
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7400416264507192e-06,
+ "loss": 0.7039,
+ "step": 4083
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.736398147895848e-06,
+ "loss": 0.6444,
+ "step": 4084
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7327581251846902e-06,
+ "loss": 0.6819,
+ "step": 4085
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.729121559839504e-06,
+ "loss": 0.6648,
+ "step": 4086
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7254884533811034e-06,
+ "loss": 0.6048,
+ "step": 4087
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.72185880732886e-06,
+ "loss": 0.6669,
+ "step": 4088
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7182326232006875e-06,
+ "loss": 0.7326,
+ "step": 4089
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.714609902513059e-06,
+ "loss": 0.6778,
+ "step": 4090
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7109906467810033e-06,
+ "loss": 0.6515,
+ "step": 4091
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7073748575180937e-06,
+ "loss": 0.7335,
+ "step": 4092
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7037625362364585e-06,
+ "loss": 0.6406,
+ "step": 4093
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.700153684446767e-06,
+ "loss": 0.654,
+ "step": 4094
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6965483036582454e-06,
+ "loss": 0.703,
+ "step": 4095
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6929463953786673e-06,
+ "loss": 0.6432,
+ "step": 4096
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6893479611143527e-06,
+ "loss": 0.6805,
+ "step": 4097
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6857530023701695e-06,
+ "loss": 0.7049,
+ "step": 4098
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6821615206495313e-06,
+ "loss": 0.7088,
+ "step": 4099
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6785735174543983e-06,
+ "loss": 0.7459,
+ "step": 4100
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.674988994285276e-06,
+ "loss": 0.6727,
+ "step": 4101
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6714079526412142e-06,
+ "loss": 0.6418,
+ "step": 4102
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.667830394019807e-06,
+ "loss": 0.6015,
+ "step": 4103
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6642563199171914e-06,
+ "loss": 0.6804,
+ "step": 4104
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6606857318280478e-06,
+ "loss": 0.6734,
+ "step": 4105
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6571186312456011e-06,
+ "loss": 0.7647,
+ "step": 4106
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6535550196616102e-06,
+ "loss": 0.6569,
+ "step": 4107
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6499948985663783e-06,
+ "loss": 0.6444,
+ "step": 4108
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6464382694487579e-06,
+ "loss": 0.7211,
+ "step": 4109
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6428851337961294e-06,
+ "loss": 0.4994,
+ "step": 4110
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6393354930944194e-06,
+ "loss": 0.7371,
+ "step": 4111
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.635789348828084e-06,
+ "loss": 0.7495,
+ "step": 4112
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6322467024801282e-06,
+ "loss": 0.6738,
+ "step": 4113
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6287075555320864e-06,
+ "loss": 0.7261,
+ "step": 4114
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6251719094640328e-06,
+ "loss": 0.5722,
+ "step": 4115
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.621639765754578e-06,
+ "loss": 0.6714,
+ "step": 4116
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6181111258808658e-06,
+ "loss": 0.6507,
+ "step": 4117
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6145859913185757e-06,
+ "loss": 0.6337,
+ "step": 4118
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.611064363541922e-06,
+ "loss": 0.6552,
+ "step": 4119
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6075462440236512e-06,
+ "loss": 0.6524,
+ "step": 4120
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6040316342350425e-06,
+ "loss": 0.6474,
+ "step": 4121
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6005205356459097e-06,
+ "loss": 0.6307,
+ "step": 4122
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.597012949724598e-06,
+ "loss": 0.5668,
+ "step": 4123
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.593508877937977e-06,
+ "loss": 0.4631,
+ "step": 4124
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.590008321751454e-06,
+ "loss": 0.7062,
+ "step": 4125
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.586511282628963e-06,
+ "loss": 0.7146,
+ "step": 4126
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.5830177620329712e-06,
+ "loss": 0.6878,
+ "step": 4127
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.579527761424472e-06,
+ "loss": 0.6514,
+ "step": 4128
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.5760412822629822e-06,
+ "loss": 0.6638,
+ "step": 4129
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.572558326006549e-06,
+ "loss": 0.7244,
+ "step": 4130
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.5690788941117508e-06,
+ "loss": 0.6602,
+ "step": 4131
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.5656029880336854e-06,
+ "loss": 0.6304,
+ "step": 4132
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5621306092259804e-06,
+ "loss": 0.6175,
+ "step": 4133
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.558661759140786e-06,
+ "loss": 0.7321,
+ "step": 4134
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5551964392287788e-06,
+ "loss": 0.6775,
+ "step": 4135
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5517346509391563e-06,
+ "loss": 0.5859,
+ "step": 4136
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5482763957196423e-06,
+ "loss": 0.6071,
+ "step": 4137
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5448216750164803e-06,
+ "loss": 0.6495,
+ "step": 4138
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5413704902744364e-06,
+ "loss": 0.704,
+ "step": 4139
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5379228429368021e-06,
+ "loss": 0.5902,
+ "step": 4140
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5344787344453803e-06,
+ "loss": 0.6857,
+ "step": 4141
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5310381662405017e-06,
+ "loss": 0.742,
+ "step": 4142
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5276011397610136e-06,
+ "loss": 0.6876,
+ "step": 4143
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.524167656444282e-06,
+ "loss": 0.6698,
+ "step": 4144
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.520737717726195e-06,
+ "loss": 0.6015,
+ "step": 4145
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5173113250411554e-06,
+ "loss": 0.5668,
+ "step": 4146
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5138884798220798e-06,
+ "loss": 0.6411,
+ "step": 4147
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.510469183500405e-06,
+ "loss": 0.6667,
+ "step": 4148
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.507053437506083e-06,
+ "loss": 0.76,
+ "step": 4149
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5036412432675818e-06,
+ "loss": 0.6456,
+ "step": 4150
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.500232602211883e-06,
+ "loss": 0.6614,
+ "step": 4151
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4968275157644817e-06,
+ "loss": 0.6989,
+ "step": 4152
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4934259853493883e-06,
+ "loss": 0.7697,
+ "step": 4153
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4900280123891253e-06,
+ "loss": 0.6956,
+ "step": 4154
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4866335983047264e-06,
+ "loss": 0.6892,
+ "step": 4155
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4832427445157383e-06,
+ "loss": 0.7041,
+ "step": 4156
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4798554524402176e-06,
+ "loss": 0.5999,
+ "step": 4157
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4764717234947346e-06,
+ "loss": 0.615,
+ "step": 4158
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4730915590943628e-06,
+ "loss": 0.6942,
+ "step": 4159
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4697149606526918e-06,
+ "loss": 0.7075,
+ "step": 4160
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.466341929581816e-06,
+ "loss": 0.6308,
+ "step": 4161
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4629724672923384e-06,
+ "loss": 0.5937,
+ "step": 4162
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4596065751933763e-06,
+ "loss": 0.6581,
+ "step": 4163
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4562442546925404e-06,
+ "loss": 0.6822,
+ "step": 4164
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.452885507195959e-06,
+ "loss": 0.6913,
+ "step": 4165
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4495303341082622e-06,
+ "loss": 0.6798,
+ "step": 4166
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4461787368325863e-06,
+ "loss": 0.7019,
+ "step": 4167
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4428307167705692e-06,
+ "loss": 0.64,
+ "step": 4168
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.439486275322357e-06,
+ "loss": 0.6435,
+ "step": 4169
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4361454138865983e-06,
+ "loss": 0.7136,
+ "step": 4170
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4328081338604383e-06,
+ "loss": 0.6806,
+ "step": 4171
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4294744366395351e-06,
+ "loss": 0.7973,
+ "step": 4172
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4261443236180418e-06,
+ "loss": 0.6777,
+ "step": 4173
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4228177961886136e-06,
+ "loss": 0.73,
+ "step": 4174
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4194948557424083e-06,
+ "loss": 0.6586,
+ "step": 4175
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4161755036690771e-06,
+ "loss": 0.6972,
+ "step": 4176
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.412859741356778e-06,
+ "loss": 0.6949,
+ "step": 4177
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4095475701921657e-06,
+ "loss": 0.6544,
+ "step": 4178
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4062389915603903e-06,
+ "loss": 0.5937,
+ "step": 4179
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4029340068451003e-06,
+ "loss": 0.6087,
+ "step": 4180
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.3996326174284502e-06,
+ "loss": 0.573,
+ "step": 4181
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.3963348246910757e-06,
+ "loss": 0.6466,
+ "step": 4182
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.393040630012118e-06,
+ "loss": 0.6926,
+ "step": 4183
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3897500347692116e-06,
+ "loss": 0.6864,
+ "step": 4184
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.386463040338485e-06,
+ "loss": 0.7112,
+ "step": 4185
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3831796480945626e-06,
+ "loss": 0.5924,
+ "step": 4186
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3798998594105606e-06,
+ "loss": 0.6542,
+ "step": 4187
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3766236756580896e-06,
+ "loss": 0.7657,
+ "step": 4188
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3733510982072485e-06,
+ "loss": 0.6823,
+ "step": 4189
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3700821284266352e-06,
+ "loss": 0.7042,
+ "step": 4190
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3668167676833332e-06,
+ "loss": 0.6388,
+ "step": 4191
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3635550173429191e-06,
+ "loss": 0.6316,
+ "step": 4192
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3602968787694615e-06,
+ "loss": 0.7007,
+ "step": 4193
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3570423533255106e-06,
+ "loss": 0.6689,
+ "step": 4194
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3537914423721144e-06,
+ "loss": 0.6213,
+ "step": 4195
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3505441472688052e-06,
+ "loss": 0.7466,
+ "step": 4196
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3473004693736037e-06,
+ "loss": 0.5873,
+ "step": 4197
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3440604100430199e-06,
+ "loss": 0.6067,
+ "step": 4198
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3408239706320459e-06,
+ "loss": 0.7758,
+ "step": 4199
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3375911524941654e-06,
+ "loss": 0.6302,
+ "step": 4200
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3343619569813437e-06,
+ "loss": 0.7564,
+ "step": 4201
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3311363854440329e-06,
+ "loss": 0.5936,
+ "step": 4202
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3279144392311693e-06,
+ "loss": 0.5974,
+ "step": 4203
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.324696119690173e-06,
+ "loss": 0.6072,
+ "step": 4204
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3214814281669486e-06,
+ "loss": 0.5909,
+ "step": 4205
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3182703660058782e-06,
+ "loss": 0.5606,
+ "step": 4206
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.31506293454983e-06,
+ "loss": 0.6086,
+ "step": 4207
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.31185913514016e-06,
+ "loss": 0.7052,
+ "step": 4208
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.308658969116694e-06,
+ "loss": 0.6928,
+ "step": 4209
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3054624378177483e-06,
+ "loss": 0.5802,
+ "step": 4210
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.302269542580109e-06,
+ "loss": 0.6888,
+ "step": 4211
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2990802847390494e-06,
+ "loss": 0.6607,
+ "step": 4212
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2958946656283188e-06,
+ "loss": 0.6601,
+ "step": 4213
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2927126865801464e-06,
+ "loss": 0.723,
+ "step": 4214
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2895343489252355e-06,
+ "loss": 0.6336,
+ "step": 4215
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2863596539927715e-06,
+ "loss": 0.5732,
+ "step": 4216
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2831886031104123e-06,
+ "loss": 0.5904,
+ "step": 4217
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2800211976042942e-06,
+ "loss": 0.717,
+ "step": 4218
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2768574387990284e-06,
+ "loss": 0.717,
+ "step": 4219
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2736973280177e-06,
+ "loss": 0.6466,
+ "step": 4220
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2705408665818707e-06,
+ "loss": 0.5919,
+ "step": 4221
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2673880558115758e-06,
+ "loss": 0.708,
+ "step": 4222
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2642388970253194e-06,
+ "loss": 0.6557,
+ "step": 4223
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2610933915400825e-06,
+ "loss": 0.6157,
+ "step": 4224
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2579515406713194e-06,
+ "loss": 0.6599,
+ "step": 4225
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2548133457329514e-06,
+ "loss": 0.6661,
+ "step": 4226
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2516788080373766e-06,
+ "loss": 0.6562,
+ "step": 4227
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2485479288954627e-06,
+ "loss": 0.5929,
+ "step": 4228
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2454207096165405e-06,
+ "loss": 0.5216,
+ "step": 4229
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.242297151508416e-06,
+ "loss": 0.6835,
+ "step": 4230
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.239177255877365e-06,
+ "loss": 0.6545,
+ "step": 4231
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.236061024028129e-06,
+ "loss": 0.7112,
+ "step": 4232
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2329484572639183e-06,
+ "loss": 0.5941,
+ "step": 4233
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2298395568864109e-06,
+ "loss": 0.7091,
+ "step": 4234
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2267343241957508e-06,
+ "loss": 0.7488,
+ "step": 4235
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2236327604905473e-06,
+ "loss": 0.6756,
+ "step": 4236
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2205348670678774e-06,
+ "loss": 0.637,
+ "step": 4237
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2174406452232823e-06,
+ "loss": 0.6714,
+ "step": 4238
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.214350096250767e-06,
+ "loss": 0.734,
+ "step": 4239
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2112632214428032e-06,
+ "loss": 0.634,
+ "step": 4240
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2081800220903207e-06,
+ "loss": 0.6948,
+ "step": 4241
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2051004994827164e-06,
+ "loss": 0.6879,
+ "step": 4242
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2020246549078497e-06,
+ "loss": 0.7217,
+ "step": 4243
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1989524896520377e-06,
+ "loss": 0.7025,
+ "step": 4244
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1958840050000686e-06,
+ "loss": 0.6209,
+ "step": 4245
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.192819202235178e-06,
+ "loss": 0.6291,
+ "step": 4246
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.189758082639072e-06,
+ "loss": 0.6517,
+ "step": 4247
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1867006474919118e-06,
+ "loss": 0.6483,
+ "step": 4248
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.183646898072318e-06,
+ "loss": 0.64,
+ "step": 4249
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1805968356573728e-06,
+ "loss": 0.7659,
+ "step": 4250
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1775504615226118e-06,
+ "loss": 0.6546,
+ "step": 4251
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1745077769420342e-06,
+ "loss": 0.6892,
+ "step": 4252
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1714687831880867e-06,
+ "loss": 0.7534,
+ "step": 4253
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1684334815316833e-06,
+ "loss": 0.6145,
+ "step": 4254
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.165401873242188e-06,
+ "loss": 0.664,
+ "step": 4255
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1623739595874218e-06,
+ "loss": 0.6474,
+ "step": 4256
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1593497418336619e-06,
+ "loss": 0.7104,
+ "step": 4257
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1563292212456335e-06,
+ "loss": 0.6119,
+ "step": 4258
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1533123990865225e-06,
+ "loss": 0.5993,
+ "step": 4259
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1502992766179666e-06,
+ "loss": 0.7126,
+ "step": 4260
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1472898551000544e-06,
+ "loss": 0.632,
+ "step": 4261
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.144284135791327e-06,
+ "loss": 0.6259,
+ "step": 4262
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1412821199487845e-06,
+ "loss": 0.6113,
+ "step": 4263
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1382838088278647e-06,
+ "loss": 0.7127,
+ "step": 4264
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.135289203682466e-06,
+ "loss": 0.7493,
+ "step": 4265
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.132298305764934e-06,
+ "loss": 0.6204,
+ "step": 4266
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1293111163260639e-06,
+ "loss": 0.5929,
+ "step": 4267
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1263276366150999e-06,
+ "loss": 0.6236,
+ "step": 4268
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.123347867879737e-06,
+ "loss": 0.7376,
+ "step": 4269
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1203718113661177e-06,
+ "loss": 0.6941,
+ "step": 4270
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.117399468318824e-06,
+ "loss": 0.6911,
+ "step": 4271
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1144308399808968e-06,
+ "loss": 0.6597,
+ "step": 4272
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1114659275938188e-06,
+ "loss": 0.7204,
+ "step": 4273
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1085047323975173e-06,
+ "loss": 0.6353,
+ "step": 4274
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.105547255630368e-06,
+ "loss": 0.6777,
+ "step": 4275
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1025934985291853e-06,
+ "loss": 0.6397,
+ "step": 4276
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.099643462329235e-06,
+ "loss": 0.7203,
+ "step": 4277
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.0966971482642241e-06,
+ "loss": 0.6596,
+ "step": 4278
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.0937545575663023e-06,
+ "loss": 0.6722,
+ "step": 4279
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.0908156914660638e-06,
+ "loss": 0.7712,
+ "step": 4280
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.0878805511925438e-06,
+ "loss": 0.5824,
+ "step": 4281
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.0849491379732201e-06,
+ "loss": 0.5593,
+ "step": 4282
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0820214530340124e-06,
+ "loss": 0.6829,
+ "step": 4283
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0790974975992785e-06,
+ "loss": 0.674,
+ "step": 4284
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0761772728918208e-06,
+ "loss": 0.6789,
+ "step": 4285
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0732607801328766e-06,
+ "loss": 0.5361,
+ "step": 4286
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0703480205421302e-06,
+ "loss": 0.655,
+ "step": 4287
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0674389953376928e-06,
+ "loss": 0.6161,
+ "step": 4288
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0645337057361215e-06,
+ "loss": 0.6056,
+ "step": 4289
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0616321529524154e-06,
+ "loss": 0.6338,
+ "step": 4290
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0587343382000027e-06,
+ "loss": 0.6755,
+ "step": 4291
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0558402626907537e-06,
+ "loss": 0.6723,
+ "step": 4292
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0529499276349686e-06,
+ "loss": 0.6208,
+ "step": 4293
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.05006333424139e-06,
+ "loss": 0.5605,
+ "step": 4294
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0471804837171916e-06,
+ "loss": 0.5531,
+ "step": 4295
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0443013772679855e-06,
+ "loss": 0.5618,
+ "step": 4296
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0414260160978141e-06,
+ "loss": 0.5886,
+ "step": 4297
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0385544014091565e-06,
+ "loss": 0.7307,
+ "step": 4298
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0356865344029231e-06,
+ "loss": 0.4359,
+ "step": 4299
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0328224162784573e-06,
+ "loss": 0.5052,
+ "step": 4300
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.029962048233537e-06,
+ "loss": 0.7199,
+ "step": 4301
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.027105431464368e-06,
+ "loss": 0.5686,
+ "step": 4302
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0242525671655912e-06,
+ "loss": 0.5642,
+ "step": 4303
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0214034565302766e-06,
+ "loss": 0.6341,
+ "step": 4304
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0185581007499212e-06,
+ "loss": 0.6722,
+ "step": 4305
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0157165010144564e-06,
+ "loss": 0.6842,
+ "step": 4306
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0128786585122385e-06,
+ "loss": 0.704,
+ "step": 4307
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0100445744300602e-06,
+ "loss": 0.5917,
+ "step": 4308
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0072142499531346e-06,
+ "loss": 0.6691,
+ "step": 4309
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.004387686265107e-06,
+ "loss": 0.6744,
+ "step": 4310
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0015648845480453e-06,
+ "loss": 0.6744,
+ "step": 4311
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.987458459824473e-07,
+ "loss": 0.4943,
+ "step": 4312
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.95930571747239e-07,
+ "loss": 0.6218,
+ "step": 4313
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.931190630197673e-07,
+ "loss": 0.6482,
+ "step": 4314
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.903113209758098e-07,
+ "loss": 0.7325,
+ "step": 4315
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.875073467895635e-07,
+ "loss": 0.6433,
+ "step": 4316
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.84707141633654e-07,
+ "loss": 0.6196,
+ "step": 4317
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.819107066791279e-07,
+ "loss": 0.6578,
+ "step": 4318
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.791180430954562e-07,
+ "loss": 0.7039,
+ "step": 4319
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.763291520505335e-07,
+ "loss": 0.6586,
+ "step": 4320
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.735440347106762e-07,
+ "loss": 0.6086,
+ "step": 4321
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.707626922406222e-07,
+ "loss": 0.7092,
+ "step": 4322
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.679851258035277e-07,
+ "loss": 0.6397,
+ "step": 4323
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.65211336560976e-07,
+ "loss": 0.7109,
+ "step": 4324
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.624413256729636e-07,
+ "loss": 0.5129,
+ "step": 4325
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.596750942979171e-07,
+ "loss": 0.6197,
+ "step": 4326
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.569126435926735e-07,
+ "loss": 0.7056,
+ "step": 4327
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.541539747124907e-07,
+ "loss": 0.7064,
+ "step": 4328
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.513990888110447e-07,
+ "loss": 0.5773,
+ "step": 4329
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.48647987040433e-07,
+ "loss": 0.6799,
+ "step": 4330
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.459006705511664e-07,
+ "loss": 0.5996,
+ "step": 4331
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.431571404921758e-07,
+ "loss": 0.6292,
+ "step": 4332
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.404173980108067e-07,
+ "loss": 0.5835,
+ "step": 4333
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.376814442528215e-07,
+ "loss": 0.6074,
+ "step": 4334
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.349492803623972e-07,
+ "loss": 0.7399,
+ "step": 4335
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.322209074821265e-07,
+ "loss": 0.6822,
+ "step": 4336
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.294963267530177e-07,
+ "loss": 0.7492,
+ "step": 4337
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.267755393144917e-07,
+ "loss": 0.7657,
+ "step": 4338
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.240585463043849e-07,
+ "loss": 0.6977,
+ "step": 4339
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.21345348858943e-07,
+ "loss": 0.6308,
+ "step": 4340
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.186359481128282e-07,
+ "loss": 0.5543,
+ "step": 4341
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.159303451991142e-07,
+ "loss": 0.6725,
+ "step": 4342
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.132285412492825e-07,
+ "loss": 0.6108,
+ "step": 4343
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.105305373932338e-07,
+ "loss": 0.7054,
+ "step": 4344
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.078363347592767e-07,
+ "loss": 0.6862,
+ "step": 4345
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.05145934474122e-07,
+ "loss": 0.5811,
+ "step": 4346
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.024593376629009e-07,
+ "loss": 0.6143,
+ "step": 4347
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.997765454491492e-07,
+ "loss": 0.6181,
+ "step": 4348
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.970975589548114e-07,
+ "loss": 0.693,
+ "step": 4349
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.944223793002438e-07,
+ "loss": 0.7096,
+ "step": 4350
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.917510076042058e-07,
+ "loss": 0.5809,
+ "step": 4351
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.890834449838681e-07,
+ "loss": 0.685,
+ "step": 4352
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.864196925548063e-07,
+ "loss": 0.585,
+ "step": 4353
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.837597514310048e-07,
+ "loss": 0.5433,
+ "step": 4354
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.811036227248515e-07,
+ "loss": 0.7106,
+ "step": 4355
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.784513075471413e-07,
+ "loss": 0.6527,
+ "step": 4356
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.758028070070768e-07,
+ "loss": 0.6997,
+ "step": 4357
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.731581222122587e-07,
+ "loss": 0.7155,
+ "step": 4358
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.705172542686968e-07,
+ "loss": 0.5893,
+ "step": 4359
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.678802042808043e-07,
+ "loss": 0.6694,
+ "step": 4360
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.652469733513958e-07,
+ "loss": 0.6524,
+ "step": 4361
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.626175625816957e-07,
+ "loss": 0.7439,
+ "step": 4362
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.599919730713191e-07,
+ "loss": 0.6689,
+ "step": 4363
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.57370205918292e-07,
+ "loss": 0.712,
+ "step": 4364
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.547522622190385e-07,
+ "loss": 0.5243,
+ "step": 4365
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.52138143068385e-07,
+ "loss": 0.5633,
+ "step": 4366
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.495278495595572e-07,
+ "loss": 0.554,
+ "step": 4367
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.469213827841816e-07,
+ "loss": 0.6763,
+ "step": 4368
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.443187438322864e-07,
+ "loss": 0.6542,
+ "step": 4369
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.417199337922932e-07,
+ "loss": 0.6318,
+ "step": 4370
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.391249537510271e-07,
+ "loss": 0.616,
+ "step": 4371
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.365338047937122e-07,
+ "loss": 0.6631,
+ "step": 4372
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.339464880039671e-07,
+ "loss": 0.7088,
+ "step": 4373
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.313630044638121e-07,
+ "loss": 0.6248,
+ "step": 4374
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.287833552536584e-07,
+ "loss": 0.7254,
+ "step": 4375
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.262075414523175e-07,
+ "loss": 0.6197,
+ "step": 4376
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.236355641369975e-07,
+ "loss": 0.7771,
+ "step": 4377
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.210674243833006e-07,
+ "loss": 0.6591,
+ "step": 4378
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.185031232652252e-07,
+ "loss": 0.5354,
+ "step": 4379
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.159426618551625e-07,
+ "loss": 0.6581,
+ "step": 4380
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.133860412238992e-07,
+ "loss": 0.593,
+ "step": 4381
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.108332624406168e-07,
+ "loss": 0.5694,
+ "step": 4382
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 8.082843265728879e-07,
+ "loss": 0.6976,
+ "step": 4383
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 8.05739234686681e-07,
+ "loss": 0.6185,
+ "step": 4384
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 8.031979878463525e-07,
+ "loss": 0.7164,
+ "step": 4385
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 8.006605871146578e-07,
+ "loss": 0.7657,
+ "step": 4386
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.981270335527347e-07,
+ "loss": 0.6405,
+ "step": 4387
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.955973282201179e-07,
+ "loss": 0.6838,
+ "step": 4388
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.930714721747323e-07,
+ "loss": 0.5586,
+ "step": 4389
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.905494664728941e-07,
+ "loss": 0.6457,
+ "step": 4390
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.880313121693073e-07,
+ "loss": 0.6191,
+ "step": 4391
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.855170103170673e-07,
+ "loss": 0.679,
+ "step": 4392
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.830065619676519e-07,
+ "loss": 0.6286,
+ "step": 4393
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.804999681709358e-07,
+ "loss": 0.5733,
+ "step": 4394
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.77997229975177e-07,
+ "loss": 0.6755,
+ "step": 4395
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.754983484270218e-07,
+ "loss": 0.4711,
+ "step": 4396
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.730033245715063e-07,
+ "loss": 0.6332,
+ "step": 4397
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.705121594520492e-07,
+ "loss": 0.7047,
+ "step": 4398
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.680248541104574e-07,
+ "loss": 0.6861,
+ "step": 4399
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.65541409586924e-07,
+ "loss": 0.7404,
+ "step": 4400
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.630618269200285e-07,
+ "loss": 0.7243,
+ "step": 4401
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.605861071467313e-07,
+ "loss": 0.6514,
+ "step": 4402
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.58114251302382e-07,
+ "loss": 0.5835,
+ "step": 4403
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.556462604207138e-07,
+ "loss": 0.6662,
+ "step": 4404
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.531821355338386e-07,
+ "loss": 0.6158,
+ "step": 4405
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.507218776722558e-07,
+ "loss": 0.5841,
+ "step": 4406
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.482654878648465e-07,
+ "loss": 0.6862,
+ "step": 4407
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.458129671388769e-07,
+ "loss": 0.6546,
+ "step": 4408
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.433643165199933e-07,
+ "loss": 0.6176,
+ "step": 4409
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.409195370322198e-07,
+ "loss": 0.6366,
+ "step": 4410
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.384786296979662e-07,
+ "loss": 0.4974,
+ "step": 4411
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.360415955380218e-07,
+ "loss": 0.6968,
+ "step": 4412
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.336084355715555e-07,
+ "loss": 0.6173,
+ "step": 4413
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.311791508161159e-07,
+ "loss": 0.6447,
+ "step": 4414
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.287537422876323e-07,
+ "loss": 0.4592,
+ "step": 4415
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.263322110004123e-07,
+ "loss": 0.6821,
+ "step": 4416
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.239145579671414e-07,
+ "loss": 0.6683,
+ "step": 4417
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.21500784198883e-07,
+ "loss": 0.7237,
+ "step": 4418
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.190908907050808e-07,
+ "loss": 0.6699,
+ "step": 4419
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.166848784935521e-07,
+ "loss": 0.6997,
+ "step": 4420
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.142827485704951e-07,
+ "loss": 0.6911,
+ "step": 4421
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.118845019404785e-07,
+ "loss": 0.617,
+ "step": 4422
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.094901396064535e-07,
+ "loss": 0.7309,
+ "step": 4423
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.070996625697424e-07,
+ "loss": 0.6371,
+ "step": 4424
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.047130718300433e-07,
+ "loss": 0.7058,
+ "step": 4425
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.023303683854322e-07,
+ "loss": 0.6719,
+ "step": 4426
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 6.999515532323598e-07,
+ "loss": 0.5113,
+ "step": 4427
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 6.975766273656425e-07,
+ "loss": 0.6213,
+ "step": 4428
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 6.952055917784783e-07,
+ "loss": 0.7129,
+ "step": 4429
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 6.928384474624361e-07,
+ "loss": 0.632,
+ "step": 4430
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 6.904751954074574e-07,
+ "loss": 0.7198,
+ "step": 4431
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 6.881158366018548e-07,
+ "loss": 0.7798,
+ "step": 4432
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.857603720323136e-07,
+ "loss": 0.6507,
+ "step": 4433
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.834088026838925e-07,
+ "loss": 0.6927,
+ "step": 4434
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.810611295400171e-07,
+ "loss": 0.7376,
+ "step": 4435
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.787173535824876e-07,
+ "loss": 0.6847,
+ "step": 4436
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.76377475791471e-07,
+ "loss": 0.671,
+ "step": 4437
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.740414971455079e-07,
+ "loss": 0.6727,
+ "step": 4438
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.717094186215056e-07,
+ "loss": 0.7386,
+ "step": 4439
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.693812411947387e-07,
+ "loss": 0.6189,
+ "step": 4440
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.670569658388537e-07,
+ "loss": 0.7575,
+ "step": 4441
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.647365935258642e-07,
+ "loss": 0.7601,
+ "step": 4442
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.624201252261497e-07,
+ "loss": 0.5803,
+ "step": 4443
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.601075619084641e-07,
+ "loss": 0.6646,
+ "step": 4444
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.57798904539918e-07,
+ "loss": 0.5952,
+ "step": 4445
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.554941540859949e-07,
+ "loss": 0.6725,
+ "step": 4446
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.531933115105427e-07,
+ "loss": 0.6761,
+ "step": 4447
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.508963777757749e-07,
+ "loss": 0.6826,
+ "step": 4448
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.48603353842272e-07,
+ "loss": 0.614,
+ "step": 4449
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.46314240668977e-07,
+ "loss": 0.5844,
+ "step": 4450
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.440290392131998e-07,
+ "loss": 0.68,
+ "step": 4451
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.417477504306102e-07,
+ "loss": 0.6995,
+ "step": 4452
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.394703752752474e-07,
+ "loss": 0.6731,
+ "step": 4453
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.371969146995106e-07,
+ "loss": 0.6861,
+ "step": 4454
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.349273696541625e-07,
+ "loss": 0.5894,
+ "step": 4455
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.326617410883296e-07,
+ "loss": 0.6477,
+ "step": 4456
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.304000299494961e-07,
+ "loss": 0.6004,
+ "step": 4457
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.281422371835133e-07,
+ "loss": 0.6015,
+ "step": 4458
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.258883637345914e-07,
+ "loss": 0.6633,
+ "step": 4459
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.236384105453008e-07,
+ "loss": 0.6352,
+ "step": 4460
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.213923785565723e-07,
+ "loss": 0.6356,
+ "step": 4461
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.191502687077034e-07,
+ "loss": 0.7181,
+ "step": 4462
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.169120819363406e-07,
+ "loss": 0.7065,
+ "step": 4463
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.146778191784963e-07,
+ "loss": 0.6495,
+ "step": 4464
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.124474813685422e-07,
+ "loss": 0.6785,
+ "step": 4465
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.102210694392052e-07,
+ "loss": 0.5879,
+ "step": 4466
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.079985843215719e-07,
+ "loss": 0.6604,
+ "step": 4467
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.057800269450908e-07,
+ "loss": 0.7239,
+ "step": 4468
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.0356539823756e-07,
+ "loss": 0.7049,
+ "step": 4469
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.013546991251373e-07,
+ "loss": 0.6489,
+ "step": 4470
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.991479305323433e-07,
+ "loss": 0.6762,
+ "step": 4471
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.969450933820486e-07,
+ "loss": 0.7405,
+ "step": 4472
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.947461885954798e-07,
+ "loss": 0.6235,
+ "step": 4473
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.925512170922232e-07,
+ "loss": 0.6347,
+ "step": 4474
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.903601797902126e-07,
+ "loss": 0.7469,
+ "step": 4475
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.881730776057448e-07,
+ "loss": 0.7085,
+ "step": 4476
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.859899114534662e-07,
+ "loss": 0.6473,
+ "step": 4477
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.838106822463773e-07,
+ "loss": 0.6594,
+ "step": 4478
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.816353908958361e-07,
+ "loss": 0.7282,
+ "step": 4479
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.794640383115479e-07,
+ "loss": 0.6147,
+ "step": 4480
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.772966254015755e-07,
+ "loss": 0.6324,
+ "step": 4481
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.751331530723314e-07,
+ "loss": 0.6715,
+ "step": 4482
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.729736222285832e-07,
+ "loss": 0.6705,
+ "step": 4483
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.708180337734448e-07,
+ "loss": 0.722,
+ "step": 4484
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.686663886083877e-07,
+ "loss": 0.6425,
+ "step": 4485
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.665186876332307e-07,
+ "loss": 0.6497,
+ "step": 4486
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.643749317461422e-07,
+ "loss": 0.7142,
+ "step": 4487
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.622351218436417e-07,
+ "loss": 0.6765,
+ "step": 4488
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.600992588206011e-07,
+ "loss": 0.5967,
+ "step": 4489
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.579673435702404e-07,
+ "loss": 0.7607,
+ "step": 4490
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.558393769841286e-07,
+ "loss": 0.5693,
+ "step": 4491
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.537153599521783e-07,
+ "loss": 0.6558,
+ "step": 4492
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.51595293362659e-07,
+ "loss": 0.7699,
+ "step": 4493
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.494791781021813e-07,
+ "loss": 0.7183,
+ "step": 4494
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.473670150557076e-07,
+ "loss": 0.6213,
+ "step": 4495
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.452588051065455e-07,
+ "loss": 0.6525,
+ "step": 4496
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.431545491363488e-07,
+ "loss": 0.6409,
+ "step": 4497
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.410542480251202e-07,
+ "loss": 0.7299,
+ "step": 4498
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.389579026512059e-07,
+ "loss": 0.5652,
+ "step": 4499
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.368655138913004e-07,
+ "loss": 0.7094,
+ "step": 4500
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.347770826204401e-07,
+ "loss": 0.7602,
+ "step": 4501
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.326926097120099e-07,
+ "loss": 0.7288,
+ "step": 4502
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.306120960377381e-07,
+ "loss": 0.6756,
+ "step": 4503
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.285355424676952e-07,
+ "loss": 0.6549,
+ "step": 4504
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.264629498702966e-07,
+ "loss": 0.6321,
+ "step": 4505
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.243943191123024e-07,
+ "loss": 0.6352,
+ "step": 4506
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.223296510588182e-07,
+ "loss": 0.5914,
+ "step": 4507
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.202689465732869e-07,
+ "loss": 0.6085,
+ "step": 4508
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.182122065174988e-07,
+ "loss": 0.6023,
+ "step": 4509
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.161594317515805e-07,
+ "loss": 0.6551,
+ "step": 4510
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.141106231340065e-07,
+ "loss": 0.6379,
+ "step": 4511
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.12065781521588e-07,
+ "loss": 0.5984,
+ "step": 4512
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.100249077694797e-07,
+ "loss": 0.6858,
+ "step": 4513
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.079880027311778e-07,
+ "loss": 0.5915,
+ "step": 4514
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.059550672585145e-07,
+ "loss": 0.5673,
+ "step": 4515
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.039261022016673e-07,
+ "loss": 0.6934,
+ "step": 4516
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.019011084091496e-07,
+ "loss": 0.6911,
+ "step": 4517
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.998800867278164e-07,
+ "loss": 0.641,
+ "step": 4518
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.978630380028582e-07,
+ "loss": 0.6463,
+ "step": 4519
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.958499630778068e-07,
+ "loss": 0.6635,
+ "step": 4520
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.938408627945335e-07,
+ "loss": 0.63,
+ "step": 4521
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.918357379932426e-07,
+ "loss": 0.629,
+ "step": 4522
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.898345895124801e-07,
+ "loss": 0.6955,
+ "step": 4523
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.878374181891266e-07,
+ "loss": 0.7135,
+ "step": 4524
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.858442248583995e-07,
+ "loss": 0.6274,
+ "step": 4525
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.838550103538575e-07,
+ "loss": 0.6323,
+ "step": 4526
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.818697755073876e-07,
+ "loss": 0.6379,
+ "step": 4527
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.798885211492187e-07,
+ "loss": 0.6653,
+ "step": 4528
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.779112481079107e-07,
+ "loss": 0.6742,
+ "step": 4529
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.7593795721036176e-07,
+ "loss": 0.4923,
+ "step": 4530
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.7396864928180296e-07,
+ "loss": 0.563,
+ "step": 4531
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.7200332514580114e-07,
+ "loss": 0.6924,
+ "step": 4532
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.700419856242555e-07,
+ "loss": 0.6711,
+ "step": 4533
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.680846315373977e-07,
+ "loss": 0.65,
+ "step": 4534
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.6613126370379627e-07,
+ "loss": 0.7212,
+ "step": 4535
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.6418188294035217e-07,
+ "loss": 0.7278,
+ "step": 4536
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.6223649006229553e-07,
+ "loss": 0.7756,
+ "step": 4537
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.602950858831934e-07,
+ "loss": 0.6381,
+ "step": 4538
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.583576712149385e-07,
+ "loss": 0.6061,
+ "step": 4539
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.5642424686776154e-07,
+ "loss": 0.5825,
+ "step": 4540
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.5449481365022143e-07,
+ "loss": 0.5942,
+ "step": 4541
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.525693723692082e-07,
+ "loss": 0.6067,
+ "step": 4542
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.5064792382994216e-07,
+ "loss": 0.6055,
+ "step": 4543
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.487304688359762e-07,
+ "loss": 0.7032,
+ "step": 4544
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.468170081891898e-07,
+ "loss": 0.5596,
+ "step": 4545
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.449075426897931e-07,
+ "loss": 0.7694,
+ "step": 4546
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.4300207313632713e-07,
+ "loss": 0.6363,
+ "step": 4547
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.4110060032565903e-07,
+ "loss": 0.6354,
+ "step": 4548
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.392031250529871e-07,
+ "loss": 0.5222,
+ "step": 4549
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.373096481118377e-07,
+ "loss": 0.6971,
+ "step": 4550
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.3542017029406083e-07,
+ "loss": 0.7165,
+ "step": 4551
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.335346923898387e-07,
+ "loss": 0.5198,
+ "step": 4552
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.316532151876807e-07,
+ "loss": 0.6524,
+ "step": 4553
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.2977573947442175e-07,
+ "loss": 0.7663,
+ "step": 4554
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.279022660352228e-07,
+ "loss": 0.7379,
+ "step": 4555
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.2603279565357257e-07,
+ "loss": 0.6674,
+ "step": 4556
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.2416732911128247e-07,
+ "loss": 0.7245,
+ "step": 4557
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.2230586718849386e-07,
+ "loss": 0.7111,
+ "step": 4558
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.2044841066367084e-07,
+ "loss": 0.7018,
+ "step": 4559
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.185949603136019e-07,
+ "loss": 0.6618,
+ "step": 4560
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.167455169134027e-07,
+ "loss": 0.6447,
+ "step": 4561
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.1490008123651115e-07,
+ "loss": 0.7252,
+ "step": 4562
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.130586540546899e-07,
+ "loss": 0.7573,
+ "step": 4563
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.112212361380252e-07,
+ "loss": 0.6655,
+ "step": 4564
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.093878282549246e-07,
+ "loss": 0.72,
+ "step": 4565
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.0755843117212255e-07,
+ "loss": 0.5732,
+ "step": 4566
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.057330456546737e-07,
+ "loss": 0.7633,
+ "step": 4567
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.039116724659564e-07,
+ "loss": 0.6925,
+ "step": 4568
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.0209431236766793e-07,
+ "loss": 0.594,
+ "step": 4569
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.0028096611982815e-07,
+ "loss": 0.6803,
+ "step": 4570
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.9847163448078373e-07,
+ "loss": 0.5945,
+ "step": 4571
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.9666631820719723e-07,
+ "loss": 0.7716,
+ "step": 4572
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.9486501805405253e-07,
+ "loss": 0.6246,
+ "step": 4573
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.9306773477465366e-07,
+ "loss": 0.6801,
+ "step": 4574
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.9127446912062606e-07,
+ "loss": 0.5299,
+ "step": 4575
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.894852218419154e-07,
+ "loss": 0.6717,
+ "step": 4576
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.876999936867865e-07,
+ "loss": 0.6009,
+ "step": 4577
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.8591878540182093e-07,
+ "loss": 0.6428,
+ "step": 4578
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.8414159773192294e-07,
+ "loss": 0.6847,
+ "step": 4579
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.8236843142031244e-07,
+ "loss": 0.5881,
+ "step": 4580
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.8059928720852957e-07,
+ "loss": 0.6667,
+ "step": 4581
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.788341658364314e-07,
+ "loss": 0.5976,
+ "step": 4582
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.770730680421919e-07,
+ "loss": 0.5946,
+ "step": 4583
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.753159945623053e-07,
+ "loss": 0.6789,
+ "step": 4584
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.735629461315804e-07,
+ "loss": 0.5734,
+ "step": 4585
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.7181392348314085e-07,
+ "loss": 0.6376,
+ "step": 4586
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.7006892734843145e-07,
+ "loss": 0.7141,
+ "step": 4587
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.683279584572086e-07,
+ "loss": 0.5728,
+ "step": 4588
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.6659101753754975e-07,
+ "loss": 0.6756,
+ "step": 4589
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.6485810531584287e-07,
+ "loss": 0.6441,
+ "step": 4590
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.6312922251679373e-07,
+ "loss": 0.6182,
+ "step": 4591
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.6140436986342067e-07,
+ "loss": 0.653,
+ "step": 4592
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.5968354807705994e-07,
+ "loss": 0.6858,
+ "step": 4593
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.5796675787735933e-07,
+ "loss": 0.6766,
+ "step": 4594
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.5625399998228116e-07,
+ "loss": 0.6417,
+ "step": 4595
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.5454527510810355e-07,
+ "loss": 0.6306,
+ "step": 4596
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.528405839694149e-07,
+ "loss": 0.5759,
+ "step": 4597
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.511399272791194e-07,
+ "loss": 0.6595,
+ "step": 4598
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.4944330574843144e-07,
+ "loss": 0.696,
+ "step": 4599
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.4775072008688016e-07,
+ "loss": 0.6327,
+ "step": 4600
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.46062171002306e-07,
+ "loss": 0.731,
+ "step": 4601
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.4437765920086184e-07,
+ "loss": 0.6747,
+ "step": 4602
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.426971853870109e-07,
+ "loss": 0.5998,
+ "step": 4603
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.4102075026352766e-07,
+ "loss": 0.5633,
+ "step": 4604
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.3934835453149797e-07,
+ "loss": 0.6392,
+ "step": 4605
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.376799988903201e-07,
+ "loss": 0.7155,
+ "step": 4606
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.360156840377027e-07,
+ "loss": 0.6238,
+ "step": 4607
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.3435541066966225e-07,
+ "loss": 0.7011,
+ "step": 4608
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.3269917948052454e-07,
+ "loss": 0.7323,
+ "step": 4609
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.3104699116292883e-07,
+ "loss": 0.6741,
+ "step": 4610
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.293988464078202e-07,
+ "loss": 0.6523,
+ "step": 4611
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.277547459044539e-07,
+ "loss": 0.7111,
+ "step": 4612
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.2611469034039334e-07,
+ "loss": 0.6847,
+ "step": 4613
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.244786804015121e-07,
+ "loss": 0.6369,
+ "step": 4614
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.2284671677199065e-07,
+ "loss": 0.6534,
+ "step": 4615
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.212188001343164e-07,
+ "loss": 0.6621,
+ "step": 4616
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.1959493116928473e-07,
+ "loss": 0.5339,
+ "step": 4617
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.179751105559992e-07,
+ "loss": 0.6388,
+ "step": 4618
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.163593389718711e-07,
+ "loss": 0.6702,
+ "step": 4619
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.147476170926156e-07,
+ "loss": 0.6858,
+ "step": 4620
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.131399455922568e-07,
+ "loss": 0.6388,
+ "step": 4621
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.115363251431225e-07,
+ "loss": 0.8452,
+ "step": 4622
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.099367564158484e-07,
+ "loss": 0.5503,
+ "step": 4623
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.0834124007937616e-07,
+ "loss": 0.5812,
+ "step": 4624
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.0674977680095086e-07,
+ "loss": 0.6808,
+ "step": 4625
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.0516236724612567e-07,
+ "loss": 0.663,
+ "step": 4626
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.03579012078753e-07,
+ "loss": 0.6483,
+ "step": 4627
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.0199971196099407e-07,
+ "loss": 0.6768,
+ "step": 4628
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.004244675533141e-07,
+ "loss": 0.6428,
+ "step": 4629
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 2.988532795144816e-07,
+ "loss": 0.6305,
+ "step": 4630
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 2.972861485015666e-07,
+ "loss": 0.666,
+ "step": 4631
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 2.9572307516994693e-07,
+ "loss": 0.6993,
+ "step": 4632
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 2.941640601732976e-07,
+ "loss": 0.6078,
+ "step": 4633
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.9260910416360145e-07,
+ "loss": 0.5873,
+ "step": 4634
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.910582077911428e-07,
+ "loss": 0.6187,
+ "step": 4635
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.8951137170450727e-07,
+ "loss": 0.5157,
+ "step": 4636
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.8796859655058184e-07,
+ "loss": 0.8078,
+ "step": 4637
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.864298829745571e-07,
+ "loss": 0.7041,
+ "step": 4638
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.8489523161992385e-07,
+ "loss": 0.6585,
+ "step": 4639
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.833646431284731e-07,
+ "loss": 0.5919,
+ "step": 4640
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.818381181402985e-07,
+ "loss": 0.5972,
+ "step": 4641
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.8031565729379373e-07,
+ "loss": 0.6693,
+ "step": 4642
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.787972612256551e-07,
+ "loss": 0.6451,
+ "step": 4643
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.7728293057087463e-07,
+ "loss": 0.726,
+ "step": 4644
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.7577266596274577e-07,
+ "loss": 0.6708,
+ "step": 4645
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.742664680328644e-07,
+ "loss": 0.6282,
+ "step": 4646
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.727643374111222e-07,
+ "loss": 0.5847,
+ "step": 4647
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.7126627472571e-07,
+ "loss": 0.6774,
+ "step": 4648
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.697722806031211e-07,
+ "loss": 0.5293,
+ "step": 4649
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.6828235566814465e-07,
+ "loss": 0.6468,
+ "step": 4650
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.667965005438644e-07,
+ "loss": 0.6964,
+ "step": 4651
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.6531471585167e-07,
+ "loss": 0.64,
+ "step": 4652
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.6383700221124355e-07,
+ "loss": 0.6758,
+ "step": 4653
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.623633602405662e-07,
+ "loss": 0.5384,
+ "step": 4654
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.6089379055591613e-07,
+ "loss": 0.6148,
+ "step": 4655
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.594282937718673e-07,
+ "loss": 0.6546,
+ "step": 4656
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.579668705012917e-07,
+ "loss": 0.6122,
+ "step": 4657
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.5650952135535814e-07,
+ "loss": 0.6987,
+ "step": 4658
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.5505624694353027e-07,
+ "loss": 0.6243,
+ "step": 4659
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.536070478735686e-07,
+ "loss": 0.65,
+ "step": 4660
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.5216192475152945e-07,
+ "loss": 0.6749,
+ "step": 4661
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.507208781817638e-07,
+ "loss": 0.6693,
+ "step": 4662
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.492839087669197e-07,
+ "loss": 0.7152,
+ "step": 4663
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.4785101710793627e-07,
+ "loss": 0.6627,
+ "step": 4664
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.464222038040509e-07,
+ "loss": 0.6815,
+ "step": 4665
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.4499746945279566e-07,
+ "loss": 0.7744,
+ "step": 4666
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.435768146499939e-07,
+ "loss": 0.6424,
+ "step": 4667
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.421602399897638e-07,
+ "loss": 0.5897,
+ "step": 4668
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.407477460645191e-07,
+ "loss": 0.7492,
+ "step": 4669
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.3933933346496406e-07,
+ "loss": 0.6486,
+ "step": 4670
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.3793500278009974e-07,
+ "loss": 0.655,
+ "step": 4671
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.3653475459721742e-07,
+ "loss": 0.7006,
+ "step": 4672
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.3513858950190206e-07,
+ "loss": 0.648,
+ "step": 4673
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.337465080780299e-07,
+ "loss": 0.6626,
+ "step": 4674
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.3235851090777084e-07,
+ "loss": 0.6003,
+ "step": 4675
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.309745985715861e-07,
+ "loss": 0.7213,
+ "step": 4676
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.2959477164822942e-07,
+ "loss": 0.6586,
+ "step": 4677
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.282190307147447e-07,
+ "loss": 0.5373,
+ "step": 4678
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.2684737634646737e-07,
+ "loss": 0.7573,
+ "step": 4679
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.2547980911702406e-07,
+ "loss": 0.6428,
+ "step": 4680
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.2411632959833285e-07,
+ "loss": 0.7012,
+ "step": 4681
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.2275693836060207e-07,
+ "loss": 0.6117,
+ "step": 4682
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.2140163597233033e-07,
+ "loss": 0.6774,
+ "step": 4683
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.200504230003042e-07,
+ "loss": 0.6824,
+ "step": 4684
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.1870330000960504e-07,
+ "loss": 0.7001,
+ "step": 4685
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.1736026756359664e-07,
+ "loss": 0.6831,
+ "step": 4686
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.1602132622393745e-07,
+ "loss": 0.674,
+ "step": 4687
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.1468647655057406e-07,
+ "loss": 0.7172,
+ "step": 4688
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.1335571910174214e-07,
+ "loss": 0.6946,
+ "step": 4689
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.1202905443396538e-07,
+ "loss": 0.6143,
+ "step": 4690
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.1070648310205443e-07,
+ "loss": 0.7351,
+ "step": 4691
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.093880056591091e-07,
+ "loss": 0.6815,
+ "step": 4692
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.0807362265651943e-07,
+ "loss": 0.749,
+ "step": 4693
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.0676333464396125e-07,
+ "loss": 0.619,
+ "step": 4694
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.0545714216939515e-07,
+ "loss": 0.6728,
+ "step": 4695
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.041550457790753e-07,
+ "loss": 0.6531,
+ "step": 4696
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.0285704601753608e-07,
+ "loss": 0.6433,
+ "step": 4697
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.0156314342760442e-07,
+ "loss": 0.6213,
+ "step": 4698
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.0027333855038967e-07,
+ "loss": 0.6973,
+ "step": 4699
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.9898763192528926e-07,
+ "loss": 0.6623,
+ "step": 4700
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.9770602408998642e-07,
+ "loss": 0.6799,
+ "step": 4701
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.9642851558045128e-07,
+ "loss": 0.5761,
+ "step": 4702
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.9515510693093875e-07,
+ "loss": 0.7058,
+ "step": 4703
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.9388579867398727e-07,
+ "loss": 0.7992,
+ "step": 4704
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.9262059134042443e-07,
+ "loss": 0.7074,
+ "step": 4705
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.9135948545935923e-07,
+ "loss": 0.8074,
+ "step": 4706
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.9010248155818755e-07,
+ "loss": 0.649,
+ "step": 4707
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.8884958016259114e-07,
+ "loss": 0.6641,
+ "step": 4708
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.8760078179653196e-07,
+ "loss": 0.5772,
+ "step": 4709
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.8635608698225892e-07,
+ "loss": 0.562,
+ "step": 4710
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.8511549624030346e-07,
+ "loss": 0.6649,
+ "step": 4711
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.8387901008948272e-07,
+ "loss": 0.6832,
+ "step": 4712
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.8264662904689533e-07,
+ "loss": 0.5893,
+ "step": 4713
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.8141835362792458e-07,
+ "loss": 0.6382,
+ "step": 4714
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.8019418434623405e-07,
+ "loss": 0.6923,
+ "step": 4715
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.7897412171377416e-07,
+ "loss": 0.6539,
+ "step": 4716
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.7775816624077458e-07,
+ "loss": 0.634,
+ "step": 4717
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.7654631843575077e-07,
+ "loss": 0.7328,
+ "step": 4718
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.753385788054962e-07,
+ "loss": 0.6268,
+ "step": 4719
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.7413494785508911e-07,
+ "loss": 0.6044,
+ "step": 4720
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.7293542608788905e-07,
+ "loss": 0.5955,
+ "step": 4721
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.7174001400553586e-07,
+ "loss": 0.6991,
+ "step": 4722
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.7054871210795188e-07,
+ "loss": 0.6793,
+ "step": 4723
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.693615208933408e-07,
+ "loss": 0.7133,
+ "step": 4724
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.6817844085818768e-07,
+ "loss": 0.5032,
+ "step": 4725
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.6699947249725568e-07,
+ "loss": 0.67,
+ "step": 4726
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.6582461630359149e-07,
+ "loss": 0.6727,
+ "step": 4727
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.6465387276851874e-07,
+ "loss": 0.6575,
+ "step": 4728
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.6348724238164583e-07,
+ "loss": 0.6039,
+ "step": 4729
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.6232472563085688e-07,
+ "loss": 0.6471,
+ "step": 4730
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.6116632300231638e-07,
+ "loss": 0.677,
+ "step": 4731
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.6001203498047124e-07,
+ "loss": 0.6782,
+ "step": 4732
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.58861862048042e-07,
+ "loss": 0.7476,
+ "step": 4733
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.5771580468603608e-07,
+ "loss": 0.636,
+ "step": 4734
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.5657386337373237e-07,
+ "loss": 0.7182,
+ "step": 4735
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.5543603858869216e-07,
+ "loss": 0.6603,
+ "step": 4736
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.5430233080675593e-07,
+ "loss": 0.7553,
+ "step": 4737
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.531727405020389e-07,
+ "loss": 0.6995,
+ "step": 4738
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.520472681469376e-07,
+ "loss": 0.7324,
+ "step": 4739
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.5092591421212444e-07,
+ "loss": 0.5991,
+ "step": 4740
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.4980867916655205e-07,
+ "loss": 0.6266,
+ "step": 4741
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.4869556347744674e-07,
+ "loss": 0.5351,
+ "step": 4742
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.475865676103161e-07,
+ "loss": 0.664,
+ "step": 4743
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.4648169202894248e-07,
+ "loss": 0.7014,
+ "step": 4744
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.4538093719538404e-07,
+ "loss": 0.735,
+ "step": 4745
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.4428430356997925e-07,
+ "loss": 0.6367,
+ "step": 4746
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.431917916113401e-07,
+ "loss": 0.6379,
+ "step": 4747
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.4210340177635563e-07,
+ "loss": 0.7511,
+ "step": 4748
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.4101913452019277e-07,
+ "loss": 0.5842,
+ "step": 4749
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.3993899029629e-07,
+ "loss": 0.6178,
+ "step": 4750
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.3886296955636481e-07,
+ "loss": 0.6493,
+ "step": 4751
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.377910727504117e-07,
+ "loss": 0.749,
+ "step": 4752
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.3672330032669767e-07,
+ "loss": 0.662,
+ "step": 4753
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.3565965273176663e-07,
+ "loss": 0.679,
+ "step": 4754
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.3460013041043606e-07,
+ "loss": 0.5408,
+ "step": 4755
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.3354473380579825e-07,
+ "loss": 0.5126,
+ "step": 4756
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.324934633592201e-07,
+ "loss": 0.6809,
+ "step": 4757
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.3144631951034547e-07,
+ "loss": 0.6262,
+ "step": 4758
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.304033026970908e-07,
+ "loss": 0.6964,
+ "step": 4759
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.2936441335564376e-07,
+ "loss": 0.6364,
+ "step": 4760
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.283296519204713e-07,
+ "loss": 0.5762,
+ "step": 4761
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.2729901882431062e-07,
+ "loss": 0.7211,
+ "step": 4762
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.2627251449817247e-07,
+ "loss": 0.6316,
+ "step": 4763
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.2525013937134122e-07,
+ "loss": 0.6702,
+ "step": 4764
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.2423189387137713e-07,
+ "loss": 0.6585,
+ "step": 4765
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.2321777842410842e-07,
+ "loss": 0.5784,
+ "step": 4766
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.2220779345364143e-07,
+ "loss": 0.7,
+ "step": 4767
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.2120193938234937e-07,
+ "loss": 0.6504,
+ "step": 4768
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.2020021663088244e-07,
+ "loss": 0.7216,
+ "step": 4769
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1920262561816331e-07,
+ "loss": 0.7135,
+ "step": 4770
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1820916676138384e-07,
+ "loss": 0.6489,
+ "step": 4771
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1721984047601054e-07,
+ "loss": 0.5129,
+ "step": 4772
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1623464717577804e-07,
+ "loss": 0.676,
+ "step": 4773
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1525358727269564e-07,
+ "loss": 0.706,
+ "step": 4774
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1427666117704406e-07,
+ "loss": 0.7221,
+ "step": 4775
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1330386929737313e-07,
+ "loss": 0.7275,
+ "step": 4776
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1233521204050634e-07,
+ "loss": 0.653,
+ "step": 4777
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1137068981153632e-07,
+ "loss": 0.7911,
+ "step": 4778
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1041030301382705e-07,
+ "loss": 0.6216,
+ "step": 4779
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.0945405204901283e-07,
+ "loss": 0.7192,
+ "step": 4780
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.085019373169971e-07,
+ "loss": 0.6749,
+ "step": 4781
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.0755395921595691e-07,
+ "loss": 0.6441,
+ "step": 4782
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.0661011814233624e-07,
+ "loss": 0.6192,
+ "step": 4783
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 1.0567041449085158e-07,
+ "loss": 0.7154,
+ "step": 4784
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 1.0473484865448524e-07,
+ "loss": 0.737,
+ "step": 4785
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 1.0380342102449204e-07,
+ "loss": 0.5882,
+ "step": 4786
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 1.0287613199039592e-07,
+ "loss": 0.6597,
+ "step": 4787
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 1.0195298193999114e-07,
+ "loss": 0.7359,
+ "step": 4788
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 1.0103397125933778e-07,
+ "loss": 0.6447,
+ "step": 4789
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 1.001191003327684e-07,
+ "loss": 0.7094,
+ "step": 4790
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.920836954288137e-08,
+ "loss": 0.6745,
+ "step": 4791
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.830177927054429e-08,
+ "loss": 0.5785,
+ "step": 4792
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.739932989489498e-08,
+ "loss": 0.4925,
+ "step": 4793
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.65010217933382e-08,
+ "loss": 0.5455,
+ "step": 4794
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.56068553415479e-08,
+ "loss": 0.6496,
+ "step": 4795
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.471683091346273e-08,
+ "loss": 0.7082,
+ "step": 4796
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.383094888129274e-08,
+ "loss": 0.6298,
+ "step": 4797
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.29492096155149e-08,
+ "loss": 0.6597,
+ "step": 4798
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.207161348487315e-08,
+ "loss": 0.6605,
+ "step": 4799
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.119816085637722e-08,
+ "loss": 0.6371,
+ "step": 4800
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.032885209530717e-08,
+ "loss": 0.5957,
+ "step": 4801
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.946368756520885e-08,
+ "loss": 0.6103,
+ "step": 4802
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.860266762789283e-08,
+ "loss": 0.5636,
+ "step": 4803
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.774579264343996e-08,
+ "loss": 0.5715,
+ "step": 4804
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.689306297019362e-08,
+ "loss": 0.639,
+ "step": 4805
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.604447896476853e-08,
+ "loss": 0.7365,
+ "step": 4806
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.520004098204193e-08,
+ "loss": 0.5093,
+ "step": 4807
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.435974937515801e-08,
+ "loss": 0.6264,
+ "step": 4808
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.352360449552787e-08,
+ "loss": 0.5602,
+ "step": 4809
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.269160669282738e-08,
+ "loss": 0.7253,
+ "step": 4810
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.186375631499821e-08,
+ "loss": 0.6201,
+ "step": 4811
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.104005370824896e-08,
+ "loss": 0.7068,
+ "step": 4812
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.0220499217053e-08,
+ "loss": 0.7459,
+ "step": 4813
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.940509318414725e-08,
+ "loss": 0.648,
+ "step": 4814
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.859383595053449e-08,
+ "loss": 0.6042,
+ "step": 4815
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.778672785548558e-08,
+ "loss": 0.668,
+ "step": 4816
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.69837692365305e-08,
+ "loss": 0.5274,
+ "step": 4817
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.618496042946954e-08,
+ "loss": 0.7404,
+ "step": 4818
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.539030176836549e-08,
+ "loss": 0.6038,
+ "step": 4819
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.459979358554248e-08,
+ "loss": 0.6702,
+ "step": 4820
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.381343621159275e-08,
+ "loss": 0.6728,
+ "step": 4821
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.303122997537215e-08,
+ "loss": 0.6146,
+ "step": 4822
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.2253175203999e-08,
+ "loss": 0.6256,
+ "step": 4823
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.147927222285634e-08,
+ "loss": 0.661,
+ "step": 4824
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.070952135559195e-08,
+ "loss": 0.4944,
+ "step": 4825
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 6.9943922924115e-08,
+ "loss": 0.6203,
+ "step": 4826
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 6.918247724859939e-08,
+ "loss": 0.6331,
+ "step": 4827
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 6.84251846474826e-08,
+ "loss": 0.5939,
+ "step": 4828
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 6.767204543746463e-08,
+ "loss": 0.6071,
+ "step": 4829
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 6.692305993350912e-08,
+ "loss": 0.7131,
+ "step": 4830
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 6.617822844884325e-08,
+ "loss": 0.5866,
+ "step": 4831
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 6.543755129495343e-08,
+ "loss": 0.662,
+ "step": 4832
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 6.470102878159301e-08,
+ "loss": 0.637,
+ "step": 4833
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 6.396866121677558e-08,
+ "loss": 0.676,
+ "step": 4834
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 6.324044890677839e-08,
+ "loss": 0.7089,
+ "step": 4835
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 6.251639215614113e-08,
+ "loss": 0.5898,
+ "step": 4836
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 6.179649126766274e-08,
+ "loss": 0.6209,
+ "step": 4837
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 6.108074654240792e-08,
+ "loss": 0.5865,
+ "step": 4838
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 6.036915827969947e-08,
+ "loss": 0.6709,
+ "step": 4839
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.966172677712711e-08,
+ "loss": 0.644,
+ "step": 4840
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.895845233053643e-08,
+ "loss": 0.6564,
+ "step": 4841
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.825933523403881e-08,
+ "loss": 0.6693,
+ "step": 4842
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.7564375780004843e-08,
+ "loss": 0.6076,
+ "step": 4843
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.6873574259066476e-08,
+ "loss": 0.7387,
+ "step": 4844
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.618693096011929e-08,
+ "loss": 0.6455,
+ "step": 4845
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.550444617031581e-08,
+ "loss": 0.6901,
+ "step": 4846
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.482612017507327e-08,
+ "loss": 0.499,
+ "step": 4847
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.415195325806699e-08,
+ "loss": 0.5974,
+ "step": 4848
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.348194570123588e-08,
+ "loss": 0.7753,
+ "step": 4849
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.28160977847747e-08,
+ "loss": 0.6428,
+ "step": 4850
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.2154409787141815e-08,
+ "loss": 0.6135,
+ "step": 4851
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.149688198505809e-08,
+ "loss": 0.5738,
+ "step": 4852
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.084351465350024e-08,
+ "loss": 0.6687,
+ "step": 4853
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.019430806570747e-08,
+ "loss": 0.6878,
+ "step": 4854
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.954926249317815e-08,
+ "loss": 0.6452,
+ "step": 4855
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.890837820566985e-08,
+ "loss": 0.7035,
+ "step": 4856
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.8271655471202605e-08,
+ "loss": 0.6285,
+ "step": 4857
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.7639094556053424e-08,
+ "loss": 0.7657,
+ "step": 4858
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.701069572475958e-08,
+ "loss": 0.6526,
+ "step": 4859
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.638645924011753e-08,
+ "loss": 0.6282,
+ "step": 4860
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.5766385363184006e-08,
+ "loss": 0.6974,
+ "step": 4861
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.5150474353274906e-08,
+ "loss": 0.7134,
+ "step": 4862
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.453872646796309e-08,
+ "loss": 0.7126,
+ "step": 4863
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.393114196308279e-08,
+ "loss": 0.5335,
+ "step": 4864
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.3327721092726314e-08,
+ "loss": 0.7343,
+ "step": 4865
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.272846410924514e-08,
+ "loss": 0.6885,
+ "step": 4866
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.213337126324768e-08,
+ "loss": 0.6135,
+ "step": 4867
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.154244280360265e-08,
+ "loss": 0.6516,
+ "step": 4868
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.0955678977436796e-08,
+ "loss": 0.5566,
+ "step": 4869
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.0373080030134957e-08,
+ "loss": 0.568,
+ "step": 4870
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.979464620534113e-08,
+ "loss": 0.6415,
+ "step": 4871
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.922037774495624e-08,
+ "loss": 0.5877,
+ "step": 4872
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.8650274889139306e-08,
+ "loss": 0.67,
+ "step": 4873
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.8084337876308494e-08,
+ "loss": 0.6503,
+ "step": 4874
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.752256694313783e-08,
+ "loss": 0.5915,
+ "step": 4875
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.696496232456159e-08,
+ "loss": 0.615,
+ "step": 4876
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.641152425376992e-08,
+ "loss": 0.6074,
+ "step": 4877
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.586225296221102e-08,
+ "loss": 0.6965,
+ "step": 4878
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.531714867959113e-08,
+ "loss": 0.6448,
+ "step": 4879
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.477621163387124e-08,
+ "loss": 0.6437,
+ "step": 4880
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.423944205127372e-08,
+ "loss": 0.6113,
+ "step": 4881
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.370684015627457e-08,
+ "loss": 0.5741,
+ "step": 4882
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.3178406171608946e-08,
+ "loss": 0.5999,
+ "step": 4883
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 3.2654140318268965e-08,
+ "loss": 0.5845,
+ "step": 4884
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 3.213404281550148e-08,
+ "loss": 0.6716,
+ "step": 4885
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 3.161811388081249e-08,
+ "loss": 0.6847,
+ "step": 4886
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 3.1106353729962734e-08,
+ "loss": 0.672,
+ "step": 4887
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 3.059876257697214e-08,
+ "loss": 0.682,
+ "step": 4888
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 3.009534063411534e-08,
+ "loss": 0.6686,
+ "step": 4889
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.9596088111922828e-08,
+ "loss": 0.6836,
+ "step": 4890
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.910100521918313e-08,
+ "loss": 0.6753,
+ "step": 4891
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.8610092162939528e-08,
+ "loss": 0.665,
+ "step": 4892
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.812334914849335e-08,
+ "loss": 0.7087,
+ "step": 4893
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.7640776379399547e-08,
+ "loss": 0.6313,
+ "step": 4894
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.716237405747113e-08,
+ "loss": 0.6664,
+ "step": 4895
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.668814238277584e-08,
+ "loss": 0.5446,
+ "step": 4896
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.6218081553638363e-08,
+ "loss": 0.6654,
+ "step": 4897
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.5752191766638125e-08,
+ "loss": 0.5524,
+ "step": 4898
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.5290473216610378e-08,
+ "loss": 0.6205,
+ "step": 4899
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.4832926096646225e-08,
+ "loss": 0.631,
+ "step": 4900
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.4379550598092604e-08,
+ "loss": 0.6424,
+ "step": 4901
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.3930346910550073e-08,
+ "loss": 0.6477,
+ "step": 4902
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.3485315221877246e-08,
+ "loss": 0.6868,
+ "step": 4903
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.3044455718185254e-08,
+ "loss": 0.6658,
+ "step": 4904
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.2607768583843282e-08,
+ "loss": 0.6247,
+ "step": 4905
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.2175254001474135e-08,
+ "loss": 0.586,
+ "step": 4906
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.1746912151955346e-08,
+ "loss": 0.7153,
+ "step": 4907
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.1322743214420295e-08,
+ "loss": 0.7068,
+ "step": 4908
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.090274736625486e-08,
+ "loss": 0.6623,
+ "step": 4909
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.048692478310299e-08,
+ "loss": 0.7048,
+ "step": 4910
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.0075275638862247e-08,
+ "loss": 0.6996,
+ "step": 4911
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.9667800105683808e-08,
+ "loss": 0.5801,
+ "step": 4912
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.9264498353974704e-08,
+ "loss": 0.6379,
+ "step": 4913
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.8865370552395567e-08,
+ "loss": 0.6455,
+ "step": 4914
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.8470416867861775e-08,
+ "loss": 0.6874,
+ "step": 4915
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.807963746554342e-08,
+ "loss": 0.6045,
+ "step": 4916
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.769303250886534e-08,
+ "loss": 0.5629,
+ "step": 4917
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.7310602159505973e-08,
+ "loss": 0.7159,
+ "step": 4918
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.6932346577397396e-08,
+ "loss": 0.6373,
+ "step": 4919
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.655826592072529e-08,
+ "loss": 0.6922,
+ "step": 4920
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.6188360345932297e-08,
+ "loss": 0.6223,
+ "step": 4921
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.5822630007712447e-08,
+ "loss": 0.6078,
+ "step": 4922
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.546107505901451e-08,
+ "loss": 0.6243,
+ "step": 4923
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.5103695651040862e-08,
+ "loss": 0.7139,
+ "step": 4924
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.4750491933247513e-08,
+ "loss": 0.6249,
+ "step": 4925
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.4401464053345194e-08,
+ "loss": 0.7669,
+ "step": 4926
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.4056612157297145e-08,
+ "loss": 0.6207,
+ "step": 4927
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.3715936389320228e-08,
+ "loss": 0.7199,
+ "step": 4928
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.3379436891886034e-08,
+ "loss": 0.6447,
+ "step": 4929
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.304711380571866e-08,
+ "loss": 0.5794,
+ "step": 4930
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.2718967269795823e-08,
+ "loss": 0.538,
+ "step": 4931
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.2394997421347753e-08,
+ "loss": 0.6675,
+ "step": 4932
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.2075204395860518e-08,
+ "loss": 0.7181,
+ "step": 4933
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.175958832707047e-08,
+ "loss": 0.6984,
+ "step": 4934
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.1448149346969806e-08,
+ "loss": 0.6731,
+ "step": 4935
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.1140887585801007e-08,
+ "loss": 0.7127,
+ "step": 4936
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.083780317206129e-08,
+ "loss": 0.5995,
+ "step": 4937
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.0538896232501484e-08,
+ "loss": 0.7262,
+ "step": 4938
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.0244166892124929e-08,
+ "loss": 0.6117,
+ "step": 4939
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 9.953615274186367e-09,
+ "loss": 0.7064,
+ "step": 4940
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 9.667241500196378e-09,
+ "loss": 0.7207,
+ "step": 4941
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 9.385045689914719e-09,
+ "loss": 0.6079,
+ "step": 4942
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 9.107027961356985e-09,
+ "loss": 0.5582,
+ "step": 4943
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 8.833188430790175e-09,
+ "loss": 0.7484,
+ "step": 4944
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 8.563527212734902e-09,
+ "loss": 0.5029,
+ "step": 4945
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 8.29804441996207e-09,
+ "loss": 0.7883,
+ "step": 4946
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 8.036740163498425e-09,
+ "loss": 0.6856,
+ "step": 4947
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 7.779614552619886e-09,
+ "loss": 0.6605,
+ "step": 4948
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 7.526667694858214e-09,
+ "loss": 0.5979,
+ "step": 4949
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 7.277899695995461e-09,
+ "loss": 0.7463,
+ "step": 4950
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 7.033310660065074e-09,
+ "loss": 0.7076,
+ "step": 4951
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 6.792900689356341e-09,
+ "loss": 0.6262,
+ "step": 4952
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 6.556669884408839e-09,
+ "loss": 0.5602,
+ "step": 4953
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 6.3246183440124344e-09,
+ "loss": 0.6052,
+ "step": 4954
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 6.09674616521283e-09,
+ "loss": 0.7171,
+ "step": 4955
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 5.873053443307131e-09,
+ "loss": 0.6427,
+ "step": 4956
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 5.653540271841618e-09,
+ "loss": 0.6792,
+ "step": 4957
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 5.438206742617302e-09,
+ "loss": 0.6489,
+ "step": 4958
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 5.2270529456888155e-09,
+ "loss": 0.7584,
+ "step": 4959
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 5.0200789693588544e-09,
+ "loss": 0.6937,
+ "step": 4960
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 4.817284900183738e-09,
+ "loss": 0.6157,
+ "step": 4961
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 4.618670822974514e-09,
+ "loss": 0.7559,
+ "step": 4962
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 4.424236820789185e-09,
+ "loss": 0.709,
+ "step": 4963
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 4.233982974940487e-09,
+ "loss": 0.6955,
+ "step": 4964
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 4.047909364994773e-09,
+ "loss": 0.8074,
+ "step": 4965
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 3.866016068766465e-09,
+ "loss": 0.669,
+ "step": 4966
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 3.688303162322493e-09,
+ "loss": 0.7238,
+ "step": 4967
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 3.514770719984517e-09,
+ "loss": 0.7436,
+ "step": 4968
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 3.3454188143233758e-09,
+ "loss": 0.6004,
+ "step": 4969
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 3.1802475161624157e-09,
+ "loss": 0.6809,
+ "step": 4970
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 3.019256894575273e-09,
+ "loss": 0.6802,
+ "step": 4971
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 2.8624470168892026e-09,
+ "loss": 0.6016,
+ "step": 4972
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 2.7098179486806376e-09,
+ "loss": 0.6417,
+ "step": 4973
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 2.5613697537818505e-09,
+ "loss": 0.6739,
+ "step": 4974
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 2.4171024942720723e-09,
+ "loss": 0.7047,
+ "step": 4975
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 2.2770162304852626e-09,
+ "loss": 0.6052,
+ "step": 4976
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 2.14111102100345e-09,
+ "loss": 0.6047,
+ "step": 4977
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 2.0093869226645024e-09,
+ "loss": 0.606,
+ "step": 4978
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.881843990554355e-09,
+ "loss": 0.7004,
+ "step": 4979
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.7584822780125633e-09,
+ "loss": 0.7405,
+ "step": 4980
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.6393018366278601e-09,
+ "loss": 0.6513,
+ "step": 4981
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.524302716241488e-09,
+ "loss": 0.5887,
+ "step": 4982
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.4134849649471982e-09,
+ "loss": 0.6465,
+ "step": 4983
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 1.3068486290890303e-09,
+ "loss": 0.7206,
+ "step": 4984
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 1.2043937532613126e-09,
+ "loss": 0.6833,
+ "step": 4985
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 1.1061203803119924e-09,
+ "loss": 0.6588,
+ "step": 4986
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 1.0120285513381957e-09,
+ "loss": 0.5932,
+ "step": 4987
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 9.221183056895566e-10,
+ "loss": 0.6631,
+ "step": 4988
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 8.363896809659989e-10,
+ "loss": 0.6787,
+ "step": 4989
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 7.548427130199542e-10,
+ "loss": 0.7941,
+ "step": 4990
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 6.774774359541436e-10,
+ "loss": 0.6388,
+ "step": 4991
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 6.042938821226863e-10,
+ "loss": 0.6437,
+ "step": 4992
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 5.352920821311002e-10,
+ "loss": 0.6248,
+ "step": 4993
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 4.704720648374128e-10,
+ "loss": 0.6858,
+ "step": 4994
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 4.0983385734660875e-10,
+ "loss": 0.6363,
+ "step": 4995
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 3.533774850195126e-10,
+ "loss": 0.6533,
+ "step": 4996
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 3.011029714650171e-10,
+ "loss": 0.6493,
+ "step": 4997
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 2.5301033854563394e-10,
+ "loss": 0.6093,
+ "step": 4998
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 2.090996063741635e-10,
+ "loss": 0.7029,
+ "step": 4999
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 1.693707933114741e-10,
+ "loss": 0.6288,
+ "step": 5000
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 1.3382391597538403e-10,
+ "loss": 0.637,
+ "step": 5001
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 1.0245898922844889e-10,
+ "loss": 0.5249,
+ "step": 5002
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 7.527602619017415e-11,
+ "loss": 0.6484,
+ "step": 5003
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 5.227503822702318e-11,
+ "loss": 0.7082,
+ "step": 5004
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 3.3456034959078456e-11,
+ "loss": 0.5691,
+ "step": 5005
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 1.8819024255600782e-11,
+ "loss": 0.6216,
+ "step": 5006
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 8.364012237249698e-12,
+ "loss": 0.7011,
+ "step": 5007
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 2.091003278303916e-12,
+ "loss": 0.5181,
+ "step": 5008
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.0,
+ "loss": 0.6311,
+ "step": 5009
+ },
+ {
+ "epoch": 1.0,
+ "step": 5009,
+ "total_flos": 359070384291840.0,
+ "train_loss": 0.7147823252418984,
+ "train_runtime": 12071.802,
+ "train_samples_per_second": 26.555,
+ "train_steps_per_second": 0.415
+ }
+ ],
+ "logging_steps": 1.0,
+ "max_steps": 5009,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 1,
+ "save_steps": 50000,
+ "total_flos": 359070384291840.0,
+ "train_batch_size": 16,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/ICM-LLaVA-v1.5-7B/training_args.bin b/ICM-LLaVA-v1.5-7B/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..fb21ea51684374c104f104a97e284461a07d2a57
--- /dev/null
+++ b/ICM-LLaVA-v1.5-7B/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b6cd97e85dee6b88e10066398cd68e2478589104b40c77d1f9bc35a290404124
+size 6776
diff --git a/ICM-LLaVA-v1.6-13B/config.json b/ICM-LLaVA-v1.6-13B/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..2b48cfca7f8b36d08ca7fff4499ae12cefb50bfe
--- /dev/null
+++ b/ICM-LLaVA-v1.6-13B/config.json
@@ -0,0 +1,74 @@
+{
+ "_name_or_path": "liuhaotian/llava-v1.6-vicuna-13b",
+ "architectures": [
+ "LlavaLlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "freeze_mm_mlp_adapter": false,
+ "freeze_mm_vision_resampler": false,
+ "hidden_act": "silu",
+ "hidden_size": 5120,
+ "image_aspect_ratio": "pad",
+ "image_crop_resolution": 224,
+ "image_grid_pinpoints": [
+ [
+ 336,
+ 672
+ ],
+ [
+ 672,
+ 336
+ ],
+ [
+ 672,
+ 672
+ ],
+ [
+ 1008,
+ 336
+ ],
+ [
+ 336,
+ 1008
+ ]
+ ],
+ "image_split_resolution": 224,
+ "initializer_range": 0.02,
+ "intermediate_size": 13824,
+ "max_length": 4096,
+ "max_position_embeddings": 4096,
+ "mm_hidden_size": 1024,
+ "mm_patch_merge_type": "flat",
+ "mm_projector_lr": null,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_resampler_type": null,
+ "mm_use_im_patch_token": false,
+ "mm_use_im_start_end": false,
+ "mm_vision_select_feature": "patch",
+ "mm_vision_select_layer": -2,
+ "mm_vision_tower": "openai/clip-vit-large-patch14-336",
+ "mm_vision_tower_lr": 2e-06,
+ "model_type": "llava_llama",
+ "num_attention_heads": 40,
+ "num_hidden_layers": 40,
+ "num_key_value_heads": 40,
+ "pad_token_id": 0,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "tokenizer_model_max_length": 2048,
+ "tokenizer_padding_side": "right",
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.37.2",
+ "tune_mm_mlp_adapter": false,
+ "tune_mm_vision_resampler": false,
+ "unfreeze_mm_vision_tower": true,
+ "use_cache": true,
+ "use_mm_proj": true,
+ "vocab_size": 32000
+}
diff --git a/ICM-LLaVA-v1.6-13B/generation_config.json b/ICM-LLaVA-v1.6-13B/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..70ffa4e13b28eca9f452207a778bb73c036f3d03
--- /dev/null
+++ b/ICM-LLaVA-v1.6-13B/generation_config.json
@@ -0,0 +1,8 @@
+{
+ "_from_model_config": true,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "max_length": 4096,
+ "pad_token_id": 0,
+ "transformers_version": "4.37.2"
+}
diff --git a/ICM-LLaVA-v1.6-13B/model-00001-of-00006.safetensors b/ICM-LLaVA-v1.6-13B/model-00001-of-00006.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..2a6057a9b21ab47724bb6ae5869ae82e65708d02
--- /dev/null
+++ b/ICM-LLaVA-v1.6-13B/model-00001-of-00006.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3a03e29c400a790348553d29d06c969c1028c29396c812ff0a1157ade78fedf4
+size 4978276128
diff --git a/ICM-LLaVA-v1.6-13B/model-00002-of-00006.safetensors b/ICM-LLaVA-v1.6-13B/model-00002-of-00006.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..4d7ad98108b68e30988bbb389283e001e8eb219f
--- /dev/null
+++ b/ICM-LLaVA-v1.6-13B/model-00002-of-00006.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6952772f3f1438c52a60920622d56ce94e31688367f8ebc602e4c2550cf1bd79
+size 4970422232
diff --git a/ICM-LLaVA-v1.6-13B/model-00003-of-00006.safetensors b/ICM-LLaVA-v1.6-13B/model-00003-of-00006.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..1bd935b5e4c936d6f3b88da7d1658a1c177e2aab
--- /dev/null
+++ b/ICM-LLaVA-v1.6-13B/model-00003-of-00006.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ba3a8b00f66a5dd8d7de7ff9c2ee64cedb0bef66e395a69498cab4c9391e3266
+size 4970422256
diff --git a/ICM-LLaVA-v1.6-13B/model-00004-of-00006.safetensors b/ICM-LLaVA-v1.6-13B/model-00004-of-00006.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..cfa52c6944ea677d46bc714f5e0a237062a5bc08
--- /dev/null
+++ b/ICM-LLaVA-v1.6-13B/model-00004-of-00006.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e26ac2c2cf9aa7688579270b4c13eb4b7a072dca59b9ce79859004aeda1e44c3
+size 4933701504
diff --git a/ICM-LLaVA-v1.6-13B/model-00005-of-00006.safetensors b/ICM-LLaVA-v1.6-13B/model-00005-of-00006.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..099a99b0096062c08d3dea77992000b7e0ab67b2
--- /dev/null
+++ b/ICM-LLaVA-v1.6-13B/model-00005-of-00006.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:3a5e1839515b13fe4913cc3fd0d5034926761c7acac17877eb492c1dd7a694d3
+size 4933722216
diff --git a/ICM-LLaVA-v1.6-13B/model-00006-of-00006.safetensors b/ICM-LLaVA-v1.6-13B/model-00006-of-00006.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..7abc174f067d12f273957a080020202aa9721042
--- /dev/null
+++ b/ICM-LLaVA-v1.6-13B/model-00006-of-00006.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8b794a9624ca47ddc39d36c5ae792f51cc85aafb40014313ee60493fe3a4c593
+size 1915248664
diff --git a/ICM-LLaVA-v1.6-13B/model.safetensors.index.json b/ICM-LLaVA-v1.6-13B/model.safetensors.index.json
new file mode 100644
index 0000000000000000000000000000000000000000..5cb0534e5af0581cc99cf491b62264df614dd647
--- /dev/null
+++ b/ICM-LLaVA-v1.6-13B/model.safetensors.index.json
@@ -0,0 +1,766 @@
+{
+ "metadata": {
+ "total_size": 26701688832
+ },
+ "weight_map": {
+ "lm_head.weight": "model-00006-of-00006.safetensors",
+ "model.embed_tokens.weight": "model-00001-of-00006.safetensors",
+ "model.image_newline": "model-00001-of-00006.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00003-of-00006.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.28.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.28.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.28.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.28.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.28.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.28.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.28.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.28.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.28.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.29.input_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.29.mlp.down_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.29.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.29.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.29.post_attention_layernorm.weight": "model-00004-of-00006.safetensors",
+ "model.layers.29.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.29.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.29.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.29.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.30.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.30.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.30.mlp.gate_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.30.mlp.up_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.30.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.30.self_attn.k_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.30.self_attn.o_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.30.self_attn.q_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.30.self_attn.v_proj.weight": "model-00004-of-00006.safetensors",
+ "model.layers.31.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.31.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.32.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.33.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.34.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.35.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.35.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.35.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.35.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.35.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.35.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.35.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.35.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.35.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.36.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.36.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.36.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.36.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.36.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.36.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.36.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.36.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.36.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.37.input_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.37.mlp.down_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.37.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.37.mlp.up_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.37.post_attention_layernorm.weight": "model-00005-of-00006.safetensors",
+ "model.layers.37.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.37.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.37.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.37.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.38.input_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.38.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.38.mlp.gate_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.38.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.38.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.38.self_attn.k_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.38.self_attn.o_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.38.self_attn.q_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.38.self_attn.v_proj.weight": "model-00005-of-00006.safetensors",
+ "model.layers.39.input_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.mlp.down_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.mlp.gate_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.mlp.up_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.post_attention_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.self_attn.o_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.39.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00006.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00002-of-00006.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00002-of-00006.safetensors",
+ "model.mm_projector.0.bias": "model-00006-of-00006.safetensors",
+ "model.mm_projector.0.weight": "model-00006-of-00006.safetensors",
+ "model.mm_projector.2.bias": "model-00006-of-00006.safetensors",
+ "model.mm_projector.2.weight": "model-00006-of-00006.safetensors",
+ "model.norm.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.embeddings.class_embedding": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.embeddings.patch_embedding.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.embeddings.position_embedding.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc1.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc1.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc2.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc2.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.post_layernorm.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.post_layernorm.weight": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.pre_layrnorm.bias": "model-00006-of-00006.safetensors",
+ "model.vision_tower.vision_tower.vision_model.pre_layrnorm.weight": "model-00006-of-00006.safetensors"
+ }
+}
diff --git a/ICM-LLaVA-v1.6-13B/runs/Jan20_10-45-04_hk01dgx043/events.out.tfevents.1737341853.hk01dgx043.200334.0 b/ICM-LLaVA-v1.6-13B/runs/Jan20_10-45-04_hk01dgx043/events.out.tfevents.1737341853.hk01dgx043.200334.0
new file mode 100644
index 0000000000000000000000000000000000000000..2e60eb2aaec0a45e0fcd001622c5def4f14b8cdf
--- /dev/null
+++ b/ICM-LLaVA-v1.6-13B/runs/Jan20_10-45-04_hk01dgx043/events.out.tfevents.1737341853.hk01dgx043.200334.0
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e34ce731b27e33e9c9e22f613abdadc299addcf1a96ac967119daf2f4fee668e
+size 399135
diff --git a/ICM-LLaVA-v1.6-13B/special_tokens_map.json b/ICM-LLaVA-v1.6-13B/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..14761dcf1466dc232bd41de9c21d4c617b15755e
--- /dev/null
+++ b/ICM-LLaVA-v1.6-13B/special_tokens_map.json
@@ -0,0 +1,24 @@
+{
+ "bos_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": "",
+ "unk_token": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/ICM-LLaVA-v1.6-13B/tokenizer.model b/ICM-LLaVA-v1.6-13B/tokenizer.model
new file mode 100644
index 0000000000000000000000000000000000000000..6c00c742ce03c627d6cd5b795984876fa49fa899
--- /dev/null
+++ b/ICM-LLaVA-v1.6-13B/tokenizer.model
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
diff --git a/ICM-LLaVA-v1.6-13B/tokenizer_config.json b/ICM-LLaVA-v1.6-13B/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..2d53c0f8edb049fa98763ee75652fafa68bf7f42
--- /dev/null
+++ b/ICM-LLaVA-v1.6-13B/tokenizer_config.json
@@ -0,0 +1,42 @@
+{
+ "add_bos_token": true,
+ "add_eos_token": false,
+ "added_tokens_decoder": {
+ "0": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "",
+ "legacy": false,
+ "model_max_length": 2048,
+ "pad_token": "",
+ "padding_side": "right",
+ "sp_model_kwargs": {},
+ "spaces_between_special_tokens": false,
+ "tokenizer_class": "LlamaTokenizer",
+ "unk_token": "",
+ "use_default_system_prompt": false
+}
diff --git a/ICM-LLaVA-v1.6-13B/trainer_state.json b/ICM-LLaVA-v1.6-13B/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..870e010abe664f47e5fa3d64dca8cbaed071b778
--- /dev/null
+++ b/ICM-LLaVA-v1.6-13B/trainer_state.json
@@ -0,0 +1,15060 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 1.0,
+ "eval_steps": 500,
+ "global_step": 2505,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.6315789473684213e-07,
+ "loss": 1.4543,
+ "step": 1
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 5.263157894736843e-07,
+ "loss": 1.5629,
+ "step": 2
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 7.894736842105263e-07,
+ "loss": 1.4761,
+ "step": 3
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.0526315789473685e-06,
+ "loss": 1.363,
+ "step": 4
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.3157894736842106e-06,
+ "loss": 1.4769,
+ "step": 5
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.5789473684210526e-06,
+ "loss": 1.4652,
+ "step": 6
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.8421052631578948e-06,
+ "loss": 1.4577,
+ "step": 7
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.105263157894737e-06,
+ "loss": 1.3209,
+ "step": 8
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.368421052631579e-06,
+ "loss": 1.2983,
+ "step": 9
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.631578947368421e-06,
+ "loss": 1.1688,
+ "step": 10
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.8947368421052634e-06,
+ "loss": 1.1566,
+ "step": 11
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 3.157894736842105e-06,
+ "loss": 1.1409,
+ "step": 12
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.421052631578948e-06,
+ "loss": 0.9698,
+ "step": 13
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.6842105263157896e-06,
+ "loss": 1.0189,
+ "step": 14
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.947368421052632e-06,
+ "loss": 0.9533,
+ "step": 15
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.210526315789474e-06,
+ "loss": 1.0382,
+ "step": 16
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.473684210526316e-06,
+ "loss": 0.9872,
+ "step": 17
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.736842105263158e-06,
+ "loss": 0.8664,
+ "step": 18
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5e-06,
+ "loss": 0.9681,
+ "step": 19
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.263157894736842e-06,
+ "loss": 1.0012,
+ "step": 20
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.526315789473685e-06,
+ "loss": 0.934,
+ "step": 21
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.789473684210527e-06,
+ "loss": 0.901,
+ "step": 22
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.0526315789473685e-06,
+ "loss": 0.9607,
+ "step": 23
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.31578947368421e-06,
+ "loss": 0.8542,
+ "step": 24
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.578947368421054e-06,
+ "loss": 0.8525,
+ "step": 25
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.842105263157896e-06,
+ "loss": 0.9267,
+ "step": 26
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.1052631578947375e-06,
+ "loss": 0.9264,
+ "step": 27
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.368421052631579e-06,
+ "loss": 0.8742,
+ "step": 28
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.631578947368423e-06,
+ "loss": 0.922,
+ "step": 29
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.894736842105265e-06,
+ "loss": 0.9025,
+ "step": 30
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.157894736842106e-06,
+ "loss": 0.9195,
+ "step": 31
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.421052631578948e-06,
+ "loss": 0.876,
+ "step": 32
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.68421052631579e-06,
+ "loss": 0.8704,
+ "step": 33
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.947368421052632e-06,
+ "loss": 0.8854,
+ "step": 34
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.210526315789474e-06,
+ "loss": 0.8345,
+ "step": 35
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.473684210526315e-06,
+ "loss": 0.8421,
+ "step": 36
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.736842105263159e-06,
+ "loss": 0.68,
+ "step": 37
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1e-05,
+ "loss": 0.8103,
+ "step": 38
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0263157894736844e-05,
+ "loss": 0.8701,
+ "step": 39
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0526315789473684e-05,
+ "loss": 0.853,
+ "step": 40
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0789473684210528e-05,
+ "loss": 0.7756,
+ "step": 41
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.105263157894737e-05,
+ "loss": 0.8384,
+ "step": 42
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1315789473684212e-05,
+ "loss": 0.7995,
+ "step": 43
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1578947368421053e-05,
+ "loss": 0.7323,
+ "step": 44
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1842105263157895e-05,
+ "loss": 0.7436,
+ "step": 45
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2105263157894737e-05,
+ "loss": 0.8182,
+ "step": 46
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.236842105263158e-05,
+ "loss": 0.8451,
+ "step": 47
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.263157894736842e-05,
+ "loss": 0.856,
+ "step": 48
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2894736842105264e-05,
+ "loss": 0.8425,
+ "step": 49
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3157894736842108e-05,
+ "loss": 0.8569,
+ "step": 50
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3421052631578948e-05,
+ "loss": 0.8401,
+ "step": 51
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3684210526315791e-05,
+ "loss": 0.7044,
+ "step": 52
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3947368421052631e-05,
+ "loss": 0.7978,
+ "step": 53
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4210526315789475e-05,
+ "loss": 0.8081,
+ "step": 54
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4473684210526317e-05,
+ "loss": 0.7315,
+ "step": 55
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4736842105263159e-05,
+ "loss": 0.7821,
+ "step": 56
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5000000000000002e-05,
+ "loss": 0.8086,
+ "step": 57
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5263157894736846e-05,
+ "loss": 0.8023,
+ "step": 58
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5526315789473686e-05,
+ "loss": 0.7946,
+ "step": 59
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.578947368421053e-05,
+ "loss": 0.7953,
+ "step": 60
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.605263157894737e-05,
+ "loss": 0.8065,
+ "step": 61
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.6315789473684213e-05,
+ "loss": 0.8118,
+ "step": 62
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.6578947368421053e-05,
+ "loss": 0.7321,
+ "step": 63
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.6842105263157896e-05,
+ "loss": 0.725,
+ "step": 64
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7105263157894737e-05,
+ "loss": 0.7923,
+ "step": 65
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.736842105263158e-05,
+ "loss": 0.7885,
+ "step": 66
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.763157894736842e-05,
+ "loss": 0.7672,
+ "step": 67
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7894736842105264e-05,
+ "loss": 0.8092,
+ "step": 68
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8157894736842107e-05,
+ "loss": 0.8406,
+ "step": 69
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8421052631578947e-05,
+ "loss": 0.7611,
+ "step": 70
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.868421052631579e-05,
+ "loss": 0.7858,
+ "step": 71
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.894736842105263e-05,
+ "loss": 0.8263,
+ "step": 72
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9210526315789474e-05,
+ "loss": 0.7259,
+ "step": 73
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9473684210526318e-05,
+ "loss": 0.7304,
+ "step": 74
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9736842105263158e-05,
+ "loss": 0.8221,
+ "step": 75
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 2e-05,
+ "loss": 0.8019,
+ "step": 76
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999991635987763e-05,
+ "loss": 0.776,
+ "step": 77
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999966543965042e-05,
+ "loss": 0.7901,
+ "step": 78
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999924723973812e-05,
+ "loss": 0.765,
+ "step": 79
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999866176084026e-05,
+ "loss": 0.7485,
+ "step": 80
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999790900393628e-05,
+ "loss": 0.761,
+ "step": 81
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999698897028537e-05,
+ "loss": 0.7795,
+ "step": 82
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999590166142656e-05,
+ "loss": 0.773,
+ "step": 83
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999946470791787e-05,
+ "loss": 0.8356,
+ "step": 84
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999322522564048e-05,
+ "loss": 0.8307,
+ "step": 85
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999163610319035e-05,
+ "loss": 0.7643,
+ "step": 86
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9998987971448664e-05,
+ "loss": 0.7712,
+ "step": 87
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998795606246738e-05,
+ "loss": 0.7558,
+ "step": 88
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998586515035053e-05,
+ "loss": 0.8393,
+ "step": 89
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998360698163375e-05,
+ "loss": 0.7454,
+ "step": 90
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999811815600945e-05,
+ "loss": 0.7492,
+ "step": 91
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9997858888978997e-05,
+ "loss": 0.7378,
+ "step": 92
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999758289750573e-05,
+ "loss": 0.8004,
+ "step": 93
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999729018205132e-05,
+ "loss": 0.7749,
+ "step": 94
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996980743105427e-05,
+ "loss": 0.8817,
+ "step": 95
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999665458118568e-05,
+ "loss": 0.8174,
+ "step": 96
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999631169683768e-05,
+ "loss": 0.7853,
+ "step": 97
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9995952090635007e-05,
+ "loss": 0.8346,
+ "step": 98
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9995575763179213e-05,
+ "loss": 0.767,
+ "step": 99
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999518271509982e-05,
+ "loss": 0.7894,
+ "step": 100
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999477294705431e-05,
+ "loss": 0.8027,
+ "step": 101
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999434645972816e-05,
+ "loss": 0.7769,
+ "step": 102
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999390325383479e-05,
+ "loss": 0.8137,
+ "step": 103
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9993443330115592e-05,
+ "loss": 0.8682,
+ "step": 104
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9992966689339936e-05,
+ "loss": 0.7383,
+ "step": 105
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9992473332305145e-05,
+ "loss": 0.8131,
+ "step": 106
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9991963259836504e-05,
+ "loss": 0.7208,
+ "step": 107
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9991436472787267e-05,
+ "loss": 0.8098,
+ "step": 108
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9990892972038646e-05,
+ "loss": 0.7771,
+ "step": 109
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9990332758499805e-05,
+ "loss": 0.838,
+ "step": 110
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9989755833107875e-05,
+ "loss": 0.7057,
+ "step": 111
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.998916219682794e-05,
+ "loss": 0.8292,
+ "step": 112
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998855185065303e-05,
+ "loss": 0.7767,
+ "step": 113
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998792479560414e-05,
+ "loss": 0.8129,
+ "step": 114
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9987281032730206e-05,
+ "loss": 0.7638,
+ "step": 115
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9986620563108117e-05,
+ "loss": 0.74,
+ "step": 116
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9985943387842704e-05,
+ "loss": 0.8243,
+ "step": 117
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9985249508066754e-05,
+ "loss": 0.8119,
+ "step": 118
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9984538924940987e-05,
+ "loss": 0.7776,
+ "step": 119
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998381163965407e-05,
+ "loss": 0.8372,
+ "step": 120
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9983067653422603e-05,
+ "loss": 0.8476,
+ "step": 121
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9982306967491136e-05,
+ "loss": 0.7686,
+ "step": 122
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998152958313214e-05,
+ "loss": 0.7622,
+ "step": 123
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9980735501646026e-05,
+ "loss": 0.7519,
+ "step": 124
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997992472436114e-05,
+ "loss": 0.8242,
+ "step": 125
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9979097252633748e-05,
+ "loss": 0.7724,
+ "step": 126
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9978253087848046e-05,
+ "loss": 0.803,
+ "step": 127
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9977392231416158e-05,
+ "loss": 0.8038,
+ "step": 128
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9976514684778124e-05,
+ "loss": 0.8082,
+ "step": 129
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9975620449401913e-05,
+ "loss": 0.7481,
+ "step": 130
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997470952678339e-05,
+ "loss": 0.7618,
+ "step": 131
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9973781918446363e-05,
+ "loss": 0.7917,
+ "step": 132
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9972837625942533e-05,
+ "loss": 0.7767,
+ "step": 133
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997187665085151e-05,
+ "loss": 0.7645,
+ "step": 134
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997089899478082e-05,
+ "loss": 0.7905,
+ "step": 135
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9969904659365887e-05,
+ "loss": 0.7634,
+ "step": 136
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9968893646270037e-05,
+ "loss": 0.8162,
+ "step": 137
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.99678659571845e-05,
+ "loss": 0.7982,
+ "step": 138
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9966821593828393e-05,
+ "loss": 0.8189,
+ "step": 139
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996576055794873e-05,
+ "loss": 0.7436,
+ "step": 140
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996468285132041e-05,
+ "loss": 0.7795,
+ "step": 141
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9963588475746233e-05,
+ "loss": 0.8225,
+ "step": 142
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9962477433056864e-05,
+ "loss": 0.7917,
+ "step": 143
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996134972511086e-05,
+ "loss": 0.7836,
+ "step": 144
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996020535379466e-05,
+ "loss": 0.7549,
+ "step": 145
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9959044321022563e-05,
+ "loss": 0.8093,
+ "step": 146
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9957866628736756e-05,
+ "loss": 0.7819,
+ "step": 147
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9956672278907273e-05,
+ "loss": 0.8348,
+ "step": 148
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9955461273532037e-05,
+ "loss": 0.7491,
+ "step": 149
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9954233614636817e-05,
+ "loss": 0.799,
+ "step": 150
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.995298930427524e-05,
+ "loss": 0.7806,
+ "step": 151
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.99517283445288e-05,
+ "loss": 0.7639,
+ "step": 152
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9950450737506825e-05,
+ "loss": 0.7414,
+ "step": 153
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9949156485346502e-05,
+ "loss": 0.7962,
+ "step": 154
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994784559021286e-05,
+ "loss": 0.8227,
+ "step": 155
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9946518054298768e-05,
+ "loss": 0.8049,
+ "step": 156
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994517387982493e-05,
+ "loss": 0.7617,
+ "step": 157
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9943813069039883e-05,
+ "loss": 0.8057,
+ "step": 158
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994243562422e-05,
+ "loss": 0.7601,
+ "step": 159
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9941041547669467e-05,
+ "loss": 0.7954,
+ "step": 160
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9939630841720302e-05,
+ "loss": 0.7713,
+ "step": 161
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.993820350873234e-05,
+ "loss": 0.702,
+ "step": 162
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.993675955109322e-05,
+ "loss": 0.7307,
+ "step": 163
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.993529897121841e-05,
+ "loss": 0.7865,
+ "step": 164
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.993382177155116e-05,
+ "loss": 0.7169,
+ "step": 165
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.993232795456254e-05,
+ "loss": 0.7385,
+ "step": 166
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9930817522751403e-05,
+ "loss": 0.7464,
+ "step": 167
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9929290478644412e-05,
+ "loss": 0.7863,
+ "step": 168
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9927746824796003e-05,
+ "loss": 0.7797,
+ "step": 169
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992618656378841e-05,
+ "loss": 0.8162,
+ "step": 170
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9924609698231637e-05,
+ "loss": 0.7681,
+ "step": 171
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992301623076347e-05,
+ "loss": 0.7604,
+ "step": 172
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9921406164049467e-05,
+ "loss": 0.7758,
+ "step": 173
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991977950078295e-05,
+ "loss": 0.7768,
+ "step": 174
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9918136243685003e-05,
+ "loss": 0.7594,
+ "step": 175
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9916476395504473e-05,
+ "loss": 0.7524,
+ "step": 176
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991479995901796e-05,
+ "loss": 0.7518,
+ "step": 177
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991310693702981e-05,
+ "loss": 0.8093,
+ "step": 178
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991139733237211e-05,
+ "loss": 0.7821,
+ "step": 179
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9909671147904693e-05,
+ "loss": 0.7505,
+ "step": 180
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9907928386515126e-05,
+ "loss": 0.8454,
+ "step": 181
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.990616905111871e-05,
+ "loss": 0.7945,
+ "step": 182
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9904393144658455e-05,
+ "loss": 0.7365,
+ "step": 183
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9902600670105107e-05,
+ "loss": 0.7623,
+ "step": 184
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9900791630457122e-05,
+ "loss": 0.743,
+ "step": 185
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9898966028740662e-05,
+ "loss": 0.8165,
+ "step": 186
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9897123868009606e-05,
+ "loss": 0.6962,
+ "step": 187
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9895265151345516e-05,
+ "loss": 0.8441,
+ "step": 188
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9893389881857665e-05,
+ "loss": 0.798,
+ "step": 189
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9891498062683005e-05,
+ "loss": 0.7589,
+ "step": 190
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9889589696986173e-05,
+ "loss": 0.8189,
+ "step": 191
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9887664787959495e-05,
+ "loss": 0.7072,
+ "step": 192
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.988572333882296e-05,
+ "loss": 0.709,
+ "step": 193
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9883765352824224e-05,
+ "loss": 0.7852,
+ "step": 194
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9881790833238615e-05,
+ "loss": 0.7641,
+ "step": 195
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9879799783369117e-05,
+ "loss": 0.7677,
+ "step": 196
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987779220654636e-05,
+ "loss": 0.7481,
+ "step": 197
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9875768106128627e-05,
+ "loss": 0.7224,
+ "step": 198
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987372748550183e-05,
+ "loss": 0.7573,
+ "step": 199
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987167034807953e-05,
+ "loss": 0.7166,
+ "step": 200
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9869596697302912e-05,
+ "loss": 0.7318,
+ "step": 201
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9867506536640782e-05,
+ "loss": 0.7707,
+ "step": 202
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9865399869589565e-05,
+ "loss": 0.6767,
+ "step": 203
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9863276699673305e-05,
+ "loss": 0.7658,
+ "step": 204
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9861137030443638e-05,
+ "loss": 0.7782,
+ "step": 205
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985898086547981e-05,
+ "loss": 0.7294,
+ "step": 206
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9856808208388663e-05,
+ "loss": 0.8152,
+ "step": 207
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985461906280462e-05,
+ "loss": 0.7433,
+ "step": 208
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9852413432389685e-05,
+ "loss": 0.7528,
+ "step": 209
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985019132083345e-05,
+ "loss": 0.7832,
+ "step": 210
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9847952731853067e-05,
+ "loss": 0.8086,
+ "step": 211
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9845697669193248e-05,
+ "loss": 0.787,
+ "step": 212
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.984342613662627e-05,
+ "loss": 0.7599,
+ "step": 213
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.984113813795196e-05,
+ "loss": 0.7485,
+ "step": 214
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9838833676997687e-05,
+ "loss": 0.8036,
+ "step": 215
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9836512757618355e-05,
+ "loss": 0.7336,
+ "step": 216
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.983417538369641e-05,
+ "loss": 0.7306,
+ "step": 217
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9831821559141817e-05,
+ "loss": 0.8278,
+ "step": 218
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9829451287892048e-05,
+ "loss": 0.7243,
+ "step": 219
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9827064573912113e-05,
+ "loss": 0.8199,
+ "step": 220
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9824661421194506e-05,
+ "loss": 0.7144,
+ "step": 221
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9822241833759226e-05,
+ "loss": 0.7488,
+ "step": 222
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9819805815653768e-05,
+ "loss": 0.7542,
+ "step": 223
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9817353370953105e-05,
+ "loss": 0.8498,
+ "step": 224
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9814884503759698e-05,
+ "loss": 0.8274,
+ "step": 225
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9812399218203467e-05,
+ "loss": 0.8012,
+ "step": 226
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9809897518441813e-05,
+ "loss": 0.7795,
+ "step": 227
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.980737940865958e-05,
+ "loss": 0.7739,
+ "step": 228
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9804844893069063e-05,
+ "loss": 0.8206,
+ "step": 229
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9802293975910016e-05,
+ "loss": 0.7381,
+ "step": 230
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.979972666144961e-05,
+ "loss": 0.8139,
+ "step": 231
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9797142953982463e-05,
+ "loss": 0.7468,
+ "step": 232
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9794542857830605e-05,
+ "loss": 0.75,
+ "step": 233
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.979192637734348e-05,
+ "loss": 0.7616,
+ "step": 234
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9789293516897947e-05,
+ "loss": 0.7926,
+ "step": 235
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9786644280898258e-05,
+ "loss": 0.7735,
+ "step": 236
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9783978673776067e-05,
+ "loss": 0.7159,
+ "step": 237
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9781296699990398e-05,
+ "loss": 0.7479,
+ "step": 238
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.977859836402767e-05,
+ "loss": 0.7343,
+ "step": 239
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9775883670401667e-05,
+ "loss": 0.7427,
+ "step": 240
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9773152623653535e-05,
+ "loss": 0.7735,
+ "step": 241
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.977040522835177e-05,
+ "loss": 0.8466,
+ "step": 242
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9767641489092232e-05,
+ "loss": 0.6901,
+ "step": 243
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.97648614104981e-05,
+ "loss": 0.7421,
+ "step": 244
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.97620649972199e-05,
+ "loss": 0.7394,
+ "step": 245
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9759252253935485e-05,
+ "loss": 0.759,
+ "step": 246
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.975642318535001e-05,
+ "loss": 0.7147,
+ "step": 247
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.975357779619595e-05,
+ "loss": 0.8524,
+ "step": 248
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9750716091233083e-05,
+ "loss": 0.743,
+ "step": 249
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.974783807524847e-05,
+ "loss": 0.8265,
+ "step": 250
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.974494375305647e-05,
+ "loss": 0.7616,
+ "step": 251
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.974203312949871e-05,
+ "loss": 0.7433,
+ "step": 252
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9739106209444087e-05,
+ "loss": 0.7489,
+ "step": 253
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9736162997788757e-05,
+ "loss": 0.7249,
+ "step": 254
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9733203499456136e-05,
+ "loss": 0.7342,
+ "step": 255
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9730227719396883e-05,
+ "loss": 0.7705,
+ "step": 256
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9727235662588882e-05,
+ "loss": 0.748,
+ "step": 257
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9724227334037255e-05,
+ "loss": 0.6947,
+ "step": 258
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9721202738774346e-05,
+ "loss": 0.7895,
+ "step": 259
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9718161881859703e-05,
+ "loss": 0.8213,
+ "step": 260
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9715104768380078e-05,
+ "loss": 0.7562,
+ "step": 261
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.971203140344942e-05,
+ "loss": 0.7942,
+ "step": 262
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9708941792208857e-05,
+ "loss": 0.7643,
+ "step": 263
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9705835939826704e-05,
+ "loss": 0.7713,
+ "step": 264
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9702713851498436e-05,
+ "loss": 0.751,
+ "step": 265
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9699575532446688e-05,
+ "loss": 0.7403,
+ "step": 266
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.969642098792125e-05,
+ "loss": 0.6564,
+ "step": 267
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9693250223199053e-05,
+ "loss": 0.7397,
+ "step": 268
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9690063243584153e-05,
+ "loss": 0.798,
+ "step": 269
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9686860054407745e-05,
+ "loss": 0.7523,
+ "step": 270
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.968364066102813e-05,
+ "loss": 0.7466,
+ "step": 271
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9680405068830716e-05,
+ "loss": 0.8211,
+ "step": 272
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.967715328322801e-05,
+ "loss": 0.7025,
+ "step": 273
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.967388530965961e-05,
+ "loss": 0.7867,
+ "step": 274
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.967060115359218e-05,
+ "loss": 0.791,
+ "step": 275
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9667300820519478e-05,
+ "loss": 0.8126,
+ "step": 276
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.96639843159623e-05,
+ "loss": 0.7825,
+ "step": 277
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9660651645468504e-05,
+ "loss": 0.7935,
+ "step": 278
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.965730281461299e-05,
+ "loss": 0.7157,
+ "step": 279
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9653937828997694e-05,
+ "loss": 0.7276,
+ "step": 280
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.965055669425157e-05,
+ "loss": 0.7759,
+ "step": 281
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9647159416030588e-05,
+ "loss": 0.7915,
+ "step": 282
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9643746000017722e-05,
+ "loss": 0.7868,
+ "step": 283
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9640316451922943e-05,
+ "loss": 0.7688,
+ "step": 284
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9636870777483207e-05,
+ "loss": 0.7518,
+ "step": 285
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9633408982462453e-05,
+ "loss": 0.737,
+ "step": 286
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9629931072651572e-05,
+ "loss": 0.7581,
+ "step": 287
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9626437053868423e-05,
+ "loss": 0.7852,
+ "step": 288
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.962292693195781e-05,
+ "loss": 0.7444,
+ "step": 289
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9619400712791473e-05,
+ "loss": 0.7397,
+ "step": 290
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.961585840226808e-05,
+ "loss": 0.7723,
+ "step": 291
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9612300006313216e-05,
+ "loss": 0.7497,
+ "step": 292
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9608725530879376e-05,
+ "loss": 0.7497,
+ "step": 293
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.960513498194595e-05,
+ "loss": 0.7127,
+ "step": 294
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9601528365519218e-05,
+ "loss": 0.7249,
+ "step": 295
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9597905687632336e-05,
+ "loss": 0.7486,
+ "step": 296
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.959426695434533e-05,
+ "loss": 0.7892,
+ "step": 297
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9590612171745076e-05,
+ "loss": 0.8106,
+ "step": 298
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.958694134594531e-05,
+ "loss": 0.7707,
+ "step": 299
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.95832544830866e-05,
+ "loss": 0.7664,
+ "step": 300
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9579551589336333e-05,
+ "loss": 0.7668,
+ "step": 301
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9575832670888723e-05,
+ "loss": 0.7391,
+ "step": 302
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.957209773396478e-05,
+ "loss": 0.8085,
+ "step": 303
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.956834678481232e-05,
+ "loss": 0.7526,
+ "step": 304
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9564579829705945e-05,
+ "loss": 0.793,
+ "step": 305
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9560796874947016e-05,
+ "loss": 0.6835,
+ "step": 306
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9556997926863676e-05,
+ "loss": 0.7536,
+ "step": 307
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9553182991810812e-05,
+ "loss": 0.6964,
+ "step": 308
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.954935207617006e-05,
+ "loss": 0.7704,
+ "step": 309
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.954550518634978e-05,
+ "loss": 0.7835,
+ "step": 310
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9541642328785065e-05,
+ "loss": 0.7666,
+ "step": 311
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9537763509937707e-05,
+ "loss": 0.7768,
+ "step": 312
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9533868736296205e-05,
+ "loss": 0.7214,
+ "step": 313
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9529958014375748e-05,
+ "loss": 0.7116,
+ "step": 314
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.95260313507182e-05,
+ "loss": 0.7477,
+ "step": 315
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.952208875189209e-05,
+ "loss": 0.7579,
+ "step": 316
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9518130224492612e-05,
+ "loss": 0.728,
+ "step": 317
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9514155775141602e-05,
+ "loss": 0.7589,
+ "step": 318
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9510165410487524e-05,
+ "loss": 0.7324,
+ "step": 319
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.950615913720547e-05,
+ "loss": 0.7095,
+ "step": 320
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9502136961997144e-05,
+ "loss": 0.7771,
+ "step": 321
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9498098891590855e-05,
+ "loss": 0.789,
+ "step": 322
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9494044932741488e-05,
+ "loss": 0.7614,
+ "step": 323
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.948997509223052e-05,
+ "loss": 0.7311,
+ "step": 324
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9485889376865998e-05,
+ "loss": 0.6904,
+ "step": 325
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9481787793482503e-05,
+ "loss": 0.7466,
+ "step": 326
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9477670348941182e-05,
+ "loss": 0.7677,
+ "step": 327
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9473537050129704e-05,
+ "loss": 0.8235,
+ "step": 328
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9469387903962263e-05,
+ "loss": 0.7905,
+ "step": 329
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.946522291737956e-05,
+ "loss": 0.6732,
+ "step": 330
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9461042097348796e-05,
+ "loss": 0.7864,
+ "step": 331
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9456845450863654e-05,
+ "loss": 0.7756,
+ "step": 332
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9452632984944295e-05,
+ "loss": 0.7661,
+ "step": 333
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9448404706637344e-05,
+ "loss": 0.7511,
+ "step": 334
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9444160623015877e-05,
+ "loss": 0.7842,
+ "step": 335
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.94399007411794e-05,
+ "loss": 0.8302,
+ "step": 336
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9435625068253863e-05,
+ "loss": 0.7915,
+ "step": 337
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9431333611391615e-05,
+ "loss": 0.7481,
+ "step": 338
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.942702637777142e-05,
+ "loss": 0.8101,
+ "step": 339
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9422703374598425e-05,
+ "loss": 0.7844,
+ "step": 340
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9418364609104166e-05,
+ "loss": 0.7448,
+ "step": 341
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9414010088546535e-05,
+ "loss": 0.753,
+ "step": 342
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.940963982020979e-05,
+ "loss": 0.7824,
+ "step": 343
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9405253811404522e-05,
+ "loss": 0.8079,
+ "step": 344
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9400852069467657e-05,
+ "loss": 0.7841,
+ "step": 345
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9396434601762444e-05,
+ "loss": 0.7163,
+ "step": 346
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.939200141567843e-05,
+ "loss": 0.748,
+ "step": 347
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.938755251863146e-05,
+ "loss": 0.7229,
+ "step": 348
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9383087918063662e-05,
+ "loss": 0.7831,
+ "step": 349
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.937860762144343e-05,
+ "loss": 0.6798,
+ "step": 350
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9374111636265412e-05,
+ "loss": 0.679,
+ "step": 351
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9369599970050506e-05,
+ "loss": 0.7818,
+ "step": 352
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9365072630345838e-05,
+ "loss": 0.7274,
+ "step": 353
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9360529624724754e-05,
+ "loss": 0.7151,
+ "step": 354
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9355970960786803e-05,
+ "loss": 0.7676,
+ "step": 355
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.935139664615773e-05,
+ "loss": 0.7159,
+ "step": 356
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.934680668848946e-05,
+ "loss": 0.8012,
+ "step": 357
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9342201095460083e-05,
+ "loss": 0.7445,
+ "step": 358
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.933757987477385e-05,
+ "loss": 0.7466,
+ "step": 359
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.933294303416115e-05,
+ "loss": 0.8144,
+ "step": 360
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9328290581378497e-05,
+ "loss": 0.751,
+ "step": 361
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.932362252420853e-05,
+ "loss": 0.7117,
+ "step": 362
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9318938870459984e-05,
+ "loss": 0.7304,
+ "step": 363
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9314239627967688e-05,
+ "loss": 0.7965,
+ "step": 364
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9309524804592545e-05,
+ "loss": 0.7674,
+ "step": 365
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9304794408221524e-05,
+ "loss": 0.6942,
+ "step": 366
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9300048446767643e-05,
+ "loss": 0.747,
+ "step": 367
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.929528692816996e-05,
+ "loss": 0.7358,
+ "step": 368
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.929050986039355e-05,
+ "loss": 0.7759,
+ "step": 369
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9285717251429507e-05,
+ "loss": 0.7731,
+ "step": 370
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.928090910929492e-05,
+ "loss": 0.7679,
+ "step": 371
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.927608544203286e-05,
+ "loss": 0.7625,
+ "step": 372
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9271246257712368e-05,
+ "loss": 0.7134,
+ "step": 373
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.926639156442845e-05,
+ "loss": 0.7519,
+ "step": 374
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9261521370302035e-05,
+ "loss": 0.7832,
+ "step": 375
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.925663568348001e-05,
+ "loss": 0.7434,
+ "step": 376
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9251734512135155e-05,
+ "loss": 0.718,
+ "step": 377
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9246817864466166e-05,
+ "loss": 0.8239,
+ "step": 378
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9241885748697618e-05,
+ "loss": 0.7648,
+ "step": 379
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9236938173079974e-05,
+ "loss": 0.7714,
+ "step": 380
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9231975145889543e-05,
+ "loss": 0.8305,
+ "step": 381
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9226996675428497e-05,
+ "loss": 0.8,
+ "step": 382
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9222002770024827e-05,
+ "loss": 0.7684,
+ "step": 383
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.921699343803235e-05,
+ "loss": 0.7262,
+ "step": 384
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9211968687830693e-05,
+ "loss": 0.6804,
+ "step": 385
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9206928527825268e-05,
+ "loss": 0.7708,
+ "step": 386
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.920187296644727e-05,
+ "loss": 0.7454,
+ "step": 387
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.919680201215365e-05,
+ "loss": 0.7453,
+ "step": 388
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9191715673427113e-05,
+ "loss": 0.658,
+ "step": 389
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.91866139587761e-05,
+ "loss": 0.7105,
+ "step": 390
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.918149687673478e-05,
+ "loss": 0.7677,
+ "step": 391
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9176364435863005e-05,
+ "loss": 0.7289,
+ "step": 392
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9171216644746345e-05,
+ "loss": 0.723,
+ "step": 393
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9166053511996035e-05,
+ "loss": 0.6857,
+ "step": 394
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9160875046248974e-05,
+ "loss": 0.6864,
+ "step": 395
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.915568125616772e-05,
+ "loss": 0.6694,
+ "step": 396
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9150472150440443e-05,
+ "loss": 0.7279,
+ "step": 397
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9145247737780963e-05,
+ "loss": 0.6925,
+ "step": 398
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9140008026928683e-05,
+ "loss": 0.7604,
+ "step": 399
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9134753026648605e-05,
+ "loss": 0.831,
+ "step": 400
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9129482745731308e-05,
+ "loss": 0.8309,
+ "step": 401
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9124197192992927e-05,
+ "loss": 0.6992,
+ "step": 402
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.911889637727515e-05,
+ "loss": 0.7853,
+ "step": 403
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9113580307445196e-05,
+ "loss": 0.7525,
+ "step": 404
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9108248992395797e-05,
+ "loss": 0.7856,
+ "step": 405
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.910290244104519e-05,
+ "loss": 0.7181,
+ "step": 406
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9097540662337102e-05,
+ "loss": 0.7347,
+ "step": 407
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9092163665240726e-05,
+ "loss": 0.7439,
+ "step": 408
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9086771458750716e-05,
+ "loss": 0.7289,
+ "step": 409
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9081364051887173e-05,
+ "loss": 0.7958,
+ "step": 410
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9075941453695617e-05,
+ "loss": 0.7566,
+ "step": 411
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9070503673246982e-05,
+ "loss": 0.7548,
+ "step": 412
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9065050719637604e-05,
+ "loss": 0.7936,
+ "step": 413
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9059582601989196e-05,
+ "loss": 0.7236,
+ "step": 414
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9054099329448835e-05,
+ "loss": 0.76,
+ "step": 415
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.904860091118896e-05,
+ "loss": 0.7316,
+ "step": 416
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.904308735640733e-05,
+ "loss": 0.7422,
+ "step": 417
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9037558674327036e-05,
+ "loss": 0.7552,
+ "step": 418
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9032014874196476e-05,
+ "loss": 0.6733,
+ "step": 419
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9026455965289326e-05,
+ "loss": 0.775,
+ "step": 420
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9020881956904543e-05,
+ "loss": 0.7677,
+ "step": 421
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.901529285836635e-05,
+ "loss": 0.6928,
+ "step": 422
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.900968867902419e-05,
+ "loss": 0.7626,
+ "step": 423
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9004069428252765e-05,
+ "loss": 0.7634,
+ "step": 424
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.899843511545196e-05,
+ "loss": 0.7291,
+ "step": 425
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8992785750046866e-05,
+ "loss": 0.7965,
+ "step": 426
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.898712134148776e-05,
+ "loss": 0.722,
+ "step": 427
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8981441899250082e-05,
+ "loss": 0.835,
+ "step": 428
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.897574743283441e-05,
+ "loss": 0.7233,
+ "step": 429
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8970037951766468e-05,
+ "loss": 0.7702,
+ "step": 430
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.896431346559708e-05,
+ "loss": 0.7007,
+ "step": 431
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.895857398390219e-05,
+ "loss": 0.768,
+ "step": 432
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.895281951628281e-05,
+ "loss": 0.8233,
+ "step": 433
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8947050072365033e-05,
+ "loss": 0.722,
+ "step": 434
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.89412656618e-05,
+ "loss": 0.7636,
+ "step": 435
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8935466294263876e-05,
+ "loss": 0.7074,
+ "step": 436
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8929651979457874e-05,
+ "loss": 0.7719,
+ "step": 437
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.892382272710818e-05,
+ "loss": 0.7991,
+ "step": 438
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.891797854696599e-05,
+ "loss": 0.7031,
+ "step": 439
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.891211944880746e-05,
+ "loss": 0.6973,
+ "step": 440
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.89062454424337e-05,
+ "loss": 0.7632,
+ "step": 441
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8900356537670768e-05,
+ "loss": 0.7518,
+ "step": 442
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8894452744369632e-05,
+ "loss": 0.7463,
+ "step": 443
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8888534072406182e-05,
+ "loss": 0.6351,
+ "step": 444
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.888260053168118e-05,
+ "loss": 0.7522,
+ "step": 445
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8876652132120267e-05,
+ "loss": 0.7774,
+ "step": 446
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.887068888367394e-05,
+ "loss": 0.7618,
+ "step": 447
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8864710796317537e-05,
+ "loss": 0.7236,
+ "step": 448
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.885871788005122e-05,
+ "loss": 0.7338,
+ "step": 449
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8852710144899948e-05,
+ "loss": 0.7918,
+ "step": 450
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.884668760091348e-05,
+ "loss": 0.7942,
+ "step": 451
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8840650258166343e-05,
+ "loss": 0.7012,
+ "step": 452
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8834598126757812e-05,
+ "loss": 0.7231,
+ "step": 453
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8828531216811912e-05,
+ "loss": 0.6792,
+ "step": 454
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.882244953847739e-05,
+ "loss": 0.806,
+ "step": 455
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8816353101927686e-05,
+ "loss": 0.7431,
+ "step": 456
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.881024191736093e-05,
+ "loss": 0.7726,
+ "step": 457
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8804115994999933e-05,
+ "loss": 0.7167,
+ "step": 458
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8797975345092153e-05,
+ "loss": 0.7144,
+ "step": 459
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8791819977909682e-05,
+ "loss": 0.7688,
+ "step": 460
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8785649903749236e-05,
+ "loss": 0.7409,
+ "step": 461
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8779465132932124e-05,
+ "loss": 0.7538,
+ "step": 462
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.877326567580425e-05,
+ "loss": 0.6904,
+ "step": 463
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8767051542736084e-05,
+ "loss": 0.7374,
+ "step": 464
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8760822744122637e-05,
+ "loss": 0.7896,
+ "step": 465
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8754579290383465e-05,
+ "loss": 0.758,
+ "step": 466
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8748321191962625e-05,
+ "loss": 0.7472,
+ "step": 467
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8742048459328684e-05,
+ "loss": 0.6826,
+ "step": 468
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8735761102974684e-05,
+ "loss": 0.8049,
+ "step": 469
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.872945913341813e-05,
+ "loss": 0.7312,
+ "step": 470
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8723142561200973e-05,
+ "loss": 0.7134,
+ "step": 471
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.871681139688959e-05,
+ "loss": 0.7707,
+ "step": 472
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8710465651074766e-05,
+ "loss": 0.7415,
+ "step": 473
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8704105334371683e-05,
+ "loss": 0.7225,
+ "step": 474
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8697730457419893e-05,
+ "loss": 0.7307,
+ "step": 475
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8691341030883306e-05,
+ "loss": 0.7629,
+ "step": 476
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.868493706545017e-05,
+ "loss": 0.7023,
+ "step": 477
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8678518571833054e-05,
+ "loss": 0.7319,
+ "step": 478
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.867208556076883e-05,
+ "loss": 0.7793,
+ "step": 479
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.866563804301866e-05,
+ "loss": 0.7369,
+ "step": 480
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8659176029367957e-05,
+ "loss": 0.7768,
+ "step": 481
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8652699530626398e-05,
+ "loss": 0.7644,
+ "step": 482
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8646208557627888e-05,
+ "loss": 0.7298,
+ "step": 483
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8639703121230542e-05,
+ "loss": 0.818,
+ "step": 484
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8633183232316668e-05,
+ "loss": 0.7176,
+ "step": 485
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8626648901792753e-05,
+ "loss": 0.7404,
+ "step": 486
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8620100140589443e-05,
+ "loss": 0.7386,
+ "step": 487
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8613536959661518e-05,
+ "loss": 0.7494,
+ "step": 488
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8606959369987885e-05,
+ "loss": 0.7542,
+ "step": 489
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.860036738257155e-05,
+ "loss": 0.7897,
+ "step": 490
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8593761008439614e-05,
+ "loss": 0.7194,
+ "step": 491
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8587140258643225e-05,
+ "loss": 0.7671,
+ "step": 492
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8580505144257595e-05,
+ "loss": 0.7187,
+ "step": 493
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.857385567638196e-05,
+ "loss": 0.7791,
+ "step": 494
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8567191866139564e-05,
+ "loss": 0.7646,
+ "step": 495
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8560513724677644e-05,
+ "loss": 0.7385,
+ "step": 496
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8553821263167417e-05,
+ "loss": 0.7654,
+ "step": 497
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8547114492804042e-05,
+ "loss": 0.7475,
+ "step": 498
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8540393424806627e-05,
+ "loss": 0.7424,
+ "step": 499
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8533658070418186e-05,
+ "loss": 0.7508,
+ "step": 500
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.852690844090564e-05,
+ "loss": 0.7756,
+ "step": 501
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8520144547559783e-05,
+ "loss": 0.8042,
+ "step": 502
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8513366401695277e-05,
+ "loss": 0.7713,
+ "step": 503
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8506574014650613e-05,
+ "loss": 0.6879,
+ "step": 504
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.849976739778812e-05,
+ "loss": 0.7292,
+ "step": 505
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.849294656249392e-05,
+ "loss": 0.8054,
+ "step": 506
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8486111520177925e-05,
+ "loss": 0.7433,
+ "step": 507
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8479262282273805e-05,
+ "loss": 0.7858,
+ "step": 508
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8472398860238988e-05,
+ "loss": 0.8021,
+ "step": 509
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8465521265554623e-05,
+ "loss": 0.6885,
+ "step": 510
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8458629509725565e-05,
+ "loss": 0.7997,
+ "step": 511
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.845172360428036e-05,
+ "loss": 0.7664,
+ "step": 512
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8444803560771222e-05,
+ "loss": 0.7633,
+ "step": 513
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8437869390774022e-05,
+ "loss": 0.7224,
+ "step": 514
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8430921105888254e-05,
+ "loss": 0.7068,
+ "step": 515
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.842395871773702e-05,
+ "loss": 0.7674,
+ "step": 516
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.841698223796703e-05,
+ "loss": 0.739,
+ "step": 517
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8409991678248548e-05,
+ "loss": 0.6821,
+ "step": 518
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8402987050275408e-05,
+ "loss": 0.6922,
+ "step": 519
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.839596836576496e-05,
+ "loss": 0.7271,
+ "step": 520
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.838893563645808e-05,
+ "loss": 0.6997,
+ "step": 521
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8381888874119135e-05,
+ "loss": 0.7529,
+ "step": 522
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.837482809053597e-05,
+ "loss": 0.7828,
+ "step": 523
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8367753297519874e-05,
+ "loss": 0.7104,
+ "step": 524
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8360664506905587e-05,
+ "loss": 0.7098,
+ "step": 525
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8353561730551243e-05,
+ "loss": 0.7583,
+ "step": 526
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8346444980338394e-05,
+ "loss": 0.772,
+ "step": 527
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8339314268171953e-05,
+ "loss": 0.7424,
+ "step": 528
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8332169605980193e-05,
+ "loss": 0.7476,
+ "step": 529
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8325011005714726e-05,
+ "loss": 0.6358,
+ "step": 530
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8317838479350473e-05,
+ "loss": 0.7477,
+ "step": 531
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.831065203888565e-05,
+ "loss": 0.777,
+ "step": 532
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.830345169634176e-05,
+ "loss": 0.6831,
+ "step": 533
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8296237463763543e-05,
+ "loss": 0.7137,
+ "step": 534
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8289009353218995e-05,
+ "loss": 0.6837,
+ "step": 535
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8281767376799315e-05,
+ "loss": 0.7384,
+ "step": 536
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.82745115466189e-05,
+ "loss": 0.7228,
+ "step": 537
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8267241874815312e-05,
+ "loss": 0.7701,
+ "step": 538
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8259958373549285e-05,
+ "loss": 0.755,
+ "step": 539
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8252661055004672e-05,
+ "loss": 0.7743,
+ "step": 540
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.824534993138845e-05,
+ "loss": 0.6656,
+ "step": 541
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.823802501493068e-05,
+ "loss": 0.7105,
+ "step": 542
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8230686317884502e-05,
+ "loss": 0.7695,
+ "step": 543
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8223333852526107e-05,
+ "loss": 0.7832,
+ "step": 544
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8215967631154718e-05,
+ "loss": 0.6487,
+ "step": 545
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8208587666092566e-05,
+ "loss": 0.7315,
+ "step": 546
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8201193969684875e-05,
+ "loss": 0.6399,
+ "step": 547
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.819378655429984e-05,
+ "loss": 0.763,
+ "step": 548
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8186365432328605e-05,
+ "loss": 0.768,
+ "step": 549
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8178930616185236e-05,
+ "loss": 0.6606,
+ "step": 550
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8171482118306712e-05,
+ "loss": 0.7162,
+ "step": 551
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8164019951152903e-05,
+ "loss": 0.7356,
+ "step": 552
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8156544127206536e-05,
+ "loss": 0.6631,
+ "step": 553
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8149054658973194e-05,
+ "loss": 0.7329,
+ "step": 554
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.814155155898127e-05,
+ "loss": 0.7678,
+ "step": 555
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8134034839781972e-05,
+ "loss": 0.78,
+ "step": 556
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8126504513949287e-05,
+ "loss": 0.7156,
+ "step": 557
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.811896059407996e-05,
+ "loss": 0.7617,
+ "step": 558
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.811140309279348e-05,
+ "loss": 0.734,
+ "step": 559
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8103832022732056e-05,
+ "loss": 0.76,
+ "step": 560
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8096247396560585e-05,
+ "loss": 0.7267,
+ "step": 561
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8088649226966658e-05,
+ "loss": 0.7378,
+ "step": 562
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8081037526660505e-05,
+ "loss": 0.7397,
+ "step": 563
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8073412308375e-05,
+ "loss": 0.7228,
+ "step": 564
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8065773584865623e-05,
+ "loss": 0.799,
+ "step": 565
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.805812136891046e-05,
+ "loss": 0.7296,
+ "step": 566
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8050455673310142e-05,
+ "loss": 0.7323,
+ "step": 567
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.804277651088787e-05,
+ "loss": 0.7852,
+ "step": 568
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8035083894489364e-05,
+ "loss": 0.7103,
+ "step": 569
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8027377836982857e-05,
+ "loss": 0.7476,
+ "step": 570
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8019658351259056e-05,
+ "loss": 0.7216,
+ "step": 571
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8011925450231135e-05,
+ "loss": 0.7472,
+ "step": 572
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8004179146834712e-05,
+ "loss": 0.7876,
+ "step": 573
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7996419454027828e-05,
+ "loss": 0.7262,
+ "step": 574
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.79886463847909e-05,
+ "loss": 0.7282,
+ "step": 575
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7980859952126755e-05,
+ "loss": 0.7443,
+ "step": 576
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7973060169060546e-05,
+ "loss": 0.7761,
+ "step": 577
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.796524704863977e-05,
+ "loss": 0.7091,
+ "step": 578
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.795742060393424e-05,
+ "loss": 0.7018,
+ "step": 579
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7949580848036045e-05,
+ "loss": 0.6423,
+ "step": 580
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7941727794059553e-05,
+ "loss": 0.7801,
+ "step": 581
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7933861455141365e-05,
+ "loss": 0.6975,
+ "step": 582
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.792598184444032e-05,
+ "loss": 0.6777,
+ "step": 583
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7918088975137445e-05,
+ "loss": 0.7582,
+ "step": 584
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7910182860435954e-05,
+ "loss": 0.7618,
+ "step": 585
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7902263513561214e-05,
+ "loss": 0.7453,
+ "step": 586
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7894330947760728e-05,
+ "loss": 0.804,
+ "step": 587
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.788638517630411e-05,
+ "loss": 0.7503,
+ "step": 588
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7878426212483066e-05,
+ "loss": 0.7454,
+ "step": 589
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7870454069611375e-05,
+ "loss": 0.7957,
+ "step": 590
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.786246876102485e-05,
+ "loss": 0.6671,
+ "step": 591
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7854470300081338e-05,
+ "loss": 0.7228,
+ "step": 592
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7846458700160685e-05,
+ "loss": 0.6408,
+ "step": 593
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7838433974664714e-05,
+ "loss": 0.7903,
+ "step": 594
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7830396137017202e-05,
+ "loss": 0.7714,
+ "step": 595
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7822345200663874e-05,
+ "loss": 0.7496,
+ "step": 596
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7814281179072345e-05,
+ "loss": 0.7178,
+ "step": 597
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.780620408573213e-05,
+ "loss": 0.6921,
+ "step": 598
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7798113934154623e-05,
+ "loss": 0.7662,
+ "step": 599
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7790010737873035e-05,
+ "loss": 0.6984,
+ "step": 600
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.778189451044242e-05,
+ "loss": 0.7438,
+ "step": 601
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.777376526543962e-05,
+ "loss": 0.7932,
+ "step": 602
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.776562301646326e-05,
+ "loss": 0.7217,
+ "step": 603
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.775746777713371e-05,
+ "loss": 0.7473,
+ "step": 604
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7749299561093076e-05,
+ "loss": 0.7434,
+ "step": 605
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7741118382005172e-05,
+ "loss": 0.745,
+ "step": 606
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7732924253555487e-05,
+ "loss": 0.6984,
+ "step": 607
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.772471718945119e-05,
+ "loss": 0.6934,
+ "step": 608
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7716497203421072e-05,
+ "loss": 0.7077,
+ "step": 609
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7708264309215543e-05,
+ "loss": 0.6832,
+ "step": 610
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.770001852060661e-05,
+ "loss": 0.7333,
+ "step": 611
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7691759851387852e-05,
+ "loss": 0.7715,
+ "step": 612
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7683488315374386e-05,
+ "loss": 0.6889,
+ "step": 613
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7675203926402863e-05,
+ "loss": 0.7479,
+ "step": 614
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7666906698331426e-05,
+ "loss": 0.7633,
+ "step": 615
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.76585966450397e-05,
+ "loss": 0.671,
+ "step": 616
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.765027378042876e-05,
+ "loss": 0.668,
+ "step": 617
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.764193811842112e-05,
+ "loss": 0.7501,
+ "step": 618
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.763358967296069e-05,
+ "loss": 0.7715,
+ "step": 619
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.762522845801277e-05,
+ "loss": 0.7788,
+ "step": 620
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7616854487564024e-05,
+ "loss": 0.7613,
+ "step": 621
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7608467775622448e-05,
+ "loss": 0.676,
+ "step": 622
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7600068336217355e-05,
+ "loss": 0.7353,
+ "step": 623
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.759165618339935e-05,
+ "loss": 0.7318,
+ "step": 624
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7583231331240297e-05,
+ "loss": 0.7359,
+ "step": 625
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7574793793833312e-05,
+ "loss": 0.7538,
+ "step": 626
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7566343585292732e-05,
+ "loss": 0.7741,
+ "step": 627
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7557880719754082e-05,
+ "loss": 0.6738,
+ "step": 628
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7549405211374072e-05,
+ "loss": 0.7337,
+ "step": 629
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7540917074330543e-05,
+ "loss": 0.7748,
+ "step": 630
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7532416322822473e-05,
+ "loss": 0.7729,
+ "step": 631
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7523902971069944e-05,
+ "loss": 0.6939,
+ "step": 632
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7515377033314113e-05,
+ "loss": 0.7184,
+ "step": 633
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7506838523817183e-05,
+ "loss": 0.8101,
+ "step": 634
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7498287456862398e-05,
+ "loss": 0.7527,
+ "step": 635
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7489723846754e-05,
+ "loss": 0.6881,
+ "step": 636
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7481147707817227e-05,
+ "loss": 0.6766,
+ "step": 637
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7472559054398256e-05,
+ "loss": 0.7568,
+ "step": 638
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.746395790086421e-05,
+ "loss": 0.6968,
+ "step": 639
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7455344261603116e-05,
+ "loss": 0.6614,
+ "step": 640
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7446718151023894e-05,
+ "loss": 0.7461,
+ "step": 641
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7438079583556326e-05,
+ "loss": 0.7077,
+ "step": 642
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7429428573651023e-05,
+ "loss": 0.6636,
+ "step": 643
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.742076513577942e-05,
+ "loss": 0.723,
+ "step": 644
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7412089284433734e-05,
+ "loss": 0.72,
+ "step": 645
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7403401034126954e-05,
+ "loss": 0.7399,
+ "step": 646
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7394700399392804e-05,
+ "loss": 0.7651,
+ "step": 647
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7385987394785723e-05,
+ "loss": 0.7938,
+ "step": 648
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7377262034880854e-05,
+ "loss": 0.7333,
+ "step": 649
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7368524334273998e-05,
+ "loss": 0.7182,
+ "step": 650
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7359774307581602e-05,
+ "loss": 0.7265,
+ "step": 651
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7351011969440728e-05,
+ "loss": 0.7842,
+ "step": 652
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.734223733450904e-05,
+ "loss": 0.7575,
+ "step": 653
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.733345041746477e-05,
+ "loss": 0.7225,
+ "step": 654
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.732465123300669e-05,
+ "loss": 0.6798,
+ "step": 655
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7315839795854103e-05,
+ "loss": 0.7377,
+ "step": 656
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.73070161207468e-05,
+ "loss": 0.7426,
+ "step": 657
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7298180222445043e-05,
+ "loss": 0.7489,
+ "step": 658
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7289332115729554e-05,
+ "loss": 0.7117,
+ "step": 659
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7280471815401453e-05,
+ "loss": 0.6949,
+ "step": 660
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7271599336282285e-05,
+ "loss": 0.7434,
+ "step": 661
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7262714693213948e-05,
+ "loss": 0.7,
+ "step": 662
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7253817901058698e-05,
+ "loss": 0.7897,
+ "step": 663
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7244908974699112e-05,
+ "loss": 0.665,
+ "step": 664
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7235987929038056e-05,
+ "loss": 0.7106,
+ "step": 665
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7227054778998682e-05,
+ "loss": 0.7,
+ "step": 666
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.721810953952439e-05,
+ "loss": 0.7304,
+ "step": 667
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.720915222557879e-05,
+ "loss": 0.7114,
+ "step": 668
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7200182852145705e-05,
+ "loss": 0.6308,
+ "step": 669
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7191201434229118e-05,
+ "loss": 0.7222,
+ "step": 670
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7182207986853176e-05,
+ "loss": 0.7566,
+ "step": 671
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7173202525062133e-05,
+ "loss": 0.6974,
+ "step": 672
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.716418506392035e-05,
+ "loss": 0.7604,
+ "step": 673
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7155155618512255e-05,
+ "loss": 0.7254,
+ "step": 674
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7146114203942332e-05,
+ "loss": 0.7536,
+ "step": 675
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7137060835335076e-05,
+ "loss": 0.7885,
+ "step": 676
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.712799552783499e-05,
+ "loss": 0.742,
+ "step": 677
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7118918296606536e-05,
+ "loss": 0.7233,
+ "step": 678
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7109829156834133e-05,
+ "loss": 0.6665,
+ "step": 679
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7100728123722115e-05,
+ "loss": 0.7311,
+ "step": 680
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7091615212494714e-05,
+ "loss": 0.7532,
+ "step": 681
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7082490438396027e-05,
+ "loss": 0.7864,
+ "step": 682
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.707335381669e-05,
+ "loss": 0.758,
+ "step": 683
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7064205362660397e-05,
+ "loss": 0.7534,
+ "step": 684
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7055045091610772e-05,
+ "loss": 0.7932,
+ "step": 685
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7045873018864452e-05,
+ "loss": 0.668,
+ "step": 686
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.70366891597645e-05,
+ "loss": 0.7377,
+ "step": 687
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.70274935296737e-05,
+ "loss": 0.7681,
+ "step": 688
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.701828614397452e-05,
+ "loss": 0.7527,
+ "step": 689
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.7009067018069106e-05,
+ "loss": 0.648,
+ "step": 690
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6999836167379228e-05,
+ "loss": 0.7046,
+ "step": 691
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6990593607346276e-05,
+ "loss": 0.7357,
+ "step": 692
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.698133935343123e-05,
+ "loss": 0.7379,
+ "step": 693
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6972073421114626e-05,
+ "loss": 0.7167,
+ "step": 694
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6962795825896537e-05,
+ "loss": 0.6965,
+ "step": 695
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.695350658329655e-05,
+ "loss": 0.774,
+ "step": 696
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6944205708853735e-05,
+ "loss": 0.7159,
+ "step": 697
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6934893218126607e-05,
+ "loss": 0.7091,
+ "step": 698
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6925569126693135e-05,
+ "loss": 0.7046,
+ "step": 699
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6916233450150675e-05,
+ "loss": 0.759,
+ "step": 700
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6906886204115974e-05,
+ "loss": 0.7444,
+ "step": 701
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6897527404225128e-05,
+ "loss": 0.7198,
+ "step": 702
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6888157066133555e-05,
+ "loss": 0.7851,
+ "step": 703
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6878775205515983e-05,
+ "loss": 0.705,
+ "step": 704
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6869381838066412e-05,
+ "loss": 0.7089,
+ "step": 705
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.685997697949809e-05,
+ "loss": 0.7445,
+ "step": 706
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6850560645543485e-05,
+ "loss": 0.7166,
+ "step": 707
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.684113285195427e-05,
+ "loss": 0.7729,
+ "step": 708
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6831693614501272e-05,
+ "loss": 0.6822,
+ "step": 709
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6822242948974475e-05,
+ "loss": 0.7347,
+ "step": 710
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6812780871182976e-05,
+ "loss": 0.6116,
+ "step": 711
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6803307396954962e-05,
+ "loss": 0.7191,
+ "step": 712
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6793822542137683e-05,
+ "loss": 0.7065,
+ "step": 713
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6784326322597427e-05,
+ "loss": 0.695,
+ "step": 714
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6774818754219495e-05,
+ "loss": 0.7195,
+ "step": 715
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.676529985290817e-05,
+ "loss": 0.6301,
+ "step": 716
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.675576963458669e-05,
+ "loss": 0.7644,
+ "step": 717
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.674622811519723e-05,
+ "loss": 0.7535,
+ "step": 718
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.673667531070087e-05,
+ "loss": 0.8186,
+ "step": 719
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.672711123707756e-05,
+ "loss": 0.7972,
+ "step": 720
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6717535910326108e-05,
+ "loss": 0.7055,
+ "step": 721
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6707949346464147e-05,
+ "loss": 0.74,
+ "step": 722
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.66983515615281e-05,
+ "loss": 0.7038,
+ "step": 723
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6688742571573167e-05,
+ "loss": 0.774,
+ "step": 724
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6679122392673284e-05,
+ "loss": 0.6854,
+ "step": 725
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6669491040921123e-05,
+ "loss": 0.6726,
+ "step": 726
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.665984853242802e-05,
+ "loss": 0.7562,
+ "step": 727
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6650194883323995e-05,
+ "loss": 0.6689,
+ "step": 728
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.664053010975769e-05,
+ "loss": 0.8053,
+ "step": 729
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6630854227896364e-05,
+ "loss": 0.7467,
+ "step": 730
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6621167253925857e-05,
+ "loss": 0.7171,
+ "step": 731
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6611469204050566e-05,
+ "loss": 0.7291,
+ "step": 732
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6601760094493408e-05,
+ "loss": 0.7419,
+ "step": 733
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6592039941495803e-05,
+ "loss": 0.7165,
+ "step": 734
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6582308761317652e-05,
+ "loss": 0.7541,
+ "step": 735
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6572566570237293e-05,
+ "loss": 0.692,
+ "step": 736
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.656281338455149e-05,
+ "loss": 0.703,
+ "step": 737
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6553049220575392e-05,
+ "loss": 0.7892,
+ "step": 738
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6543274094642522e-05,
+ "loss": 0.7247,
+ "step": 739
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6533488023104732e-05,
+ "loss": 0.7972,
+ "step": 740
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6523691022332184e-05,
+ "loss": 0.6874,
+ "step": 741
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6513883108713326e-05,
+ "loss": 0.8073,
+ "step": 742
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6504064298654864e-05,
+ "loss": 0.7439,
+ "step": 743
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6494234608581722e-05,
+ "loss": 0.7338,
+ "step": 744
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6484394054937033e-05,
+ "loss": 0.692,
+ "step": 745
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.64745426541821e-05,
+ "loss": 0.7688,
+ "step": 746
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6464680422796367e-05,
+ "loss": 0.6845,
+ "step": 747
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.64548073772774e-05,
+ "loss": 0.7207,
+ "step": 748
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6444923534140856e-05,
+ "loss": 0.6852,
+ "step": 749
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.643502890992045e-05,
+ "loss": 0.817,
+ "step": 750
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.642512352116793e-05,
+ "loss": 0.7343,
+ "step": 751
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.641520738445306e-05,
+ "loss": 0.7457,
+ "step": 752
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6405280516363576e-05,
+ "loss": 0.6987,
+ "step": 753
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.639534293350517e-05,
+ "loss": 0.725,
+ "step": 754
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6385394652501444e-05,
+ "loss": 0.7093,
+ "step": 755
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.637543568999392e-05,
+ "loss": 0.7178,
+ "step": 756
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6365466062641974e-05,
+ "loss": 0.7102,
+ "step": 757
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6355485787122814e-05,
+ "loss": 0.8084,
+ "step": 758
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6345494880131476e-05,
+ "loss": 0.6814,
+ "step": 759
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.633549335838077e-05,
+ "loss": 0.7084,
+ "step": 760
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.632548123860127e-05,
+ "loss": 0.6958,
+ "step": 761
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6315458537541272e-05,
+ "loss": 0.6878,
+ "step": 762
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6305425271966772e-05,
+ "loss": 0.7523,
+ "step": 763
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6295381458661447e-05,
+ "loss": 0.6973,
+ "step": 764
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6285327114426608e-05,
+ "loss": 0.7219,
+ "step": 765
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.627526225608119e-05,
+ "loss": 0.7283,
+ "step": 766
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6265186900461715e-05,
+ "loss": 0.6766,
+ "step": 767
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6255101064422252e-05,
+ "loss": 0.612,
+ "step": 768
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6245004764834423e-05,
+ "loss": 0.6537,
+ "step": 769
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6234898018587336e-05,
+ "loss": 0.7585,
+ "step": 770
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6224780842587586e-05,
+ "loss": 0.7195,
+ "step": 771
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6214653253759206e-05,
+ "loss": 0.7204,
+ "step": 772
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6204515269043653e-05,
+ "loss": 0.7923,
+ "step": 773
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.619436690539977e-05,
+ "loss": 0.7354,
+ "step": 774
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6184208179803772e-05,
+ "loss": 0.7421,
+ "step": 775
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.617403910924919e-05,
+ "loss": 0.6847,
+ "step": 776
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6163859710746878e-05,
+ "loss": 0.7252,
+ "step": 777
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6153670001324955e-05,
+ "loss": 0.6994,
+ "step": 778
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6143469998028792e-05,
+ "loss": 0.7239,
+ "step": 779
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.613325971792098e-05,
+ "loss": 0.6799,
+ "step": 780
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6123039178081303e-05,
+ "loss": 0.7778,
+ "step": 781
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.61128083956067e-05,
+ "loss": 0.7374,
+ "step": 782
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.610256738761125e-05,
+ "loss": 0.7459,
+ "step": 783
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.609231617122614e-05,
+ "loss": 0.728,
+ "step": 784
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6082054763599627e-05,
+ "loss": 0.7002,
+ "step": 785
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.607178318189702e-05,
+ "loss": 0.7573,
+ "step": 786
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6061501443300644e-05,
+ "loss": 0.7594,
+ "step": 787
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.605120956500982e-05,
+ "loss": 0.677,
+ "step": 788
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6040907564240823e-05,
+ "loss": 0.7935,
+ "step": 789
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.6030595458226872e-05,
+ "loss": 0.6905,
+ "step": 790
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.6020273264218076e-05,
+ "loss": 0.6745,
+ "step": 791
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.600994099948143e-05,
+ "loss": 0.6999,
+ "step": 792
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5999598681300766e-05,
+ "loss": 0.7516,
+ "step": 793
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5989246326976746e-05,
+ "loss": 0.6977,
+ "step": 794
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5978883953826814e-05,
+ "loss": 0.7142,
+ "step": 795
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.596851157918517e-05,
+ "loss": 0.7289,
+ "step": 796
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5958129220402745e-05,
+ "loss": 0.7241,
+ "step": 797
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.594773689484718e-05,
+ "loss": 0.7022,
+ "step": 798
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.593733461990278e-05,
+ "loss": 0.7725,
+ "step": 799
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5926922412970495e-05,
+ "loss": 0.6624,
+ "step": 800
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5916500291467892e-05,
+ "loss": 0.7554,
+ "step": 801
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5906068272829122e-05,
+ "loss": 0.7506,
+ "step": 802
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.589562637450489e-05,
+ "loss": 0.6617,
+ "step": 803
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5885174613962427e-05,
+ "loss": 0.8092,
+ "step": 804
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5874713008685464e-05,
+ "loss": 0.7618,
+ "step": 805
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5864241576174205e-05,
+ "loss": 0.672,
+ "step": 806
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5853760333945282e-05,
+ "loss": 0.6711,
+ "step": 807
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5843269299531748e-05,
+ "loss": 0.6877,
+ "step": 808
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.583276849048303e-05,
+ "loss": 0.7301,
+ "step": 809
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5822257924364902e-05,
+ "loss": 0.686,
+ "step": 810
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.581173761875947e-05,
+ "loss": 0.6622,
+ "step": 811
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.580120759126512e-05,
+ "loss": 0.7539,
+ "step": 812
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.579066785949652e-05,
+ "loss": 0.6792,
+ "step": 813
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5780118441084544e-05,
+ "loss": 0.7293,
+ "step": 814
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5769559353676297e-05,
+ "loss": 0.725,
+ "step": 815
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5758990614935042e-05,
+ "loss": 0.69,
+ "step": 816
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5748412242540193e-05,
+ "loss": 0.7735,
+ "step": 817
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5737824254187275e-05,
+ "loss": 0.7212,
+ "step": 818
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5727226667587898e-05,
+ "loss": 0.7271,
+ "step": 819
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5716619500469735e-05,
+ "loss": 0.7178,
+ "step": 820
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5706002770576478e-05,
+ "loss": 0.755,
+ "step": 821
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5695376495667828e-05,
+ "loss": 0.7232,
+ "step": 822
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.568474069351943e-05,
+ "loss": 0.7688,
+ "step": 823
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5674095381922886e-05,
+ "loss": 0.7791,
+ "step": 824
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5663440578685706e-05,
+ "loss": 0.7295,
+ "step": 825
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.565277630163126e-05,
+ "loss": 0.7026,
+ "step": 826
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.564210256859878e-05,
+ "loss": 0.7634,
+ "step": 827
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.563141939744332e-05,
+ "loss": 0.6542,
+ "step": 828
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.56207268060357e-05,
+ "loss": 0.7608,
+ "step": 829
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5610024812262526e-05,
+ "loss": 0.6642,
+ "step": 830
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.559931343402612e-05,
+ "loss": 0.7594,
+ "step": 831
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5588592689244492e-05,
+ "loss": 0.803,
+ "step": 832
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5577862595851335e-05,
+ "loss": 0.7025,
+ "step": 833
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.556712317179598e-05,
+ "loss": 0.7071,
+ "step": 834
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5556374435043353e-05,
+ "loss": 0.7147,
+ "step": 835
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5545616403573977e-05,
+ "loss": 0.7378,
+ "step": 836
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5534849095383902e-05,
+ "loss": 0.7308,
+ "step": 837
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5524072528484716e-05,
+ "loss": 0.7065,
+ "step": 838
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5513286720903488e-05,
+ "loss": 0.6893,
+ "step": 839
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5502491690682732e-05,
+ "loss": 0.7197,
+ "step": 840
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5491687455880414e-05,
+ "loss": 0.6742,
+ "step": 841
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5480874034569876e-05,
+ "loss": 0.7141,
+ "step": 842
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.547005144483984e-05,
+ "loss": 0.6902,
+ "step": 843
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5459219704794358e-05,
+ "loss": 0.6097,
+ "step": 844
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5448378832552796e-05,
+ "loss": 0.7316,
+ "step": 845
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5437528846249783e-05,
+ "loss": 0.7249,
+ "step": 846
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.542666976403521e-05,
+ "loss": 0.6905,
+ "step": 847
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.541580160407417e-05,
+ "loss": 0.7308,
+ "step": 848
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5404924384546955e-05,
+ "loss": 0.7789,
+ "step": 849
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5394038123649e-05,
+ "loss": 0.6475,
+ "step": 850
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5383142839590872e-05,
+ "loss": 0.7723,
+ "step": 851
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5372238550598225e-05,
+ "loss": 0.6571,
+ "step": 852
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.536132527491178e-05,
+ "loss": 0.6882,
+ "step": 853
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.53504030307873e-05,
+ "loss": 0.7439,
+ "step": 854
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5339471836495524e-05,
+ "loss": 0.7451,
+ "step": 855
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5328531710322198e-05,
+ "loss": 0.7189,
+ "step": 856
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5317582670567986e-05,
+ "loss": 0.7362,
+ "step": 857
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5306624735548465e-05,
+ "loss": 0.7175,
+ "step": 858
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.52956579235941e-05,
+ "loss": 0.7471,
+ "step": 859
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.52846822530502e-05,
+ "loss": 0.7288,
+ "step": 860
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.527369774227689e-05,
+ "loss": 0.7794,
+ "step": 861
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.526270440964909e-05,
+ "loss": 0.7213,
+ "step": 862
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5251702273556475e-05,
+ "loss": 0.7363,
+ "step": 863
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5240691352403441e-05,
+ "loss": 0.7481,
+ "step": 864
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5229671664609088e-05,
+ "loss": 0.7633,
+ "step": 865
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.521864322860717e-05,
+ "loss": 0.6936,
+ "step": 866
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5207606062846092e-05,
+ "loss": 0.7934,
+ "step": 867
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5196560185788842e-05,
+ "loss": 0.6964,
+ "step": 868
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5185505615912998e-05,
+ "loss": 0.7197,
+ "step": 869
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.517444237171067e-05,
+ "loss": 0.7654,
+ "step": 870
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5163370471688471e-05,
+ "loss": 0.7399,
+ "step": 871
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5152289934367516e-05,
+ "loss": 0.6532,
+ "step": 872
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5141200778283345e-05,
+ "loss": 0.6571,
+ "step": 873
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5130103021985929e-05,
+ "loss": 0.7925,
+ "step": 874
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5118996684039621e-05,
+ "loss": 0.7207,
+ "step": 875
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5107881783023138e-05,
+ "loss": 0.6658,
+ "step": 876
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.50967583375295e-05,
+ "loss": 0.7332,
+ "step": 877
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5085626366166039e-05,
+ "loss": 0.7581,
+ "step": 878
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5074485887554351e-05,
+ "loss": 0.678,
+ "step": 879
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5063336920330244e-05,
+ "loss": 0.7198,
+ "step": 880
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5052179483143752e-05,
+ "loss": 0.7532,
+ "step": 881
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5041013594659051e-05,
+ "loss": 0.7674,
+ "step": 882
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5029839273554469e-05,
+ "loss": 0.7071,
+ "step": 883
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5018656538522442e-05,
+ "loss": 0.7367,
+ "step": 884
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5007465408269477e-05,
+ "loss": 0.6277,
+ "step": 885
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4996265901516116e-05,
+ "loss": 0.6893,
+ "step": 886
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4985058036996929e-05,
+ "loss": 0.7545,
+ "step": 887
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4973841833460457e-05,
+ "loss": 0.6983,
+ "step": 888
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4962617309669192e-05,
+ "loss": 0.7104,
+ "step": 889
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4951384484399542e-05,
+ "loss": 0.6923,
+ "step": 890
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4940143376441812e-05,
+ "loss": 0.7101,
+ "step": 891
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.492889400460015e-05,
+ "loss": 0.6884,
+ "step": 892
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.491763638769253e-05,
+ "loss": 0.7124,
+ "step": 893
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.490637054455073e-05,
+ "loss": 0.6982,
+ "step": 894
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4895096494020274e-05,
+ "loss": 0.7001,
+ "step": 895
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4883814254960418e-05,
+ "loss": 0.7201,
+ "step": 896
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4872523846244127e-05,
+ "loss": 0.7053,
+ "step": 897
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4861225286758018e-05,
+ "loss": 0.7871,
+ "step": 898
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4849918595402353e-05,
+ "loss": 0.701,
+ "step": 899
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4838603791090989e-05,
+ "loss": 0.7282,
+ "step": 900
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4827280892751364e-05,
+ "loss": 0.6271,
+ "step": 901
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4815949919324443e-05,
+ "loss": 0.7225,
+ "step": 902
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4804610889764712e-05,
+ "loss": 0.7286,
+ "step": 903
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4793263823040127e-05,
+ "loss": 0.7763,
+ "step": 904
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4781908738132085e-05,
+ "loss": 0.6973,
+ "step": 905
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4770545654035403e-05,
+ "loss": 0.7188,
+ "step": 906
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4759174589758272e-05,
+ "loss": 0.6782,
+ "step": 907
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4747795564322243e-05,
+ "loss": 0.7378,
+ "step": 908
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.473640859676217e-05,
+ "loss": 0.6813,
+ "step": 909
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4725013706126207e-05,
+ "loss": 0.7261,
+ "step": 910
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.471361091147575e-05,
+ "loss": 0.7461,
+ "step": 911
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4702200231885425e-05,
+ "loss": 0.7554,
+ "step": 912
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4690781686443044e-05,
+ "loss": 0.6786,
+ "step": 913
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4679355294249575e-05,
+ "loss": 0.7049,
+ "step": 914
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4667921074419118e-05,
+ "loss": 0.7296,
+ "step": 915
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4656479046078862e-05,
+ "loss": 0.6927,
+ "step": 916
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4645029228369062e-05,
+ "loss": 0.7288,
+ "step": 917
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4633571640442998e-05,
+ "loss": 0.7358,
+ "step": 918
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4622106301466956e-05,
+ "loss": 0.7088,
+ "step": 919
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4610633230620179e-05,
+ "loss": 0.7285,
+ "step": 920
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4599152447094846e-05,
+ "loss": 0.6971,
+ "step": 921
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4587663970096048e-05,
+ "loss": 0.6598,
+ "step": 922
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4576167818841732e-05,
+ "loss": 0.8311,
+ "step": 923
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4564664012562686e-05,
+ "loss": 0.7618,
+ "step": 924
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4553152570502506e-05,
+ "loss": 0.715,
+ "step": 925
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4541633511917567e-05,
+ "loss": 0.6876,
+ "step": 926
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.453010685607697e-05,
+ "loss": 0.7424,
+ "step": 927
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.451857262226254e-05,
+ "loss": 0.697,
+ "step": 928
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4507030829768765e-05,
+ "loss": 0.7492,
+ "step": 929
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4495481497902789e-05,
+ "loss": 0.7389,
+ "step": 930
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4483924645984359e-05,
+ "loss": 0.7214,
+ "step": 931
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4472360293345807e-05,
+ "loss": 0.7642,
+ "step": 932
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4460788459332011e-05,
+ "loss": 0.6999,
+ "step": 933
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.444920916330036e-05,
+ "loss": 0.6734,
+ "step": 934
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4437622424620733e-05,
+ "loss": 0.6938,
+ "step": 935
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4426028262675451e-05,
+ "loss": 0.715,
+ "step": 936
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.441442669685926e-05,
+ "loss": 0.7669,
+ "step": 937
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.440281774657929e-05,
+ "loss": 0.655,
+ "step": 938
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4391201431255014e-05,
+ "loss": 0.7152,
+ "step": 939
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4379577770318234e-05,
+ "loss": 0.6666,
+ "step": 940
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.436794678321304e-05,
+ "loss": 0.7262,
+ "step": 941
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4356308489395783e-05,
+ "loss": 0.7135,
+ "step": 942
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4344662908335018e-05,
+ "loss": 0.7154,
+ "step": 943
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4333010059511505e-05,
+ "loss": 0.7104,
+ "step": 944
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4321349962418163e-05,
+ "loss": 0.7124,
+ "step": 945
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4309682636560027e-05,
+ "loss": 0.663,
+ "step": 946
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4298008101454226e-05,
+ "loss": 0.7207,
+ "step": 947
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4286326376629956e-05,
+ "loss": 0.717,
+ "step": 948
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4274637481628431e-05,
+ "loss": 0.7342,
+ "step": 949
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4262941436002862e-05,
+ "loss": 0.7483,
+ "step": 950
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.425123825931843e-05,
+ "loss": 0.727,
+ "step": 951
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.423952797115223e-05,
+ "loss": 0.7015,
+ "step": 952
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4227810591093262e-05,
+ "loss": 0.6763,
+ "step": 953
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4216086138742395e-05,
+ "loss": 0.7539,
+ "step": 954
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4204354633712312e-05,
+ "loss": 0.6542,
+ "step": 955
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4192616095627506e-05,
+ "loss": 0.7409,
+ "step": 956
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4180870544124236e-05,
+ "loss": 0.7172,
+ "step": 957
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.416911799885049e-05,
+ "loss": 0.7727,
+ "step": 958
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4157358479465949e-05,
+ "loss": 0.7572,
+ "step": 959
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4145592005641967e-05,
+ "loss": 0.7763,
+ "step": 960
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.413381859706153e-05,
+ "loss": 0.6591,
+ "step": 961
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.412203827341923e-05,
+ "loss": 0.7304,
+ "step": 962
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4110251054421218e-05,
+ "loss": 0.6755,
+ "step": 963
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4098456959785178e-05,
+ "loss": 0.7126,
+ "step": 964
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4086656009240308e-05,
+ "loss": 0.6761,
+ "step": 965
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4074848222527262e-05,
+ "loss": 0.767,
+ "step": 966
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4063033619398136e-05,
+ "loss": 0.7824,
+ "step": 967
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4051212219616427e-05,
+ "loss": 0.7087,
+ "step": 968
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4039384042957003e-05,
+ "loss": 0.7141,
+ "step": 969
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4027549109206064e-05,
+ "loss": 0.7807,
+ "step": 970
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4015707438161122e-05,
+ "loss": 0.6721,
+ "step": 971
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4003859049630943e-05,
+ "loss": 0.7043,
+ "step": 972
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.399200396343555e-05,
+ "loss": 0.7051,
+ "step": 973
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3980142199406155e-05,
+ "loss": 0.6759,
+ "step": 974
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3968273777385146e-05,
+ "loss": 0.7316,
+ "step": 975
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3956398717226055e-05,
+ "loss": 0.6878,
+ "step": 976
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.39445170387935e-05,
+ "loss": 0.6914,
+ "step": 977
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3932628761963192e-05,
+ "loss": 0.7557,
+ "step": 978
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3920733906621861e-05,
+ "loss": 0.7348,
+ "step": 979
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3908832492667261e-05,
+ "loss": 0.6301,
+ "step": 980
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3896924540008098e-05,
+ "loss": 0.7632,
+ "step": 981
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3885010068564026e-05,
+ "loss": 0.6808,
+ "step": 982
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3873089098265606e-05,
+ "loss": 0.7659,
+ "step": 983
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.386116164905426e-05,
+ "loss": 0.7679,
+ "step": 984
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3849227740882257e-05,
+ "loss": 0.6761,
+ "step": 985
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3837287393712666e-05,
+ "loss": 0.7035,
+ "step": 986
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3825340627519337e-05,
+ "loss": 0.7478,
+ "step": 987
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3813387462286839e-05,
+ "loss": 0.7541,
+ "step": 988
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3801427918010461e-05,
+ "loss": 0.7886,
+ "step": 989
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.378946201469616e-05,
+ "loss": 0.7393,
+ "step": 990
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3777489772360521e-05,
+ "loss": 0.7486,
+ "step": 991
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3765511211030753e-05,
+ "loss": 0.7444,
+ "step": 992
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3753526350744611e-05,
+ "loss": 0.815,
+ "step": 993
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3741535211550402e-05,
+ "loss": 0.7968,
+ "step": 994
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3729537813506932e-05,
+ "loss": 0.6658,
+ "step": 995
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3717534176683485e-05,
+ "loss": 0.6949,
+ "step": 996
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3705524321159764e-05,
+ "loss": 0.789,
+ "step": 997
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.369350826702589e-05,
+ "loss": 0.6814,
+ "step": 998
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3681486034382347e-05,
+ "loss": 0.7266,
+ "step": 999
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3669457643339956e-05,
+ "loss": 0.6275,
+ "step": 1000
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3657423114019837e-05,
+ "loss": 0.7283,
+ "step": 1001
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3645382466553381e-05,
+ "loss": 0.7593,
+ "step": 1002
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3633335721082215e-05,
+ "loss": 0.6983,
+ "step": 1003
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.362128289775816e-05,
+ "loss": 0.6981,
+ "step": 1004
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3609224016743213e-05,
+ "loss": 0.771,
+ "step": 1005
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3597159098209493e-05,
+ "loss": 0.7285,
+ "step": 1006
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3585088162339233e-05,
+ "loss": 0.7484,
+ "step": 1007
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.357301122932472e-05,
+ "loss": 0.7154,
+ "step": 1008
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3560928319368278e-05,
+ "loss": 0.6834,
+ "step": 1009
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3548839452682225e-05,
+ "loss": 0.6977,
+ "step": 1010
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.353674464948885e-05,
+ "loss": 0.7003,
+ "step": 1011
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3524643930020374e-05,
+ "loss": 0.6982,
+ "step": 1012
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.35125373145189e-05,
+ "loss": 0.7748,
+ "step": 1013
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3500424823236413e-05,
+ "loss": 0.6627,
+ "step": 1014
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3488306476434714e-05,
+ "loss": 0.7418,
+ "step": 1015
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3476182294385398e-05,
+ "loss": 0.8061,
+ "step": 1016
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3464052297369838e-05,
+ "loss": 0.7103,
+ "step": 1017
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3451916505679115e-05,
+ "loss": 0.6843,
+ "step": 1018
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.343977493961401e-05,
+ "loss": 0.7491,
+ "step": 1019
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3427627619484968e-05,
+ "loss": 0.7352,
+ "step": 1020
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3415474565612059e-05,
+ "loss": 0.7295,
+ "step": 1021
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3403315798324935e-05,
+ "loss": 0.6246,
+ "step": 1022
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3391151337962813e-05,
+ "loss": 0.6958,
+ "step": 1023
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3378981204874432e-05,
+ "loss": 0.6741,
+ "step": 1024
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3366805419418026e-05,
+ "loss": 0.6437,
+ "step": 1025
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3354624001961267e-05,
+ "loss": 0.6977,
+ "step": 1026
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3342436972881272e-05,
+ "loss": 0.6561,
+ "step": 1027
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3330244352564527e-05,
+ "loss": 0.723,
+ "step": 1028
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3318046161406876e-05,
+ "loss": 0.6622,
+ "step": 1029
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3305842419813488e-05,
+ "loss": 0.7447,
+ "step": 1030
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3293633148198812e-05,
+ "loss": 0.6803,
+ "step": 1031
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3281418366986535e-05,
+ "loss": 0.6453,
+ "step": 1032
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3269198096609592e-05,
+ "loss": 0.7161,
+ "step": 1033
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3256972357510068e-05,
+ "loss": 0.7622,
+ "step": 1034
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.324474117013921e-05,
+ "loss": 0.5909,
+ "step": 1035
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3232504554957385e-05,
+ "loss": 0.7417,
+ "step": 1036
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.322026253243403e-05,
+ "loss": 0.7303,
+ "step": 1037
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3208015123047623e-05,
+ "loss": 0.7166,
+ "step": 1038
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.319576234728567e-05,
+ "loss": 0.6573,
+ "step": 1039
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3183504225644643e-05,
+ "loss": 0.7253,
+ "step": 1040
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3171240778629952e-05,
+ "loss": 0.7104,
+ "step": 1041
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3158972026755927e-05,
+ "loss": 0.6529,
+ "step": 1042
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3146697990545762e-05,
+ "loss": 0.6765,
+ "step": 1043
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3134418690531498e-05,
+ "loss": 0.7247,
+ "step": 1044
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3122134147253975e-05,
+ "loss": 0.7753,
+ "step": 1045
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3109844381262816e-05,
+ "loss": 0.7095,
+ "step": 1046
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3097549413116358e-05,
+ "loss": 0.7144,
+ "step": 1047
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3085249263381661e-05,
+ "loss": 0.7606,
+ "step": 1048
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3072943952634446e-05,
+ "loss": 0.7129,
+ "step": 1049
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3060633501459065e-05,
+ "loss": 0.647,
+ "step": 1050
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3048317930448472e-05,
+ "loss": 0.726,
+ "step": 1051
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3035997260204187e-05,
+ "loss": 0.8003,
+ "step": 1052
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3023671511336253e-05,
+ "loss": 0.6686,
+ "step": 1053
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.301134070446321e-05,
+ "loss": 0.7307,
+ "step": 1054
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2999004860212071e-05,
+ "loss": 0.6483,
+ "step": 1055
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2986663999218264e-05,
+ "loss": 0.7434,
+ "step": 1056
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2974318142125604e-05,
+ "loss": 0.5992,
+ "step": 1057
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2961967309586279e-05,
+ "loss": 0.7833,
+ "step": 1058
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2949611522260788e-05,
+ "loss": 0.7262,
+ "step": 1059
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2937250800817922e-05,
+ "loss": 0.6231,
+ "step": 1060
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2924885165934728e-05,
+ "loss": 0.6158,
+ "step": 1061
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2912514638296472e-05,
+ "loss": 0.7494,
+ "step": 1062
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2900139238596598e-05,
+ "loss": 0.761,
+ "step": 1063
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.288775898753671e-05,
+ "loss": 0.7723,
+ "step": 1064
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.287537390582652e-05,
+ "loss": 0.6906,
+ "step": 1065
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.286298401418382e-05,
+ "loss": 0.6849,
+ "step": 1066
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2850589333334455e-05,
+ "loss": 0.6143,
+ "step": 1067
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2838189884012275e-05,
+ "loss": 0.7147,
+ "step": 1068
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2825785686959113e-05,
+ "loss": 0.7597,
+ "step": 1069
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2813376762924735e-05,
+ "loss": 0.7187,
+ "step": 1070
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2800963132666825e-05,
+ "loss": 0.7125,
+ "step": 1071
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.278854481695093e-05,
+ "loss": 0.6703,
+ "step": 1072
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.277612183655044e-05,
+ "loss": 0.6878,
+ "step": 1073
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.276369421224655e-05,
+ "loss": 0.7435,
+ "step": 1074
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2751261964828214e-05,
+ "loss": 0.7735,
+ "step": 1075
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.273882511509213e-05,
+ "loss": 0.5964,
+ "step": 1076
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.272638368384269e-05,
+ "loss": 0.7331,
+ "step": 1077
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2713937691891953e-05,
+ "loss": 0.7225,
+ "step": 1078
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2701487160059601e-05,
+ "loss": 0.7315,
+ "step": 1079
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2689032109172914e-05,
+ "loss": 0.6482,
+ "step": 1080
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2676572560066736e-05,
+ "loss": 0.789,
+ "step": 1081
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2664108533583428e-05,
+ "loss": 0.6763,
+ "step": 1082
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2651640050572845e-05,
+ "loss": 0.736,
+ "step": 1083
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2639167131892294e-05,
+ "loss": 0.7044,
+ "step": 1084
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2626689798406506e-05,
+ "loss": 0.6952,
+ "step": 1085
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2614208070987592e-05,
+ "loss": 0.6376,
+ "step": 1086
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2601721970515024e-05,
+ "loss": 0.7509,
+ "step": 1087
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.258923151787557e-05,
+ "loss": 0.6675,
+ "step": 1088
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.25767367339633e-05,
+ "loss": 0.7606,
+ "step": 1089
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2564237639679512e-05,
+ "loss": 0.7229,
+ "step": 1090
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2551734255932728e-05,
+ "loss": 0.6808,
+ "step": 1091
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2539226603638633e-05,
+ "loss": 0.7193,
+ "step": 1092
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.252671470372006e-05,
+ "loss": 0.6949,
+ "step": 1093
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2514198577106948e-05,
+ "loss": 0.6993,
+ "step": 1094
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.25016782447363e-05,
+ "loss": 0.705,
+ "step": 1095
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2489153727552164e-05,
+ "loss": 0.7644,
+ "step": 1096
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.247662504650558e-05,
+ "loss": 0.758,
+ "step": 1097
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2464092222554554e-05,
+ "loss": 0.6515,
+ "step": 1098
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2451555276664032e-05,
+ "loss": 0.6896,
+ "step": 1099
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2439014229805836e-05,
+ "loss": 0.724,
+ "step": 1100
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2426469102958671e-05,
+ "loss": 0.768,
+ "step": 1101
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2413919917108053e-05,
+ "loss": 0.6448,
+ "step": 1102
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2401366693246289e-05,
+ "loss": 0.6587,
+ "step": 1103
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2388809452372442e-05,
+ "loss": 0.6829,
+ "step": 1104
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2376248215492297e-05,
+ "loss": 0.727,
+ "step": 1105
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2363683003618322e-05,
+ "loss": 0.7355,
+ "step": 1106
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.235111383776963e-05,
+ "loss": 0.6679,
+ "step": 1107
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.233854073897196e-05,
+ "loss": 0.7361,
+ "step": 1108
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2325963728257616e-05,
+ "loss": 0.7167,
+ "step": 1109
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2313382826665456e-05,
+ "loss": 0.6158,
+ "step": 1110
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2300798055240836e-05,
+ "loss": 0.6816,
+ "step": 1111
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2288209435035605e-05,
+ "loss": 0.7153,
+ "step": 1112
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2275616987108028e-05,
+ "loss": 0.685,
+ "step": 1113
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2263020732522786e-05,
+ "loss": 0.7036,
+ "step": 1114
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2250420692350921e-05,
+ "loss": 0.6627,
+ "step": 1115
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2237816887669817e-05,
+ "loss": 0.7332,
+ "step": 1116
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2225209339563144e-05,
+ "loss": 0.6258,
+ "step": 1117
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2212598069120845e-05,
+ "loss": 0.6943,
+ "step": 1118
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2199983097439079e-05,
+ "loss": 0.6218,
+ "step": 1119
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2187364445620203e-05,
+ "loss": 0.6348,
+ "step": 1120
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2174742134772726e-05,
+ "loss": 0.6696,
+ "step": 1121
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2162116186011285e-05,
+ "loss": 0.7279,
+ "step": 1122
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2149486620456599e-05,
+ "loss": 0.6674,
+ "step": 1123
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2136853459235427e-05,
+ "loss": 0.6465,
+ "step": 1124
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2124216723480565e-05,
+ "loss": 0.7242,
+ "step": 1125
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2111576434330767e-05,
+ "loss": 0.6946,
+ "step": 1126
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.209893261293074e-05,
+ "loss": 0.6544,
+ "step": 1127
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2086285280431103e-05,
+ "loss": 0.6703,
+ "step": 1128
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2073634457988342e-05,
+ "loss": 0.7357,
+ "step": 1129
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2060980166764782e-05,
+ "loss": 0.6756,
+ "step": 1130
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2048322427928556e-05,
+ "loss": 0.7171,
+ "step": 1131
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2035661262653562e-05,
+ "loss": 0.6666,
+ "step": 1132
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2022996692119424e-05,
+ "loss": 0.7594,
+ "step": 1133
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2010328737511466e-05,
+ "loss": 0.648,
+ "step": 1134
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1997657420020678e-05,
+ "loss": 0.7421,
+ "step": 1135
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1984982760843666e-05,
+ "loss": 0.6885,
+ "step": 1136
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1972304781182636e-05,
+ "loss": 0.7296,
+ "step": 1137
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.195962350224534e-05,
+ "loss": 0.6703,
+ "step": 1138
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1946938945245052e-05,
+ "loss": 0.6689,
+ "step": 1139
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1934251131400532e-05,
+ "loss": 0.6798,
+ "step": 1140
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1921560081935986e-05,
+ "loss": 0.6499,
+ "step": 1141
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1908865818081032e-05,
+ "loss": 0.706,
+ "step": 1142
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1896168361070664e-05,
+ "loss": 0.7832,
+ "step": 1143
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1883467732145224e-05,
+ "loss": 0.6323,
+ "step": 1144
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1870763952550347e-05,
+ "loss": 0.7339,
+ "step": 1145
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1858057043536955e-05,
+ "loss": 0.6856,
+ "step": 1146
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1845347026361192e-05,
+ "loss": 0.7769,
+ "step": 1147
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1832633922284407e-05,
+ "loss": 0.7252,
+ "step": 1148
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1819917752573109e-05,
+ "loss": 0.6517,
+ "step": 1149
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1807198538498944e-05,
+ "loss": 0.7214,
+ "step": 1150
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.179447630133864e-05,
+ "loss": 0.6832,
+ "step": 1151
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1781751062373982e-05,
+ "loss": 0.6402,
+ "step": 1152
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1769022842891794e-05,
+ "loss": 0.7082,
+ "step": 1153
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1756291664183858e-05,
+ "loss": 0.7173,
+ "step": 1154
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1743557547546928e-05,
+ "loss": 0.6631,
+ "step": 1155
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1730820514282669e-05,
+ "loss": 0.7071,
+ "step": 1156
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1718080585697617e-05,
+ "loss": 0.7505,
+ "step": 1157
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1705337783103155e-05,
+ "loss": 0.7402,
+ "step": 1158
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1692592127815475e-05,
+ "loss": 0.7419,
+ "step": 1159
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1679843641155544e-05,
+ "loss": 0.7011,
+ "step": 1160
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1667092344449053e-05,
+ "loss": 0.7157,
+ "step": 1161
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1654338259026413e-05,
+ "loss": 0.6883,
+ "step": 1162
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1641581406222682e-05,
+ "loss": 0.6543,
+ "step": 1163
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1628821807377557e-05,
+ "loss": 0.7649,
+ "step": 1164
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1616059483835325e-05,
+ "loss": 0.6502,
+ "step": 1165
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1603294456944835e-05,
+ "loss": 0.6829,
+ "step": 1166
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1590526748059453e-05,
+ "loss": 0.713,
+ "step": 1167
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1577756378537033e-05,
+ "loss": 0.6524,
+ "step": 1168
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1564983369739884e-05,
+ "loss": 0.6999,
+ "step": 1169
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.155220774303472e-05,
+ "loss": 0.5986,
+ "step": 1170
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1539429519792647e-05,
+ "loss": 0.6603,
+ "step": 1171
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1526648721389105e-05,
+ "loss": 0.7486,
+ "step": 1172
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1513865369203844e-05,
+ "loss": 0.6521,
+ "step": 1173
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1501079484620887e-05,
+ "loss": 0.7349,
+ "step": 1174
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1488291089028501e-05,
+ "loss": 0.7817,
+ "step": 1175
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1475500203819135e-05,
+ "loss": 0.7151,
+ "step": 1176
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1462706850389413e-05,
+ "loss": 0.699,
+ "step": 1177
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1449911050140097e-05,
+ "loss": 0.6815,
+ "step": 1178
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1437112824476025e-05,
+ "loss": 0.7254,
+ "step": 1179
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.14243121948061e-05,
+ "loss": 0.7336,
+ "step": 1180
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1411509182543253e-05,
+ "loss": 0.7409,
+ "step": 1181
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1398703809104392e-05,
+ "loss": 0.702,
+ "step": 1182
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1385896095910374e-05,
+ "loss": 0.7094,
+ "step": 1183
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1373086064385974e-05,
+ "loss": 0.7456,
+ "step": 1184
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1360273735959848e-05,
+ "loss": 0.7232,
+ "step": 1185
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1347459132064485e-05,
+ "loss": 0.635,
+ "step": 1186
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1334642274136186e-05,
+ "loss": 0.712,
+ "step": 1187
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1321823183615027e-05,
+ "loss": 0.7039,
+ "step": 1188
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.130900188194481e-05,
+ "loss": 0.7892,
+ "step": 1189
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1296178390573038e-05,
+ "loss": 0.7481,
+ "step": 1190
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1283352730950886e-05,
+ "loss": 0.7033,
+ "step": 1191
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1270524924533146e-05,
+ "loss": 0.6883,
+ "step": 1192
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1257694992778199e-05,
+ "loss": 0.6783,
+ "step": 1193
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1244862957147993e-05,
+ "loss": 0.6984,
+ "step": 1194
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1232028839107985e-05,
+ "loss": 0.6964,
+ "step": 1195
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1219192660127117e-05,
+ "loss": 0.6882,
+ "step": 1196
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1206354441677782e-05,
+ "loss": 0.7332,
+ "step": 1197
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1193514205235789e-05,
+ "loss": 0.6751,
+ "step": 1198
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1180671972280311e-05,
+ "loss": 0.7269,
+ "step": 1199
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1167827764293867e-05,
+ "loss": 0.693,
+ "step": 1200
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.115498160276228e-05,
+ "loss": 0.7177,
+ "step": 1201
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1142133509174644e-05,
+ "loss": 0.717,
+ "step": 1202
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1129283505023273e-05,
+ "loss": 0.6759,
+ "step": 1203
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1116431611803696e-05,
+ "loss": 0.633,
+ "step": 1204
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1103577851014587e-05,
+ "loss": 0.6781,
+ "step": 1205
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.109072224415774e-05,
+ "loss": 0.6274,
+ "step": 1206
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1077864812738061e-05,
+ "loss": 0.6786,
+ "step": 1207
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1065005578263483e-05,
+ "loss": 0.7342,
+ "step": 1208
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1052144562244967e-05,
+ "loss": 0.7327,
+ "step": 1209
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1039281786196453e-05,
+ "loss": 0.6939,
+ "step": 1210
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1026417271634821e-05,
+ "loss": 0.7292,
+ "step": 1211
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1013551040079866e-05,
+ "loss": 0.6944,
+ "step": 1212
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1000683113054251e-05,
+ "loss": 0.7309,
+ "step": 1213
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.0987813512083476e-05,
+ "loss": 0.7714,
+ "step": 1214
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0974942258695838e-05,
+ "loss": 0.6908,
+ "step": 1215
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0962069374422407e-05,
+ "loss": 0.6826,
+ "step": 1216
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0949194880796967e-05,
+ "loss": 0.7019,
+ "step": 1217
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0936318799356009e-05,
+ "loss": 0.7057,
+ "step": 1218
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0923441151638676e-05,
+ "loss": 0.7641,
+ "step": 1219
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.091056195918672e-05,
+ "loss": 0.7469,
+ "step": 1220
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0897681243544492e-05,
+ "loss": 0.7109,
+ "step": 1221
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0884799026258879e-05,
+ "loss": 0.7088,
+ "step": 1222
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0871915328879294e-05,
+ "loss": 0.6816,
+ "step": 1223
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.085903017295761e-05,
+ "loss": 0.7101,
+ "step": 1224
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0846143580048152e-05,
+ "loss": 0.6097,
+ "step": 1225
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.083325557170764e-05,
+ "loss": 0.7659,
+ "step": 1226
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0820366169495171e-05,
+ "loss": 0.7452,
+ "step": 1227
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0807475394972164e-05,
+ "loss": 0.786,
+ "step": 1228
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0794583269702341e-05,
+ "loss": 0.7378,
+ "step": 1229
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0781689815251682e-05,
+ "loss": 0.8072,
+ "step": 1230
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0768795053188379e-05,
+ "loss": 0.6954,
+ "step": 1231
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0755899005082837e-05,
+ "loss": 0.6821,
+ "step": 1232
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0743001692507586e-05,
+ "loss": 0.682,
+ "step": 1233
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0730103137037284e-05,
+ "loss": 0.6474,
+ "step": 1234
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0717203360248669e-05,
+ "loss": 0.5955,
+ "step": 1235
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.070430238372052e-05,
+ "loss": 0.6783,
+ "step": 1236
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0691400229033617e-05,
+ "loss": 0.6267,
+ "step": 1237
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.067849691777072e-05,
+ "loss": 0.7146,
+ "step": 1238
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0665592471516518e-05,
+ "loss": 0.6971,
+ "step": 1239
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0652686911857599e-05,
+ "loss": 0.776,
+ "step": 1240
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0639780260382417e-05,
+ "loss": 0.6909,
+ "step": 1241
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.062687253868125e-05,
+ "loss": 0.7109,
+ "step": 1242
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0613963768346167e-05,
+ "loss": 0.5816,
+ "step": 1243
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.060105397097099e-05,
+ "loss": 0.7576,
+ "step": 1244
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0588143168151257e-05,
+ "loss": 0.7012,
+ "step": 1245
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0575231381484195e-05,
+ "loss": 0.6383,
+ "step": 1246
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0562318632568667e-05,
+ "loss": 0.7184,
+ "step": 1247
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0549404943005157e-05,
+ "loss": 0.7129,
+ "step": 1248
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0536490334395713e-05,
+ "loss": 0.7095,
+ "step": 1249
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0523574828343923e-05,
+ "loss": 0.7364,
+ "step": 1250
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.051065844645488e-05,
+ "loss": 0.6629,
+ "step": 1251
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.049774121033514e-05,
+ "loss": 0.7453,
+ "step": 1252
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0484823141592682e-05,
+ "loss": 0.6834,
+ "step": 1253
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0471904261836888e-05,
+ "loss": 0.6468,
+ "step": 1254
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.045898459267849e-05,
+ "loss": 0.7085,
+ "step": 1255
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0446064155729538e-05,
+ "loss": 0.6997,
+ "step": 1256
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0433142972603378e-05,
+ "loss": 0.6849,
+ "step": 1257
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0420221064914592e-05,
+ "loss": 0.7047,
+ "step": 1258
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0407298454278983e-05,
+ "loss": 0.7369,
+ "step": 1259
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0394375162313519e-05,
+ "loss": 0.6646,
+ "step": 1260
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0381451210636321e-05,
+ "loss": 0.7039,
+ "step": 1261
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0368526620866604e-05,
+ "loss": 0.7003,
+ "step": 1262
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0355601414624653e-05,
+ "loss": 0.7145,
+ "step": 1263
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0342675613531785e-05,
+ "loss": 0.6159,
+ "step": 1264
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0329749239210311e-05,
+ "loss": 0.7379,
+ "step": 1265
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0316822313283504e-05,
+ "loss": 0.6228,
+ "step": 1266
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0303894857375557e-05,
+ "loss": 0.7528,
+ "step": 1267
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0290966893111547e-05,
+ "loss": 0.6973,
+ "step": 1268
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0278038442117405e-05,
+ "loss": 0.6374,
+ "step": 1269
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0265109526019878e-05,
+ "loss": 0.7453,
+ "step": 1270
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.025218016644649e-05,
+ "loss": 0.7295,
+ "step": 1271
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0239250385025496e-05,
+ "loss": 0.6689,
+ "step": 1272
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0226320203385877e-05,
+ "loss": 0.6478,
+ "step": 1273
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0213389643157268e-05,
+ "loss": 0.7025,
+ "step": 1274
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.020045872596994e-05,
+ "loss": 0.6906,
+ "step": 1275
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0187527473454764e-05,
+ "loss": 0.7269,
+ "step": 1276
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0174595907243172e-05,
+ "loss": 0.7058,
+ "step": 1277
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.016166404896712e-05,
+ "loss": 0.6016,
+ "step": 1278
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.014873192025905e-05,
+ "loss": 0.753,
+ "step": 1279
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0135799542751861e-05,
+ "loss": 0.6989,
+ "step": 1280
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0122866938078863e-05,
+ "loss": 0.6376,
+ "step": 1281
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0109934127873751e-05,
+ "loss": 0.6318,
+ "step": 1282
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0097001133770561e-05,
+ "loss": 0.7521,
+ "step": 1283
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0084067977403637e-05,
+ "loss": 0.7343,
+ "step": 1284
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0071134680407593e-05,
+ "loss": 0.5818,
+ "step": 1285
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0058201264417285e-05,
+ "loss": 0.7092,
+ "step": 1286
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0045267751067758e-05,
+ "loss": 0.6799,
+ "step": 1287
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0032334161994225e-05,
+ "loss": 0.7875,
+ "step": 1288
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0019400518832028e-05,
+ "loss": 0.6872,
+ "step": 1289
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0006466843216593e-05,
+ "loss": 0.695,
+ "step": 1290
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.993533156783409e-06,
+ "loss": 0.634,
+ "step": 1291
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.980599481167977e-06,
+ "loss": 0.7041,
+ "step": 1292
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.967665838005777e-06,
+ "loss": 0.7205,
+ "step": 1293
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.954732248932243e-06,
+ "loss": 0.6748,
+ "step": 1294
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.941798735582718e-06,
+ "loss": 0.7556,
+ "step": 1295
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.92886531959241e-06,
+ "loss": 0.6823,
+ "step": 1296
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.915932022596368e-06,
+ "loss": 0.6595,
+ "step": 1297
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.90299886622944e-06,
+ "loss": 0.6156,
+ "step": 1298
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.890065872126252e-06,
+ "loss": 0.7145,
+ "step": 1299
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.877133061921138e-06,
+ "loss": 0.6986,
+ "step": 1300
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.864200457248144e-06,
+ "loss": 0.6416,
+ "step": 1301
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.851268079740951e-06,
+ "loss": 0.67,
+ "step": 1302
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.838335951032883e-06,
+ "loss": 0.6946,
+ "step": 1303
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.82540409275683e-06,
+ "loss": 0.6245,
+ "step": 1304
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.81247252654524e-06,
+ "loss": 0.6579,
+ "step": 1305
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.799541274030065e-06,
+ "loss": 0.6849,
+ "step": 1306
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.786610356842734e-06,
+ "loss": 0.6448,
+ "step": 1307
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.773679796614124e-06,
+ "loss": 0.6577,
+ "step": 1308
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.760749614974506e-06,
+ "loss": 0.6749,
+ "step": 1309
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.747819833553516e-06,
+ "loss": 0.6146,
+ "step": 1310
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.734890473980122e-06,
+ "loss": 0.6699,
+ "step": 1311
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.721961557882597e-06,
+ "loss": 0.6602,
+ "step": 1312
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.709033106888456e-06,
+ "loss": 0.6989,
+ "step": 1313
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.696105142624448e-06,
+ "loss": 0.7192,
+ "step": 1314
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.683177686716501e-06,
+ "loss": 0.7067,
+ "step": 1315
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.670250760789692e-06,
+ "loss": 0.6592,
+ "step": 1316
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.657324386468217e-06,
+ "loss": 0.7786,
+ "step": 1317
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.644398585375352e-06,
+ "loss": 0.7614,
+ "step": 1318
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.6314733791334e-06,
+ "loss": 0.6812,
+ "step": 1319
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.61854878936368e-06,
+ "loss": 0.7168,
+ "step": 1320
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.605624837686483e-06,
+ "loss": 0.7478,
+ "step": 1321
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.592701545721022e-06,
+ "loss": 0.6494,
+ "step": 1322
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.57977893508541e-06,
+ "loss": 0.6725,
+ "step": 1323
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.566857027396627e-06,
+ "loss": 0.6252,
+ "step": 1324
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.553935844270464e-06,
+ "loss": 0.7419,
+ "step": 1325
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.541015407321514e-06,
+ "loss": 0.727,
+ "step": 1326
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.528095738163117e-06,
+ "loss": 0.6605,
+ "step": 1327
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.515176858407323e-06,
+ "loss": 0.6691,
+ "step": 1328
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.502258789664865e-06,
+ "loss": 0.6714,
+ "step": 1329
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.489341553545121e-06,
+ "loss": 0.7405,
+ "step": 1330
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.47642517165608e-06,
+ "loss": 0.7016,
+ "step": 1331
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.46350966560429e-06,
+ "loss": 0.6025,
+ "step": 1332
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.450595056994848e-06,
+ "loss": 0.633,
+ "step": 1333
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.437681367431335e-06,
+ "loss": 0.6259,
+ "step": 1334
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.424768618515809e-06,
+ "loss": 0.7066,
+ "step": 1335
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.411856831848745e-06,
+ "loss": 0.7224,
+ "step": 1336
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.398946029029015e-06,
+ "loss": 0.7557,
+ "step": 1337
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.386036231653836e-06,
+ "loss": 0.7207,
+ "step": 1338
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.37312746131875e-06,
+ "loss": 0.725,
+ "step": 1339
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.360219739617585e-06,
+ "loss": 0.6347,
+ "step": 1340
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.347313088142403e-06,
+ "loss": 0.5741,
+ "step": 1341
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.334407528483487e-06,
+ "loss": 0.7209,
+ "step": 1342
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.321503082229281e-06,
+ "loss": 0.6941,
+ "step": 1343
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.308599770966385e-06,
+ "loss": 0.686,
+ "step": 1344
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.295697616279483e-06,
+ "loss": 0.7042,
+ "step": 1345
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.282796639751333e-06,
+ "loss": 0.6954,
+ "step": 1346
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.269896862962718e-06,
+ "loss": 0.7053,
+ "step": 1347
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.256998307492415e-06,
+ "loss": 0.7352,
+ "step": 1348
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.244100994917167e-06,
+ "loss": 0.7288,
+ "step": 1349
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.231204946811624e-06,
+ "loss": 0.5782,
+ "step": 1350
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.218310184748325e-06,
+ "loss": 0.6245,
+ "step": 1351
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.20541673029766e-06,
+ "loss": 0.6147,
+ "step": 1352
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.192524605027838e-06,
+ "loss": 0.6965,
+ "step": 1353
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.179633830504832e-06,
+ "loss": 0.7483,
+ "step": 1354
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.166744428292362e-06,
+ "loss": 0.7384,
+ "step": 1355
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.153856419951854e-06,
+ "loss": 0.779,
+ "step": 1356
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.140969827042392e-06,
+ "loss": 0.7438,
+ "step": 1357
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.128084671120709e-06,
+ "loss": 0.6946,
+ "step": 1358
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.115200973741123e-06,
+ "loss": 0.6906,
+ "step": 1359
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.102318756455513e-06,
+ "loss": 0.7552,
+ "step": 1360
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.089438040813282e-06,
+ "loss": 0.6676,
+ "step": 1361
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.076558848361328e-06,
+ "loss": 0.7069,
+ "step": 1362
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.063681200643993e-06,
+ "loss": 0.6886,
+ "step": 1363
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.050805119203035e-06,
+ "loss": 0.7038,
+ "step": 1364
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.037930625577598e-06,
+ "loss": 0.6809,
+ "step": 1365
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 9.025057741304164e-06,
+ "loss": 0.6361,
+ "step": 1366
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 9.012186487916526e-06,
+ "loss": 0.6991,
+ "step": 1367
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.999316886945752e-06,
+ "loss": 0.7207,
+ "step": 1368
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.986448959920137e-06,
+ "loss": 0.6511,
+ "step": 1369
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.97358272836518e-06,
+ "loss": 0.6399,
+ "step": 1370
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.96071821380355e-06,
+ "loss": 0.7298,
+ "step": 1371
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.947855437755036e-06,
+ "loss": 0.6652,
+ "step": 1372
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.934994421736519e-06,
+ "loss": 0.6792,
+ "step": 1373
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.922135187261944e-06,
+ "loss": 0.7164,
+ "step": 1374
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.90927775584226e-06,
+ "loss": 0.6937,
+ "step": 1375
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.896422148985418e-06,
+ "loss": 0.6969,
+ "step": 1376
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.883568388196308e-06,
+ "loss": 0.6886,
+ "step": 1377
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.87071649497673e-06,
+ "loss": 0.6636,
+ "step": 1378
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.85786649082536e-06,
+ "loss": 0.6867,
+ "step": 1379
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.845018397237722e-06,
+ "loss": 0.7726,
+ "step": 1380
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.832172235706137e-06,
+ "loss": 0.761,
+ "step": 1381
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.819328027719692e-06,
+ "loss": 0.6859,
+ "step": 1382
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.806485794764215e-06,
+ "loss": 0.7139,
+ "step": 1383
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.793645558322218e-06,
+ "loss": 0.6947,
+ "step": 1384
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.780807339872886e-06,
+ "loss": 0.7638,
+ "step": 1385
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.76797116089202e-06,
+ "loss": 0.7058,
+ "step": 1386
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.755137042852012e-06,
+ "loss": 0.6667,
+ "step": 1387
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.742305007221801e-06,
+ "loss": 0.6283,
+ "step": 1388
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.729475075466856e-06,
+ "loss": 0.737,
+ "step": 1389
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.716647269049115e-06,
+ "loss": 0.712,
+ "step": 1390
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.703821609426963e-06,
+ "loss": 0.6487,
+ "step": 1391
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.690998118055193e-06,
+ "loss": 0.7116,
+ "step": 1392
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.678176816384975e-06,
+ "loss": 0.7156,
+ "step": 1393
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.665357725863816e-06,
+ "loss": 0.7352,
+ "step": 1394
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.65254086793552e-06,
+ "loss": 0.6926,
+ "step": 1395
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.639726264040159e-06,
+ "loss": 0.7255,
+ "step": 1396
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.626913935614026e-06,
+ "loss": 0.6113,
+ "step": 1397
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.61410390408963e-06,
+ "loss": 0.7548,
+ "step": 1398
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.601296190895611e-06,
+ "loss": 0.6867,
+ "step": 1399
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.588490817456749e-06,
+ "loss": 0.7505,
+ "step": 1400
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.575687805193902e-06,
+ "loss": 0.7101,
+ "step": 1401
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.562887175523977e-06,
+ "loss": 0.7001,
+ "step": 1402
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.550088949859906e-06,
+ "loss": 0.7388,
+ "step": 1403
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.53729314961059e-06,
+ "loss": 0.5856,
+ "step": 1404
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.52449979618087e-06,
+ "loss": 0.6807,
+ "step": 1405
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.511708910971506e-06,
+ "loss": 0.706,
+ "step": 1406
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.498920515379112e-06,
+ "loss": 0.7484,
+ "step": 1407
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.486134630796157e-06,
+ "loss": 0.682,
+ "step": 1408
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.4733512786109e-06,
+ "loss": 0.6337,
+ "step": 1409
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.460570480207358e-06,
+ "loss": 0.6727,
+ "step": 1410
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.447792256965282e-06,
+ "loss": 0.6568,
+ "step": 1411
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.43501663026012e-06,
+ "loss": 0.6837,
+ "step": 1412
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.42224362146297e-06,
+ "loss": 0.6868,
+ "step": 1413
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.409473251940549e-06,
+ "loss": 0.6625,
+ "step": 1414
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.39670554305517e-06,
+ "loss": 0.7005,
+ "step": 1415
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.383940516164677e-06,
+ "loss": 0.6969,
+ "step": 1416
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.371178192622445e-06,
+ "loss": 0.7158,
+ "step": 1417
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.358418593777322e-06,
+ "loss": 0.6658,
+ "step": 1418
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.345661740973592e-06,
+ "loss": 0.6951,
+ "step": 1419
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.332907655550948e-06,
+ "loss": 0.6511,
+ "step": 1420
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.32015635884446e-06,
+ "loss": 0.654,
+ "step": 1421
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.307407872184527e-06,
+ "loss": 0.6727,
+ "step": 1422
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.294662216896848e-06,
+ "loss": 0.6546,
+ "step": 1423
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.281919414302388e-06,
+ "loss": 0.6165,
+ "step": 1424
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.269179485717331e-06,
+ "loss": 0.7125,
+ "step": 1425
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.256442452453073e-06,
+ "loss": 0.6387,
+ "step": 1426
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.243708335816145e-06,
+ "loss": 0.6934,
+ "step": 1427
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.230977157108213e-06,
+ "loss": 0.6297,
+ "step": 1428
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.218248937626018e-06,
+ "loss": 0.7264,
+ "step": 1429
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.205523698661365e-06,
+ "loss": 0.7113,
+ "step": 1430
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.19280146150106e-06,
+ "loss": 0.6275,
+ "step": 1431
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.180082247426895e-06,
+ "loss": 0.7128,
+ "step": 1432
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.167366077715598e-06,
+ "loss": 0.6883,
+ "step": 1433
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.15465297363881e-06,
+ "loss": 0.7201,
+ "step": 1434
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.141942956463049e-06,
+ "loss": 0.7038,
+ "step": 1435
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.129236047449654e-06,
+ "loss": 0.6751,
+ "step": 1436
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.116532267854781e-06,
+ "loss": 0.6236,
+ "step": 1437
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.103831638929337e-06,
+ "loss": 0.7242,
+ "step": 1438
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.091134181918971e-06,
+ "loss": 0.6346,
+ "step": 1439
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.078439918064016e-06,
+ "loss": 0.7456,
+ "step": 1440
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.065748868599471e-06,
+ "loss": 0.7148,
+ "step": 1441
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.053061054754952e-06,
+ "loss": 0.6309,
+ "step": 1442
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.04037649775466e-06,
+ "loss": 0.7147,
+ "step": 1443
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.027695218817366e-06,
+ "loss": 0.6444,
+ "step": 1444
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.015017239156338e-06,
+ "loss": 0.6877,
+ "step": 1445
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.002342579979327e-06,
+ "loss": 0.6742,
+ "step": 1446
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.989671262488534e-06,
+ "loss": 0.6934,
+ "step": 1447
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.977003307880579e-06,
+ "loss": 0.7024,
+ "step": 1448
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.964338737346441e-06,
+ "loss": 0.6979,
+ "step": 1449
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.951677572071446e-06,
+ "loss": 0.7419,
+ "step": 1450
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.939019833235223e-06,
+ "loss": 0.6657,
+ "step": 1451
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.926365542011662e-06,
+ "loss": 0.7027,
+ "step": 1452
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.9137147195689e-06,
+ "loss": 0.6929,
+ "step": 1453
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.901067387069264e-06,
+ "loss": 0.6948,
+ "step": 1454
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.888423565669236e-06,
+ "loss": 0.7046,
+ "step": 1455
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.875783276519435e-06,
+ "loss": 0.7287,
+ "step": 1456
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.863146540764572e-06,
+ "loss": 0.7129,
+ "step": 1457
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.850513379543405e-06,
+ "loss": 0.6752,
+ "step": 1458
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.837883813988717e-06,
+ "loss": 0.6347,
+ "step": 1459
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.825257865227278e-06,
+ "loss": 0.6704,
+ "step": 1460
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.812635554379802e-06,
+ "loss": 0.7374,
+ "step": 1461
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.800016902560924e-06,
+ "loss": 0.728,
+ "step": 1462
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.787401930879159e-06,
+ "loss": 0.6032,
+ "step": 1463
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.774790660436857e-06,
+ "loss": 0.655,
+ "step": 1464
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.762183112330182e-06,
+ "loss": 0.6702,
+ "step": 1465
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.74957930764908e-06,
+ "loss": 0.7316,
+ "step": 1466
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.736979267477218e-06,
+ "loss": 0.763,
+ "step": 1467
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.724383012891976e-06,
+ "loss": 0.6746,
+ "step": 1468
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.7117905649644e-06,
+ "loss": 0.6583,
+ "step": 1469
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.699201944759164e-06,
+ "loss": 0.6852,
+ "step": 1470
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.686617173334547e-06,
+ "loss": 0.7423,
+ "step": 1471
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.674036271742388e-06,
+ "loss": 0.6801,
+ "step": 1472
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.661459261028045e-06,
+ "loss": 0.7574,
+ "step": 1473
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.648886162230373e-06,
+ "loss": 0.6491,
+ "step": 1474
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.63631699638168e-06,
+ "loss": 0.6998,
+ "step": 1475
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.623751784507707e-06,
+ "loss": 0.6703,
+ "step": 1476
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.6111905476275606e-06,
+ "loss": 0.6968,
+ "step": 1477
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.598633306753715e-06,
+ "loss": 0.7136,
+ "step": 1478
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.586080082891949e-06,
+ "loss": 0.6827,
+ "step": 1479
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.57353089704133e-06,
+ "loss": 0.6491,
+ "step": 1480
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.5609857701941645e-06,
+ "loss": 0.6936,
+ "step": 1481
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.548444723335975e-06,
+ "loss": 0.6825,
+ "step": 1482
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.535907777445449e-06,
+ "loss": 0.6472,
+ "step": 1483
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.523374953494422e-06,
+ "loss": 0.656,
+ "step": 1484
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.510846272447839e-06,
+ "loss": 0.6643,
+ "step": 1485
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.498321755263704e-06,
+ "loss": 0.6289,
+ "step": 1486
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.485801422893057e-06,
+ "loss": 0.6727,
+ "step": 1487
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.473285296279942e-06,
+ "loss": 0.6768,
+ "step": 1488
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.460773396361371e-06,
+ "loss": 0.7319,
+ "step": 1489
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.448265744067275e-06,
+ "loss": 0.7342,
+ "step": 1490
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.435762360320491e-06,
+ "loss": 0.6721,
+ "step": 1491
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.423263266036705e-06,
+ "loss": 0.7585,
+ "step": 1492
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.410768482124431e-06,
+ "loss": 0.6734,
+ "step": 1493
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.3982780294849795e-06,
+ "loss": 0.6777,
+ "step": 1494
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.38579192901241e-06,
+ "loss": 0.7117,
+ "step": 1495
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.373310201593498e-06,
+ "loss": 0.7209,
+ "step": 1496
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.360832868107708e-06,
+ "loss": 0.7735,
+ "step": 1497
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.348359949427159e-06,
+ "loss": 0.6869,
+ "step": 1498
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.335891466416574e-06,
+ "loss": 0.5594,
+ "step": 1499
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.323427439933269e-06,
+ "loss": 0.6535,
+ "step": 1500
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.31096789082709e-06,
+ "loss": 0.7319,
+ "step": 1501
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.2985128399404016e-06,
+ "loss": 0.7149,
+ "step": 1502
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.2860623081080506e-06,
+ "loss": 0.6002,
+ "step": 1503
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.273616316157312e-06,
+ "loss": 0.7053,
+ "step": 1504
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.261174884907874e-06,
+ "loss": 0.6951,
+ "step": 1505
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.248738035171787e-06,
+ "loss": 0.7049,
+ "step": 1506
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.236305787753454e-06,
+ "loss": 0.644,
+ "step": 1507
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.223878163449563e-06,
+ "loss": 0.6397,
+ "step": 1508
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.211455183049072e-06,
+ "loss": 0.7898,
+ "step": 1509
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.199036867333179e-06,
+ "loss": 0.7426,
+ "step": 1510
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.186623237075265e-06,
+ "loss": 0.6811,
+ "step": 1511
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.174214313040888e-06,
+ "loss": 0.7477,
+ "step": 1512
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.161810115987727e-06,
+ "loss": 0.7154,
+ "step": 1513
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.14941066666555e-06,
+ "loss": 0.6793,
+ "step": 1514
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.137015985816183e-06,
+ "loss": 0.7091,
+ "step": 1515
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.124626094173484e-06,
+ "loss": 0.6447,
+ "step": 1516
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.1122410124632935e-06,
+ "loss": 0.5612,
+ "step": 1517
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.0998607614034035e-06,
+ "loss": 0.6695,
+ "step": 1518
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.087485361703532e-06,
+ "loss": 0.6622,
+ "step": 1519
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.075114834065272e-06,
+ "loss": 0.7255,
+ "step": 1520
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.06274919918208e-06,
+ "loss": 0.6315,
+ "step": 1521
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.0503884777392166e-06,
+ "loss": 0.7214,
+ "step": 1522
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.0380326904137265e-06,
+ "loss": 0.6558,
+ "step": 1523
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.025681857874398e-06,
+ "loss": 0.6793,
+ "step": 1524
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.013336000781739e-06,
+ "loss": 0.6945,
+ "step": 1525
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.000995139787929e-06,
+ "loss": 0.6842,
+ "step": 1526
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.988659295536792e-06,
+ "loss": 0.6629,
+ "step": 1527
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.9763284886637516e-06,
+ "loss": 0.725,
+ "step": 1528
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.964002739795814e-06,
+ "loss": 0.6476,
+ "step": 1529
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.9516820695515285e-06,
+ "loss": 0.6404,
+ "step": 1530
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.939366498540936e-06,
+ "loss": 0.7513,
+ "step": 1531
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.927056047365558e-06,
+ "loss": 0.7146,
+ "step": 1532
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.914750736618341e-06,
+ "loss": 0.6809,
+ "step": 1533
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.9024505868836465e-06,
+ "loss": 0.6975,
+ "step": 1534
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.890155618737189e-06,
+ "loss": 0.6694,
+ "step": 1535
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.877865852746026e-06,
+ "loss": 0.6642,
+ "step": 1536
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.865581309468506e-06,
+ "loss": 0.6499,
+ "step": 1537
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.853302009454239e-06,
+ "loss": 0.7365,
+ "step": 1538
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.841027973244077e-06,
+ "loss": 0.6723,
+ "step": 1539
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.828759221370049e-06,
+ "loss": 0.6632,
+ "step": 1540
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.816495774355362e-06,
+ "loss": 0.6611,
+ "step": 1541
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.804237652714334e-06,
+ "loss": 0.6855,
+ "step": 1542
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.7919848769523786e-06,
+ "loss": 0.695,
+ "step": 1543
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.779737467565974e-06,
+ "loss": 0.7267,
+ "step": 1544
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.767495445042618e-06,
+ "loss": 0.6369,
+ "step": 1545
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.755258829860791e-06,
+ "loss": 0.6477,
+ "step": 1546
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.743027642489935e-06,
+ "loss": 0.6843,
+ "step": 1547
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.730801903390412e-06,
+ "loss": 0.6827,
+ "step": 1548
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.718581633013465e-06,
+ "loss": 0.6704,
+ "step": 1549
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.706366851801194e-06,
+ "loss": 0.6607,
+ "step": 1550
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.694157580186516e-06,
+ "loss": 0.6457,
+ "step": 1551
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.681953838593125e-06,
+ "loss": 0.676,
+ "step": 1552
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.669755647435474e-06,
+ "loss": 0.6307,
+ "step": 1553
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.657563027118732e-06,
+ "loss": 0.619,
+ "step": 1554
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.645375998038737e-06,
+ "loss": 0.7404,
+ "step": 1555
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.633194580581978e-06,
+ "loss": 0.6862,
+ "step": 1556
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.621018795125569e-06,
+ "loss": 0.6284,
+ "step": 1557
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.6088486620371905e-06,
+ "loss": 0.7825,
+ "step": 1558
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.596684201675069e-06,
+ "loss": 0.7164,
+ "step": 1559
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.584525434387945e-06,
+ "loss": 0.6827,
+ "step": 1560
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.572372380515031e-06,
+ "loss": 0.7518,
+ "step": 1561
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.560225060385992e-06,
+ "loss": 0.693,
+ "step": 1562
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.54808349432089e-06,
+ "loss": 0.6737,
+ "step": 1563
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.535947702630168e-06,
+ "loss": 0.7278,
+ "step": 1564
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.523817705614602e-06,
+ "loss": 0.5694,
+ "step": 1565
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.51169352356529e-06,
+ "loss": 0.675,
+ "step": 1566
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.499575176763591e-06,
+ "loss": 0.6165,
+ "step": 1567
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.487462685481103e-06,
+ "loss": 0.6449,
+ "step": 1568
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.4753560699796305e-06,
+ "loss": 0.6909,
+ "step": 1569
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.4632553505111484e-06,
+ "loss": 0.6852,
+ "step": 1570
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.451160547317777e-06,
+ "loss": 0.7175,
+ "step": 1571
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.439071680631725e-06,
+ "loss": 0.6935,
+ "step": 1572
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.4269887706752845e-06,
+ "loss": 0.7277,
+ "step": 1573
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.414911837660768e-06,
+ "loss": 0.7027,
+ "step": 1574
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.402840901790508e-06,
+ "loss": 0.6911,
+ "step": 1575
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.39077598325679e-06,
+ "loss": 0.6798,
+ "step": 1576
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.378717102241842e-06,
+ "loss": 0.6847,
+ "step": 1577
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.366664278917789e-06,
+ "loss": 0.791,
+ "step": 1578
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.3546175334466185e-06,
+ "loss": 0.7089,
+ "step": 1579
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.342576885980165e-06,
+ "loss": 0.7068,
+ "step": 1580
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.330542356660047e-06,
+ "loss": 0.6148,
+ "step": 1581
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.318513965617657e-06,
+ "loss": 0.6561,
+ "step": 1582
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.306491732974111e-06,
+ "loss": 0.6494,
+ "step": 1583
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.2944756788402394e-06,
+ "loss": 0.6708,
+ "step": 1584
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.282465823316519e-06,
+ "loss": 0.6832,
+ "step": 1585
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.270462186493071e-06,
+ "loss": 0.6409,
+ "step": 1586
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.2584647884496034e-06,
+ "loss": 0.6764,
+ "step": 1587
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.246473649255391e-06,
+ "loss": 0.7305,
+ "step": 1588
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.234488788969251e-06,
+ "loss": 0.6697,
+ "step": 1589
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.22251022763948e-06,
+ "loss": 0.7109,
+ "step": 1590
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.2105379853038436e-06,
+ "loss": 0.6964,
+ "step": 1591
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.19857208198954e-06,
+ "loss": 0.6269,
+ "step": 1592
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.186612537713164e-06,
+ "loss": 0.754,
+ "step": 1593
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.1746593724806666e-06,
+ "loss": 0.6539,
+ "step": 1594
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.162712606287335e-06,
+ "loss": 0.6418,
+ "step": 1595
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.150772259117748e-06,
+ "loss": 0.6839,
+ "step": 1596
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.138838350945744e-06,
+ "loss": 0.6837,
+ "step": 1597
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.126910901734397e-06,
+ "loss": 0.6584,
+ "step": 1598
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.114989931435976e-06,
+ "loss": 0.7038,
+ "step": 1599
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.103075459991905e-06,
+ "loss": 0.7096,
+ "step": 1600
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.0911675073327385e-06,
+ "loss": 0.7311,
+ "step": 1601
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.079266093378138e-06,
+ "loss": 0.6688,
+ "step": 1602
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.067371238036812e-06,
+ "loss": 0.7097,
+ "step": 1603
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.055482961206503e-06,
+ "loss": 0.7395,
+ "step": 1604
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.043601282773952e-06,
+ "loss": 0.7321,
+ "step": 1605
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.031726222614855e-06,
+ "loss": 0.6441,
+ "step": 1606
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.019857800593847e-06,
+ "loss": 0.6452,
+ "step": 1607
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.007996036564454e-06,
+ "loss": 0.6343,
+ "step": 1608
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.9961409503690605e-06,
+ "loss": 0.685,
+ "step": 1609
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.984292561838881e-06,
+ "loss": 0.7425,
+ "step": 1610
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.9724508907939345e-06,
+ "loss": 0.636,
+ "step": 1611
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.960615957042999e-06,
+ "loss": 0.6321,
+ "step": 1612
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.948787780383574e-06,
+ "loss": 0.6038,
+ "step": 1613
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.936966380601867e-06,
+ "loss": 0.6841,
+ "step": 1614
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.92515177747274e-06,
+ "loss": 0.6423,
+ "step": 1615
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.913343990759695e-06,
+ "loss": 0.7573,
+ "step": 1616
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.901543040214823e-06,
+ "loss": 0.7056,
+ "step": 1617
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.889748945578788e-06,
+ "loss": 0.7311,
+ "step": 1618
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.877961726580773e-06,
+ "loss": 0.6658,
+ "step": 1619
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.866181402938469e-06,
+ "loss": 0.6676,
+ "step": 1620
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.854407994358037e-06,
+ "loss": 0.6844,
+ "step": 1621
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.842641520534055e-06,
+ "loss": 0.7094,
+ "step": 1622
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.830882001149517e-06,
+ "loss": 0.714,
+ "step": 1623
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.819129455875764e-06,
+ "loss": 0.6831,
+ "step": 1624
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.8073839043724935e-06,
+ "loss": 0.6503,
+ "step": 1625
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.7956453662876935e-06,
+ "loss": 0.6665,
+ "step": 1626
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.783913861257611e-06,
+ "loss": 0.6718,
+ "step": 1627
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.772189408906739e-06,
+ "loss": 0.6557,
+ "step": 1628
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.760472028847773e-06,
+ "loss": 0.7091,
+ "step": 1629
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.748761740681573e-06,
+ "loss": 0.687,
+ "step": 1630
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.737058563997138e-06,
+ "loss": 0.6244,
+ "step": 1631
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.725362518371576e-06,
+ "loss": 0.6176,
+ "step": 1632
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.713673623370046e-06,
+ "loss": 0.7334,
+ "step": 1633
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.7019918985457745e-06,
+ "loss": 0.6967,
+ "step": 1634
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.6903173634399785e-06,
+ "loss": 0.7089,
+ "step": 1635
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.678650037581841e-06,
+ "loss": 0.6097,
+ "step": 1636
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.666989940488496e-06,
+ "loss": 0.6915,
+ "step": 1637
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.655337091664985e-06,
+ "loss": 0.6734,
+ "step": 1638
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.643691510604221e-06,
+ "loss": 0.6319,
+ "step": 1639
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.632053216786959e-06,
+ "loss": 0.6515,
+ "step": 1640
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.620422229681771e-06,
+ "loss": 0.665,
+ "step": 1641
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.608798568744989e-06,
+ "loss": 0.6528,
+ "step": 1642
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.597182253420712e-06,
+ "loss": 0.6214,
+ "step": 1643
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.585573303140741e-06,
+ "loss": 0.6524,
+ "step": 1644
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.573971737324551e-06,
+ "loss": 0.6888,
+ "step": 1645
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.56237757537927e-06,
+ "loss": 0.6338,
+ "step": 1646
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.550790836699642e-06,
+ "loss": 0.6045,
+ "step": 1647
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.539211540667992e-06,
+ "loss": 0.6497,
+ "step": 1648
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.527639706654195e-06,
+ "loss": 0.7128,
+ "step": 1649
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.516075354015646e-06,
+ "loss": 0.7082,
+ "step": 1650
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.504518502097212e-06,
+ "loss": 0.7198,
+ "step": 1651
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.4929691702312395e-06,
+ "loss": 0.6538,
+ "step": 1652
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.481427377737465e-06,
+ "loss": 0.6431,
+ "step": 1653
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.4698931439230314e-06,
+ "loss": 0.6853,
+ "step": 1654
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.458366488082436e-06,
+ "loss": 0.6935,
+ "step": 1655
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.446847429497494e-06,
+ "loss": 0.6817,
+ "step": 1656
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.435335987437316e-06,
+ "loss": 0.7473,
+ "step": 1657
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.423832181158274e-06,
+ "loss": 0.672,
+ "step": 1658
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.412336029903956e-06,
+ "loss": 0.7273,
+ "step": 1659
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.400847552905152e-06,
+ "loss": 0.7061,
+ "step": 1660
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.3893667693798245e-06,
+ "loss": 0.7223,
+ "step": 1661
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.377893698533048e-06,
+ "loss": 0.7037,
+ "step": 1662
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.366428359557003e-06,
+ "loss": 0.6729,
+ "step": 1663
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.354970771630945e-06,
+ "loss": 0.6661,
+ "step": 1664
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.34352095392114e-06,
+ "loss": 0.6196,
+ "step": 1665
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.332078925580883e-06,
+ "loss": 0.6468,
+ "step": 1666
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.320644705750431e-06,
+ "loss": 0.7018,
+ "step": 1667
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.309218313556961e-06,
+ "loss": 0.6739,
+ "step": 1668
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.297799768114575e-06,
+ "loss": 0.6662,
+ "step": 1669
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.286389088524253e-06,
+ "loss": 0.615,
+ "step": 1670
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.274986293873796e-06,
+ "loss": 0.6793,
+ "step": 1671
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.263591403237831e-06,
+ "loss": 0.6108,
+ "step": 1672
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.252204435677763e-06,
+ "loss": 0.6396,
+ "step": 1673
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.2408254102417275e-06,
+ "loss": 0.6966,
+ "step": 1674
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.2294543459646e-06,
+ "loss": 0.6318,
+ "step": 1675
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.2180912618679195e-06,
+ "loss": 0.5945,
+ "step": 1676
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.206736176959877e-06,
+ "loss": 0.651,
+ "step": 1677
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.19538911023529e-06,
+ "loss": 0.7053,
+ "step": 1678
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.184050080675558e-06,
+ "loss": 0.7137,
+ "step": 1679
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.1727191072486385e-06,
+ "loss": 0.6287,
+ "step": 1680
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.161396208909012e-06,
+ "loss": 0.6476,
+ "step": 1681
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.150081404597654e-06,
+ "loss": 0.6565,
+ "step": 1682
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.138774713241983e-06,
+ "loss": 0.6768,
+ "step": 1683
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.127476153755873e-06,
+ "loss": 0.6285,
+ "step": 1684
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.116185745039585e-06,
+ "loss": 0.6962,
+ "step": 1685
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.10490350597973e-06,
+ "loss": 0.6208,
+ "step": 1686
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.093629455449273e-06,
+ "loss": 0.6568,
+ "step": 1687
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.082363612307471e-06,
+ "loss": 0.7386,
+ "step": 1688
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.071105995399852e-06,
+ "loss": 0.5893,
+ "step": 1689
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.059856623558189e-06,
+ "loss": 0.6948,
+ "step": 1690
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.0486155156004615e-06,
+ "loss": 0.637,
+ "step": 1691
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.0373826903308095e-06,
+ "loss": 0.6883,
+ "step": 1692
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.026158166539548e-06,
+ "loss": 0.6737,
+ "step": 1693
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.014941963003075e-06,
+ "loss": 0.6526,
+ "step": 1694
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.003734098483887e-06,
+ "loss": 0.6807,
+ "step": 1695
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.992534591730527e-06,
+ "loss": 0.6702,
+ "step": 1696
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.981343461477559e-06,
+ "loss": 0.7365,
+ "step": 1697
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.970160726445531e-06,
+ "loss": 0.6826,
+ "step": 1698
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.9589864053409545e-06,
+ "loss": 0.6753,
+ "step": 1699
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.9478205168562535e-06,
+ "loss": 0.7563,
+ "step": 1700
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.936663079669753e-06,
+ "loss": 0.6508,
+ "step": 1701
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.925514112445653e-06,
+ "loss": 0.744,
+ "step": 1702
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.914373633833962e-06,
+ "loss": 0.6786,
+ "step": 1703
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.9032416624705036e-06,
+ "loss": 0.7092,
+ "step": 1704
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.892118216976871e-06,
+ "loss": 0.6394,
+ "step": 1705
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.881003315960378e-06,
+ "loss": 0.7009,
+ "step": 1706
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.869896978014071e-06,
+ "loss": 0.6496,
+ "step": 1707
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.858799221716659e-06,
+ "loss": 0.6847,
+ "step": 1708
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.847710065632489e-06,
+ "loss": 0.6325,
+ "step": 1709
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.836629528311527e-06,
+ "loss": 0.7033,
+ "step": 1710
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.8255576282893355e-06,
+ "loss": 0.6784,
+ "step": 1711
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.814494384087004e-06,
+ "loss": 0.684,
+ "step": 1712
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.803439814211158e-06,
+ "loss": 0.6652,
+ "step": 1713
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.792393937153914e-06,
+ "loss": 0.6775,
+ "step": 1714
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.78135677139283e-06,
+ "loss": 0.7154,
+ "step": 1715
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.770328335390915e-06,
+ "loss": 0.7514,
+ "step": 1716
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.759308647596564e-06,
+ "loss": 0.6837,
+ "step": 1717
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.7482977264435305e-06,
+ "loss": 0.7465,
+ "step": 1718
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.737295590350913e-06,
+ "loss": 0.6432,
+ "step": 1719
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.726302257723113e-06,
+ "loss": 0.733,
+ "step": 1720
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.7153177469498045e-06,
+ "loss": 0.7405,
+ "step": 1721
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.704342076405902e-06,
+ "loss": 0.7309,
+ "step": 1722
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.6933752644515404e-06,
+ "loss": 0.7439,
+ "step": 1723
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.682417329432016e-06,
+ "loss": 0.6355,
+ "step": 1724
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.671468289677802e-06,
+ "loss": 0.6897,
+ "step": 1725
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.660528163504479e-06,
+ "loss": 0.7471,
+ "step": 1726
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.6495969692127084e-06,
+ "loss": 0.6558,
+ "step": 1727
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.6386747250882226e-06,
+ "loss": 0.6728,
+ "step": 1728
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.6277614494017784e-06,
+ "loss": 0.6643,
+ "step": 1729
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.616857160409131e-06,
+ "loss": 0.6779,
+ "step": 1730
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.605961876351001e-06,
+ "loss": 0.6482,
+ "step": 1731
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.595075615453049e-06,
+ "loss": 0.7287,
+ "step": 1732
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.584198395925829e-06,
+ "loss": 0.7293,
+ "step": 1733
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.5733302359647955e-06,
+ "loss": 0.7243,
+ "step": 1734
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.562471153750221e-06,
+ "loss": 0.6778,
+ "step": 1735
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.55162116744721e-06,
+ "loss": 0.7359,
+ "step": 1736
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.5407802952056435e-06,
+ "loss": 0.7185,
+ "step": 1737
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.529948555160162e-06,
+ "loss": 0.7164,
+ "step": 1738
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.519125965430125e-06,
+ "loss": 0.6615,
+ "step": 1739
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.508312544119592e-06,
+ "loss": 0.731,
+ "step": 1740
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.497508309317271e-06,
+ "loss": 0.7021,
+ "step": 1741
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.486713279096515e-06,
+ "loss": 0.6974,
+ "step": 1742
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.475927471515287e-06,
+ "loss": 0.6644,
+ "step": 1743
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.465150904616101e-06,
+ "loss": 0.613,
+ "step": 1744
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.454383596426028e-06,
+ "loss": 0.6468,
+ "step": 1745
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.443625564956648e-06,
+ "loss": 0.6952,
+ "step": 1746
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.432876828204022e-06,
+ "loss": 0.7141,
+ "step": 1747
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.4221374041486654e-06,
+ "loss": 0.6983,
+ "step": 1748
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.411407310755513e-06,
+ "loss": 0.7541,
+ "step": 1749
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.400686565973886e-06,
+ "loss": 0.6189,
+ "step": 1750
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.389975187737472e-06,
+ "loss": 0.6853,
+ "step": 1751
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.3792731939643015e-06,
+ "loss": 0.6722,
+ "step": 1752
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.368580602556687e-06,
+ "loss": 0.663,
+ "step": 1753
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.3578974314012225e-06,
+ "loss": 0.6789,
+ "step": 1754
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.347223698368748e-06,
+ "loss": 0.7243,
+ "step": 1755
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.336559421314298e-06,
+ "loss": 0.6248,
+ "step": 1756
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.325904618077114e-06,
+ "loss": 0.6798,
+ "step": 1757
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.315259306480575e-06,
+ "loss": 0.7271,
+ "step": 1758
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.304623504332178e-06,
+ "loss": 0.7423,
+ "step": 1759
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.293997229423523e-06,
+ "loss": 0.6645,
+ "step": 1760
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.283380499530269e-06,
+ "loss": 0.7318,
+ "step": 1761
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.2727733324121055e-06,
+ "loss": 0.6411,
+ "step": 1762
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.262175745812729e-06,
+ "loss": 0.6679,
+ "step": 1763
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.251587757459813e-06,
+ "loss": 0.6709,
+ "step": 1764
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.241009385064958e-06,
+ "loss": 0.7494,
+ "step": 1765
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.2304406463237026e-06,
+ "loss": 0.665,
+ "step": 1766
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.219881558915458e-06,
+ "loss": 0.7075,
+ "step": 1767
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.209332140503486e-06,
+ "loss": 0.6923,
+ "step": 1768
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.198792408734882e-06,
+ "loss": 0.6472,
+ "step": 1769
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.1882623812405345e-06,
+ "loss": 0.631,
+ "step": 1770
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.1777420756351e-06,
+ "loss": 0.656,
+ "step": 1771
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.1672315095169725e-06,
+ "loss": 0.6599,
+ "step": 1772
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.1567307004682546e-06,
+ "loss": 0.6057,
+ "step": 1773
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.146239666054716e-06,
+ "loss": 0.6792,
+ "step": 1774
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.135758423825798e-06,
+ "loss": 0.7212,
+ "step": 1775
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.125286991314538e-06,
+ "loss": 0.6596,
+ "step": 1776
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.1148253860375764e-06,
+ "loss": 0.7179,
+ "step": 1777
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.104373625495114e-06,
+ "loss": 0.6403,
+ "step": 1778
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.09393172717088e-06,
+ "loss": 0.636,
+ "step": 1779
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.083499708532108e-06,
+ "loss": 0.6884,
+ "step": 1780
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.073077587029509e-06,
+ "loss": 0.6418,
+ "step": 1781
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.0626653800972236e-06,
+ "loss": 0.7026,
+ "step": 1782
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.0522631051528205e-06,
+ "loss": 0.6739,
+ "step": 1783
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.0418707795972575e-06,
+ "loss": 0.6537,
+ "step": 1784
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.031488420814834e-06,
+ "loss": 0.7288,
+ "step": 1785
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.021116046173189e-06,
+ "loss": 0.6679,
+ "step": 1786
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.010753673023254e-06,
+ "loss": 0.5788,
+ "step": 1787
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.000401318699236e-06,
+ "loss": 0.7004,
+ "step": 1788
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 3.9900590005185735e-06,
+ "loss": 0.6704,
+ "step": 1789
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 3.9797267357819294e-06,
+ "loss": 0.7152,
+ "step": 1790
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 3.969404541773132e-06,
+ "loss": 0.672,
+ "step": 1791
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.959092435759174e-06,
+ "loss": 0.6921,
+ "step": 1792
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.9487904349901815e-06,
+ "loss": 0.6828,
+ "step": 1793
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.938498556699357e-06,
+ "loss": 0.6523,
+ "step": 1794
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.928216818102982e-06,
+ "loss": 0.7139,
+ "step": 1795
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.917945236400374e-06,
+ "loss": 0.6329,
+ "step": 1796
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.907683828773861e-06,
+ "loss": 0.6907,
+ "step": 1797
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.897432612388752e-06,
+ "loss": 0.6982,
+ "step": 1798
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.8871916043933065e-06,
+ "loss": 0.6181,
+ "step": 1799
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.876960821918704e-06,
+ "loss": 0.6547,
+ "step": 1800
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.866740282079023e-06,
+ "loss": 0.683,
+ "step": 1801
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.8565300019712115e-06,
+ "loss": 0.6881,
+ "step": 1802
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.846329998675048e-06,
+ "loss": 0.7059,
+ "step": 1803
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.836140289253124e-06,
+ "loss": 0.5927,
+ "step": 1804
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.8259608907508105e-06,
+ "loss": 0.6194,
+ "step": 1805
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.81579182019623e-06,
+ "loss": 0.7468,
+ "step": 1806
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.8056330946002283e-06,
+ "loss": 0.7278,
+ "step": 1807
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7954847309563504e-06,
+ "loss": 0.7077,
+ "step": 1808
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7853467462407966e-06,
+ "loss": 0.6385,
+ "step": 1809
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7752191574124176e-06,
+ "loss": 0.6741,
+ "step": 1810
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7651019814126656e-06,
+ "loss": 0.6805,
+ "step": 1811
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7549952351655795e-06,
+ "loss": 0.6842,
+ "step": 1812
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7448989355777497e-06,
+ "loss": 0.6737,
+ "step": 1813
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.734813099538289e-06,
+ "loss": 0.6904,
+ "step": 1814
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.724737743918809e-06,
+ "loss": 0.6701,
+ "step": 1815
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7146728855733947e-06,
+ "loss": 0.7059,
+ "step": 1816
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.7046185413385573e-06,
+ "loss": 0.6716,
+ "step": 1817
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.694574728033231e-06,
+ "loss": 0.7257,
+ "step": 1818
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6845414624587326e-06,
+ "loss": 0.6694,
+ "step": 1819
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.674518761398733e-06,
+ "loss": 0.5989,
+ "step": 1820
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.664506641619232e-06,
+ "loss": 0.6673,
+ "step": 1821
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.65450511986853e-06,
+ "loss": 0.6542,
+ "step": 1822
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.644514212877187e-06,
+ "loss": 0.6736,
+ "step": 1823
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6345339373580267e-06,
+ "loss": 0.6751,
+ "step": 1824
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.62456431000608e-06,
+ "loss": 0.7007,
+ "step": 1825
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6146053474985565e-06,
+ "loss": 0.7167,
+ "step": 1826
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6046570664948357e-06,
+ "loss": 0.7238,
+ "step": 1827
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5947194836364264e-06,
+ "loss": 0.7089,
+ "step": 1828
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5847926155469427e-06,
+ "loss": 0.7119,
+ "step": 1829
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.574876478832072e-06,
+ "loss": 0.5882,
+ "step": 1830
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5649710900795564e-06,
+ "loss": 0.6866,
+ "step": 1831
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5550764658591486e-06,
+ "loss": 0.7146,
+ "step": 1832
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5451926227225997e-06,
+ "loss": 0.6344,
+ "step": 1833
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5353195772036352e-06,
+ "loss": 0.6955,
+ "step": 1834
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.525457345817902e-06,
+ "loss": 0.6149,
+ "step": 1835
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5156059450629677e-06,
+ "loss": 0.698,
+ "step": 1836
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5057653914182787e-06,
+ "loss": 0.6634,
+ "step": 1837
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.4959357013451368e-06,
+ "loss": 0.689,
+ "step": 1838
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.486116891286674e-06,
+ "loss": 0.7024,
+ "step": 1839
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.4763089776678206e-06,
+ "loss": 0.6366,
+ "step": 1840
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.4665119768952736e-06,
+ "loss": 0.6924,
+ "step": 1841
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.456725905357482e-06,
+ "loss": 0.6439,
+ "step": 1842
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.4469507794246105e-06,
+ "loss": 0.6317,
+ "step": 1843
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.4371866154485147e-06,
+ "loss": 0.6748,
+ "step": 1844
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.42743342976271e-06,
+ "loss": 0.6984,
+ "step": 1845
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.417691238682351e-06,
+ "loss": 0.6212,
+ "step": 1846
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.4079600585041993e-06,
+ "loss": 0.6976,
+ "step": 1847
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3982399055065953e-06,
+ "loss": 0.6835,
+ "step": 1848
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3885307959494374e-06,
+ "loss": 0.6526,
+ "step": 1849
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.378832746074143e-06,
+ "loss": 0.679,
+ "step": 1850
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.369145772103637e-06,
+ "loss": 0.7106,
+ "step": 1851
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.359469890242313e-06,
+ "loss": 0.6389,
+ "step": 1852
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.349805116676008e-06,
+ "loss": 0.6614,
+ "step": 1853
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3401514675719815e-06,
+ "loss": 0.6049,
+ "step": 1854
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3305089590788796e-06,
+ "loss": 0.7253,
+ "step": 1855
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3208776073267146e-06,
+ "loss": 0.6714,
+ "step": 1856
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3112574284268395e-06,
+ "loss": 0.6449,
+ "step": 1857
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.301648438471905e-06,
+ "loss": 0.652,
+ "step": 1858
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2920506535358566e-06,
+ "loss": 0.6559,
+ "step": 1859
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2824640896738935e-06,
+ "loss": 0.6817,
+ "step": 1860
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2728887629224415e-06,
+ "loss": 0.7093,
+ "step": 1861
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.263324689299132e-06,
+ "loss": 0.6716,
+ "step": 1862
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2537718848027744e-06,
+ "loss": 0.6428,
+ "step": 1863
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2442303654133124e-06,
+ "loss": 0.617,
+ "step": 1864
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.234700147091834e-06,
+ "loss": 0.6095,
+ "step": 1865
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.22518124578051e-06,
+ "loss": 0.7219,
+ "step": 1866
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.2156736774025766e-06,
+ "loss": 0.7127,
+ "step": 1867
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.206177457862321e-06,
+ "loss": 0.6998,
+ "step": 1868
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1966926030450408e-06,
+ "loss": 0.7302,
+ "step": 1869
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.187219128817025e-06,
+ "loss": 0.6725,
+ "step": 1870
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1777570510255262e-06,
+ "loss": 0.7287,
+ "step": 1871
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.168306385498733e-06,
+ "loss": 0.6571,
+ "step": 1872
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1588671480457324e-06,
+ "loss": 0.7383,
+ "step": 1873
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.149439354456514e-06,
+ "loss": 0.601,
+ "step": 1874
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1400230205019124e-06,
+ "loss": 0.6774,
+ "step": 1875
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1306181619335894e-06,
+ "loss": 0.7075,
+ "step": 1876
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.121224794484019e-06,
+ "loss": 0.6541,
+ "step": 1877
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1118429338664467e-06,
+ "loss": 0.6125,
+ "step": 1878
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.102472595774875e-06,
+ "loss": 0.5928,
+ "step": 1879
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.093113795884026e-06,
+ "loss": 0.6152,
+ "step": 1880
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0837665498493273e-06,
+ "loss": 0.6881,
+ "step": 1881
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.074430873306865e-06,
+ "loss": 0.6164,
+ "step": 1882
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0651067818733948e-06,
+ "loss": 0.7106,
+ "step": 1883
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.05579429114627e-06,
+ "loss": 0.6965,
+ "step": 1884
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.046493416703452e-06,
+ "loss": 0.6782,
+ "step": 1885
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0372041741034642e-06,
+ "loss": 0.6043,
+ "step": 1886
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0279265788853763e-06,
+ "loss": 0.6622,
+ "step": 1887
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0186606465687705e-06,
+ "loss": 0.6443,
+ "step": 1888
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0094063926537233e-06,
+ "loss": 0.6948,
+ "step": 1889
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.000163832620775e-06,
+ "loss": 0.6135,
+ "step": 1890
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 2.990932981930893e-06,
+ "loss": 0.5845,
+ "step": 1891
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.98171385602548e-06,
+ "loss": 0.6191,
+ "step": 1892
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.972506470326303e-06,
+ "loss": 0.5917,
+ "step": 1893
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9633108402355027e-06,
+ "loss": 0.6844,
+ "step": 1894
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.95412698113555e-06,
+ "loss": 0.7218,
+ "step": 1895
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.944954908389229e-06,
+ "loss": 0.7332,
+ "step": 1896
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.935794637339605e-06,
+ "loss": 0.6881,
+ "step": 1897
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9266461833100047e-06,
+ "loss": 0.7255,
+ "step": 1898
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.917509561603977e-06,
+ "loss": 0.7321,
+ "step": 1899
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9083847875052894e-06,
+ "loss": 0.6769,
+ "step": 1900
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.899271876277887e-06,
+ "loss": 0.6589,
+ "step": 1901
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.890170843165868e-06,
+ "loss": 0.6381,
+ "step": 1902
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.881081703393466e-06,
+ "loss": 0.6986,
+ "step": 1903
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8720044721650155e-06,
+ "loss": 0.6913,
+ "step": 1904
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.862939164664924e-06,
+ "loss": 0.7015,
+ "step": 1905
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8538857960576694e-06,
+ "loss": 0.6056,
+ "step": 1906
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.844844381487748e-06,
+ "loss": 0.6643,
+ "step": 1907
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.835814936079655e-06,
+ "loss": 0.7235,
+ "step": 1908
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8267974749378714e-06,
+ "loss": 0.5879,
+ "step": 1909
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8177920131468273e-06,
+ "loss": 0.6432,
+ "step": 1910
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.808798565770883e-06,
+ "loss": 0.6856,
+ "step": 1911
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.799817147854299e-06,
+ "loss": 0.643,
+ "step": 1912
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.790847774421215e-06,
+ "loss": 0.6152,
+ "step": 1913
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.7818904604756125e-06,
+ "loss": 0.6932,
+ "step": 1914
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.7729452210013173e-06,
+ "loss": 0.6679,
+ "step": 1915
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.764012070961948e-06,
+ "loss": 0.6698,
+ "step": 1916
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7550910253008933e-06,
+ "loss": 0.686,
+ "step": 1917
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7461820989413036e-06,
+ "loss": 0.6662,
+ "step": 1918
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.737285306786053e-06,
+ "loss": 0.7485,
+ "step": 1919
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7284006637177164e-06,
+ "loss": 0.6952,
+ "step": 1920
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7195281845985467e-06,
+ "loss": 0.6484,
+ "step": 1921
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7106678842704514e-06,
+ "loss": 0.6808,
+ "step": 1922
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.701819777554956e-06,
+ "loss": 0.6061,
+ "step": 1923
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6929838792532035e-06,
+ "loss": 0.6654,
+ "step": 1924
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.684160204145899e-06,
+ "loss": 0.6211,
+ "step": 1925
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.675348766993311e-06,
+ "loss": 0.6743,
+ "step": 1926
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.666549582535233e-06,
+ "loss": 0.6841,
+ "step": 1927
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6577626654909617e-06,
+ "loss": 0.6683,
+ "step": 1928
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.648988030559274e-06,
+ "loss": 0.6777,
+ "step": 1929
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.640225692418401e-06,
+ "loss": 0.712,
+ "step": 1930
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6314756657260053e-06,
+ "loss": 0.691,
+ "step": 1931
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.622737965119145e-06,
+ "loss": 0.6649,
+ "step": 1932
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6140126052142788e-06,
+ "loss": 0.7481,
+ "step": 1933
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6052996006072008e-06,
+ "loss": 0.5248,
+ "step": 1934
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5965989658730485e-06,
+ "loss": 0.6534,
+ "step": 1935
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5879107155662676e-06,
+ "loss": 0.6495,
+ "step": 1936
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5792348642205823e-06,
+ "loss": 0.6137,
+ "step": 1937
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.570571426348978e-06,
+ "loss": 0.6586,
+ "step": 1938
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5619204164436796e-06,
+ "loss": 0.6417,
+ "step": 1939
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.553281848976109e-06,
+ "loss": 0.7309,
+ "step": 1940
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.544655738396885e-06,
+ "loss": 0.628,
+ "step": 1941
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.536042099135795e-06,
+ "loss": 0.6734,
+ "step": 1942
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.527440945601747e-06,
+ "loss": 0.6809,
+ "step": 1943
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.518852292182774e-06,
+ "loss": 0.5833,
+ "step": 1944
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.5102761532460005e-06,
+ "loss": 0.6427,
+ "step": 1945
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.501712543137602e-06,
+ "loss": 0.6987,
+ "step": 1946
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4931614761828182e-06,
+ "loss": 0.7146,
+ "step": 1947
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.484622966685892e-06,
+ "loss": 0.6451,
+ "step": 1948
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.476097028930058e-06,
+ "loss": 0.6099,
+ "step": 1949
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4675836771775265e-06,
+ "loss": 0.6228,
+ "step": 1950
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4590829256694615e-06,
+ "loss": 0.6676,
+ "step": 1951
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.450594788625932e-06,
+ "loss": 0.6932,
+ "step": 1952
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.442119280245917e-06,
+ "loss": 0.6441,
+ "step": 1953
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.433656414707272e-06,
+ "loss": 0.6197,
+ "step": 1954
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.425206206166688e-06,
+ "loss": 0.7197,
+ "step": 1955
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.416768668759705e-06,
+ "loss": 0.634,
+ "step": 1956
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.408343816600657e-06,
+ "loss": 0.6447,
+ "step": 1957
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3999316637826486e-06,
+ "loss": 0.7257,
+ "step": 1958
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3915322243775564e-06,
+ "loss": 0.6977,
+ "step": 1959
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3831455124359793e-06,
+ "loss": 0.6331,
+ "step": 1960
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.374771541987232e-06,
+ "loss": 0.7292,
+ "step": 1961
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3664103270393123e-06,
+ "loss": 0.6897,
+ "step": 1962
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.358061881578885e-06,
+ "loss": 0.6355,
+ "step": 1963
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.349726219571239e-06,
+ "loss": 0.6194,
+ "step": 1964
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3414033549603022e-06,
+ "loss": 0.6389,
+ "step": 1965
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3330933016685753e-06,
+ "loss": 0.6348,
+ "step": 1966
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.3247960735971385e-06,
+ "loss": 0.6179,
+ "step": 1967
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.3165116846256154e-06,
+ "loss": 0.6648,
+ "step": 1968
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.308240148612152e-06,
+ "loss": 0.6234,
+ "step": 1969
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.299981479393393e-06,
+ "loss": 0.7046,
+ "step": 1970
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.291735690784461e-06,
+ "loss": 0.6598,
+ "step": 1971
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2835027965789345e-06,
+ "loss": 0.6211,
+ "step": 1972
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.275282810548811e-06,
+ "loss": 0.6727,
+ "step": 1973
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2670757464445146e-06,
+ "loss": 0.5755,
+ "step": 1974
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2588816179948324e-06,
+ "loss": 0.6312,
+ "step": 1975
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2507004389069267e-06,
+ "loss": 0.6871,
+ "step": 1976
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2425322228662914e-06,
+ "loss": 0.6115,
+ "step": 1977
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2343769835367425e-06,
+ "loss": 0.6683,
+ "step": 1978
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.226234734560381e-06,
+ "loss": 0.7137,
+ "step": 1979
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2181054895575847e-06,
+ "loss": 0.7133,
+ "step": 1980
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2099892621269705e-06,
+ "loss": 0.68,
+ "step": 1981
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.20188606584538e-06,
+ "loss": 0.5903,
+ "step": 1982
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.193795914267871e-06,
+ "loss": 0.6412,
+ "step": 1983
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1857188209276605e-06,
+ "loss": 0.702,
+ "step": 1984
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1776547993361306e-06,
+ "loss": 0.7083,
+ "step": 1985
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.169603862982801e-06,
+ "loss": 0.6156,
+ "step": 1986
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1615660253352888e-06,
+ "loss": 0.6911,
+ "step": 1987
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.153541299839318e-06,
+ "loss": 0.7508,
+ "step": 1988
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.145529699918666e-06,
+ "loss": 0.6208,
+ "step": 1989
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1375312389751546e-06,
+ "loss": 0.6472,
+ "step": 1990
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.129545930388627e-06,
+ "loss": 0.623,
+ "step": 1991
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.121573787516936e-06,
+ "loss": 0.6577,
+ "step": 1992
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.113614823695892e-06,
+ "loss": 0.592,
+ "step": 1993
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.105669052239274e-06,
+ "loss": 0.6801,
+ "step": 1994
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0977364864387896e-06,
+ "loss": 0.7309,
+ "step": 1995
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0898171395640463e-06,
+ "loss": 0.7216,
+ "step": 1996
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.081911024862554e-06,
+ "loss": 0.6031,
+ "step": 1997
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.074018155559683e-06,
+ "loss": 0.6042,
+ "step": 1998
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.066138544858637e-06,
+ "loss": 0.6504,
+ "step": 1999
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0582722059404515e-06,
+ "loss": 0.6789,
+ "step": 2000
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.050419151963957e-06,
+ "loss": 0.7141,
+ "step": 2001
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0425793960657623e-06,
+ "loss": 0.6731,
+ "step": 2002
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0347529513602305e-06,
+ "loss": 0.612,
+ "step": 2003
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0269398309394585e-06,
+ "loss": 0.6617,
+ "step": 2004
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.019140047873246e-06,
+ "loss": 0.6636,
+ "step": 2005
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0113536152091006e-06,
+ "loss": 0.6444,
+ "step": 2006
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0035805459721768e-06,
+ "loss": 0.6037,
+ "step": 2007
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9958208531652876e-06,
+ "loss": 0.6465,
+ "step": 2008
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9880745497688658e-06,
+ "loss": 0.7299,
+ "step": 2009
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9803416487409465e-06,
+ "loss": 0.7021,
+ "step": 2010
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.972622163017145e-06,
+ "loss": 0.6421,
+ "step": 2011
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.964916105510637e-06,
+ "loss": 0.6207,
+ "step": 2012
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9572234891121354e-06,
+ "loss": 0.6576,
+ "step": 2013
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9495443266898615e-06,
+ "loss": 0.7114,
+ "step": 2014
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9418786310895467e-06,
+ "loss": 0.6801,
+ "step": 2015
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.934226415134377e-06,
+ "loss": 0.6593,
+ "step": 2016
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.9265876916250026e-06,
+ "loss": 0.7132,
+ "step": 2017
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.918962473339496e-06,
+ "loss": 0.6167,
+ "step": 2018
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.9113507730333435e-06,
+ "loss": 0.6605,
+ "step": 2019
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.9037526034394149e-06,
+ "loss": 0.656,
+ "step": 2020
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8961679772679486e-06,
+ "loss": 0.7214,
+ "step": 2021
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8885969072065224e-06,
+ "loss": 0.6643,
+ "step": 2022
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.88103940592004e-06,
+ "loss": 0.7082,
+ "step": 2023
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.873495486050716e-06,
+ "loss": 0.5961,
+ "step": 2024
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8659651602180295e-06,
+ "loss": 0.6705,
+ "step": 2025
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.858448441018732e-06,
+ "loss": 0.7354,
+ "step": 2026
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8509453410268086e-06,
+ "loss": 0.6732,
+ "step": 2027
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8434558727934636e-06,
+ "loss": 0.725,
+ "step": 2028
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.835980048847098e-06,
+ "loss": 0.6441,
+ "step": 2029
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8285178816932913e-06,
+ "loss": 0.6889,
+ "step": 2030
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8210693838147686e-06,
+ "loss": 0.6496,
+ "step": 2031
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8136345676713962e-06,
+ "loss": 0.6006,
+ "step": 2032
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8062134457001612e-06,
+ "loss": 0.6977,
+ "step": 2033
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7988060303151256e-06,
+ "loss": 0.6421,
+ "step": 2034
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7914123339074351e-06,
+ "loss": 0.6494,
+ "step": 2035
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7840323688452833e-06,
+ "loss": 0.7305,
+ "step": 2036
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7766661474738933e-06,
+ "loss": 0.6692,
+ "step": 2037
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7693136821154989e-06,
+ "loss": 0.682,
+ "step": 2038
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7619749850693235e-06,
+ "loss": 0.6282,
+ "step": 2039
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7546500686115542e-06,
+ "loss": 0.7179,
+ "step": 2040
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7473389449953304e-06,
+ "loss": 0.695,
+ "step": 2041
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7400416264507192e-06,
+ "loss": 0.6979,
+ "step": 2042
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7327581251846902e-06,
+ "loss": 0.6697,
+ "step": 2043
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7254884533811034e-06,
+ "loss": 0.6546,
+ "step": 2044
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7182326232006875e-06,
+ "loss": 0.6974,
+ "step": 2045
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7109906467810033e-06,
+ "loss": 0.7039,
+ "step": 2046
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7037625362364585e-06,
+ "loss": 0.6412,
+ "step": 2047
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6965483036582454e-06,
+ "loss": 0.6686,
+ "step": 2048
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6893479611143527e-06,
+ "loss": 0.6928,
+ "step": 2049
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6821615206495313e-06,
+ "loss": 0.736,
+ "step": 2050
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.674988994285276e-06,
+ "loss": 0.6696,
+ "step": 2051
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.667830394019807e-06,
+ "loss": 0.654,
+ "step": 2052
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6606857318280478e-06,
+ "loss": 0.7111,
+ "step": 2053
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6535550196616102e-06,
+ "loss": 0.6551,
+ "step": 2054
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6464382694487579e-06,
+ "loss": 0.5837,
+ "step": 2055
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6393354930944194e-06,
+ "loss": 0.7418,
+ "step": 2056
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6322467024801282e-06,
+ "loss": 0.6975,
+ "step": 2057
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6251719094640328e-06,
+ "loss": 0.6251,
+ "step": 2058
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6181111258808658e-06,
+ "loss": 0.6403,
+ "step": 2059
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.611064363541922e-06,
+ "loss": 0.6604,
+ "step": 2060
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6040316342350425e-06,
+ "loss": 0.6438,
+ "step": 2061
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.597012949724598e-06,
+ "loss": 0.5204,
+ "step": 2062
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.590008321751454e-06,
+ "loss": 0.7092,
+ "step": 2063
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.5830177620329712e-06,
+ "loss": 0.6708,
+ "step": 2064
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.5760412822629822e-06,
+ "loss": 0.6809,
+ "step": 2065
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.5690788941117508e-06,
+ "loss": 0.6494,
+ "step": 2066
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5621306092259804e-06,
+ "loss": 0.6745,
+ "step": 2067
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5551964392287788e-06,
+ "loss": 0.6433,
+ "step": 2068
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5482763957196423e-06,
+ "loss": 0.6314,
+ "step": 2069
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5413704902744364e-06,
+ "loss": 0.6592,
+ "step": 2070
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5344787344453803e-06,
+ "loss": 0.7146,
+ "step": 2071
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5276011397610136e-06,
+ "loss": 0.6831,
+ "step": 2072
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.520737717726195e-06,
+ "loss": 0.5945,
+ "step": 2073
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5138884798220798e-06,
+ "loss": 0.6536,
+ "step": 2074
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.507053437506083e-06,
+ "loss": 0.7101,
+ "step": 2075
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.500232602211883e-06,
+ "loss": 0.682,
+ "step": 2076
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4934259853493883e-06,
+ "loss": 0.7251,
+ "step": 2077
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4866335983047264e-06,
+ "loss": 0.7008,
+ "step": 2078
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4798554524402176e-06,
+ "loss": 0.6074,
+ "step": 2079
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4730915590943628e-06,
+ "loss": 0.7027,
+ "step": 2080
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.466341929581816e-06,
+ "loss": 0.6206,
+ "step": 2081
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4596065751933763e-06,
+ "loss": 0.6774,
+ "step": 2082
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.452885507195959e-06,
+ "loss": 0.69,
+ "step": 2083
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4461787368325863e-06,
+ "loss": 0.6903,
+ "step": 2084
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.439486275322357e-06,
+ "loss": 0.6897,
+ "step": 2085
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4328081338604383e-06,
+ "loss": 0.7265,
+ "step": 2086
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4261443236180418e-06,
+ "loss": 0.7098,
+ "step": 2087
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4194948557424083e-06,
+ "loss": 0.6815,
+ "step": 2088
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.412859741356778e-06,
+ "loss": 0.6816,
+ "step": 2089
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4062389915603903e-06,
+ "loss": 0.6108,
+ "step": 2090
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.3996326174284502e-06,
+ "loss": 0.6165,
+ "step": 2091
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.393040630012118e-06,
+ "loss": 0.6931,
+ "step": 2092
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.386463040338485e-06,
+ "loss": 0.671,
+ "step": 2093
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3798998594105606e-06,
+ "loss": 0.7149,
+ "step": 2094
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3733510982072485e-06,
+ "loss": 0.6887,
+ "step": 2095
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3668167676833332e-06,
+ "loss": 0.6379,
+ "step": 2096
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3602968787694615e-06,
+ "loss": 0.6807,
+ "step": 2097
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3537914423721144e-06,
+ "loss": 0.6977,
+ "step": 2098
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3473004693736037e-06,
+ "loss": 0.5973,
+ "step": 2099
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3408239706320459e-06,
+ "loss": 0.7183,
+ "step": 2100
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3343619569813437e-06,
+ "loss": 0.6928,
+ "step": 2101
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3279144392311693e-06,
+ "loss": 0.6042,
+ "step": 2102
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3214814281669486e-06,
+ "loss": 0.5753,
+ "step": 2103
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.31506293454983e-06,
+ "loss": 0.657,
+ "step": 2104
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.308658969116694e-06,
+ "loss": 0.6403,
+ "step": 2105
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.302269542580109e-06,
+ "loss": 0.6782,
+ "step": 2106
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2958946656283188e-06,
+ "loss": 0.6893,
+ "step": 2107
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2895343489252355e-06,
+ "loss": 0.608,
+ "step": 2108
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2831886031104123e-06,
+ "loss": 0.6475,
+ "step": 2109
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2768574387990284e-06,
+ "loss": 0.6816,
+ "step": 2110
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2705408665818707e-06,
+ "loss": 0.6435,
+ "step": 2111
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2642388970253194e-06,
+ "loss": 0.6404,
+ "step": 2112
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2579515406713194e-06,
+ "loss": 0.6658,
+ "step": 2113
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2516788080373766e-06,
+ "loss": 0.6264,
+ "step": 2114
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2454207096165405e-06,
+ "loss": 0.6101,
+ "step": 2115
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.239177255877365e-06,
+ "loss": 0.706,
+ "step": 2116
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2329484572639183e-06,
+ "loss": 0.6659,
+ "step": 2117
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2267343241957508e-06,
+ "loss": 0.7166,
+ "step": 2118
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2205348670678774e-06,
+ "loss": 0.6509,
+ "step": 2119
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.214350096250767e-06,
+ "loss": 0.6779,
+ "step": 2120
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2081800220903207e-06,
+ "loss": 0.7028,
+ "step": 2121
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2020246549078497e-06,
+ "loss": 0.7126,
+ "step": 2122
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1958840050000686e-06,
+ "loss": 0.6249,
+ "step": 2123
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.189758082639072e-06,
+ "loss": 0.6544,
+ "step": 2124
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.183646898072318e-06,
+ "loss": 0.7085,
+ "step": 2125
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1775504615226118e-06,
+ "loss": 0.6763,
+ "step": 2126
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1714687831880867e-06,
+ "loss": 0.7043,
+ "step": 2127
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.165401873242188e-06,
+ "loss": 0.6542,
+ "step": 2128
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1593497418336619e-06,
+ "loss": 0.6597,
+ "step": 2129
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1533123990865225e-06,
+ "loss": 0.6377,
+ "step": 2130
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1472898551000544e-06,
+ "loss": 0.6333,
+ "step": 2131
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1412821199487845e-06,
+ "loss": 0.6633,
+ "step": 2132
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.135289203682466e-06,
+ "loss": 0.6866,
+ "step": 2133
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1293111163260639e-06,
+ "loss": 0.61,
+ "step": 2134
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.123347867879737e-06,
+ "loss": 0.7057,
+ "step": 2135
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.117399468318824e-06,
+ "loss": 0.674,
+ "step": 2136
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1114659275938188e-06,
+ "loss": 0.6854,
+ "step": 2137
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.105547255630368e-06,
+ "loss": 0.6599,
+ "step": 2138
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.099643462329235e-06,
+ "loss": 0.6964,
+ "step": 2139
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.0937545575663023e-06,
+ "loss": 0.7383,
+ "step": 2140
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.0878805511925438e-06,
+ "loss": 0.5789,
+ "step": 2141
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0820214530340124e-06,
+ "loss": 0.6805,
+ "step": 2142
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0761772728918208e-06,
+ "loss": 0.604,
+ "step": 2143
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0703480205421302e-06,
+ "loss": 0.6399,
+ "step": 2144
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0645337057361215e-06,
+ "loss": 0.6316,
+ "step": 2145
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0587343382000027e-06,
+ "loss": 0.6771,
+ "step": 2146
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0529499276349686e-06,
+ "loss": 0.5985,
+ "step": 2147
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0471804837171916e-06,
+ "loss": 0.5688,
+ "step": 2148
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0414260160978141e-06,
+ "loss": 0.6675,
+ "step": 2149
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0356865344029231e-06,
+ "loss": 0.4872,
+ "step": 2150
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.029962048233537e-06,
+ "loss": 0.6499,
+ "step": 2151
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0242525671655912e-06,
+ "loss": 0.6114,
+ "step": 2152
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0185581007499212e-06,
+ "loss": 0.6791,
+ "step": 2153
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0128786585122385e-06,
+ "loss": 0.6663,
+ "step": 2154
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0072142499531346e-06,
+ "loss": 0.6714,
+ "step": 2155
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0015648845480453e-06,
+ "loss": 0.5984,
+ "step": 2156
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.95930571747239e-07,
+ "loss": 0.6338,
+ "step": 2157
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.903113209758098e-07,
+ "loss": 0.6961,
+ "step": 2158
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.84707141633654e-07,
+ "loss": 0.6289,
+ "step": 2159
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.791180430954562e-07,
+ "loss": 0.6797,
+ "step": 2160
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.735440347106762e-07,
+ "loss": 0.657,
+ "step": 2161
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.679851258035277e-07,
+ "loss": 0.6761,
+ "step": 2162
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.624413256729636e-07,
+ "loss": 0.5793,
+ "step": 2163
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.569126435926735e-07,
+ "loss": 0.7171,
+ "step": 2164
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.513990888110447e-07,
+ "loss": 0.6404,
+ "step": 2165
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.459006705511664e-07,
+ "loss": 0.6293,
+ "step": 2166
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.404173980108067e-07,
+ "loss": 0.6029,
+ "step": 2167
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.349492803623972e-07,
+ "loss": 0.7179,
+ "step": 2168
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.294963267530177e-07,
+ "loss": 0.7553,
+ "step": 2169
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.240585463043849e-07,
+ "loss": 0.6711,
+ "step": 2170
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.186359481128282e-07,
+ "loss": 0.617,
+ "step": 2171
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.132285412492825e-07,
+ "loss": 0.6651,
+ "step": 2172
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.078363347592767e-07,
+ "loss": 0.6526,
+ "step": 2173
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.024593376629009e-07,
+ "loss": 0.6313,
+ "step": 2174
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.970975589548114e-07,
+ "loss": 0.7111,
+ "step": 2175
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.917510076042058e-07,
+ "loss": 0.6422,
+ "step": 2176
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.864196925548063e-07,
+ "loss": 0.5693,
+ "step": 2177
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.811036227248515e-07,
+ "loss": 0.6831,
+ "step": 2178
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.758028070070768e-07,
+ "loss": 0.7087,
+ "step": 2179
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.705172542686968e-07,
+ "loss": 0.6272,
+ "step": 2180
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.652469733513958e-07,
+ "loss": 0.7038,
+ "step": 2181
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.599919730713191e-07,
+ "loss": 0.6926,
+ "step": 2182
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.547522622190385e-07,
+ "loss": 0.5546,
+ "step": 2183
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.495278495595572e-07,
+ "loss": 0.6164,
+ "step": 2184
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.443187438322864e-07,
+ "loss": 0.6372,
+ "step": 2185
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.391249537510271e-07,
+ "loss": 0.6319,
+ "step": 2186
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.339464880039671e-07,
+ "loss": 0.6691,
+ "step": 2187
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.287833552536584e-07,
+ "loss": 0.6809,
+ "step": 2188
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.236355641369975e-07,
+ "loss": 0.7157,
+ "step": 2189
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.185031232652252e-07,
+ "loss": 0.604,
+ "step": 2190
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.133860412238992e-07,
+ "loss": 0.5792,
+ "step": 2191
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 8.082843265728879e-07,
+ "loss": 0.6621,
+ "step": 2192
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 8.031979878463525e-07,
+ "loss": 0.7521,
+ "step": 2193
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.981270335527347e-07,
+ "loss": 0.6543,
+ "step": 2194
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.930714721747323e-07,
+ "loss": 0.6123,
+ "step": 2195
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.880313121693073e-07,
+ "loss": 0.655,
+ "step": 2196
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.830065619676519e-07,
+ "loss": 0.6042,
+ "step": 2197
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.77997229975177e-07,
+ "loss": 0.5945,
+ "step": 2198
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.730033245715063e-07,
+ "loss": 0.6652,
+ "step": 2199
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.680248541104574e-07,
+ "loss": 0.7107,
+ "step": 2200
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.630618269200285e-07,
+ "loss": 0.6981,
+ "step": 2201
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.58114251302382e-07,
+ "loss": 0.6331,
+ "step": 2202
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.531821355338386e-07,
+ "loss": 0.607,
+ "step": 2203
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.482654878648465e-07,
+ "loss": 0.6728,
+ "step": 2204
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.433643165199933e-07,
+ "loss": 0.6286,
+ "step": 2205
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.384786296979662e-07,
+ "loss": 0.6141,
+ "step": 2206
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.336084355715555e-07,
+ "loss": 0.6296,
+ "step": 2207
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.287537422876323e-07,
+ "loss": 0.5949,
+ "step": 2208
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.239145579671414e-07,
+ "loss": 0.7017,
+ "step": 2209
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.190908907050808e-07,
+ "loss": 0.6897,
+ "step": 2210
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.142827485704951e-07,
+ "loss": 0.6397,
+ "step": 2211
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.094901396064535e-07,
+ "loss": 0.6906,
+ "step": 2212
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.047130718300433e-07,
+ "loss": 0.6939,
+ "step": 2213
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 6.999515532323598e-07,
+ "loss": 0.5709,
+ "step": 2214
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 6.952055917784783e-07,
+ "loss": 0.6692,
+ "step": 2215
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 6.904751954074574e-07,
+ "loss": 0.7554,
+ "step": 2216
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.857603720323136e-07,
+ "loss": 0.6861,
+ "step": 2217
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.810611295400171e-07,
+ "loss": 0.7128,
+ "step": 2218
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.76377475791471e-07,
+ "loss": 0.666,
+ "step": 2219
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.717094186215056e-07,
+ "loss": 0.6745,
+ "step": 2220
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.670569658388537e-07,
+ "loss": 0.7639,
+ "step": 2221
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.624201252261497e-07,
+ "loss": 0.6321,
+ "step": 2222
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.57798904539918e-07,
+ "loss": 0.6357,
+ "step": 2223
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.531933115105427e-07,
+ "loss": 0.6821,
+ "step": 2224
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.48603353842272e-07,
+ "loss": 0.6015,
+ "step": 2225
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.440290392131998e-07,
+ "loss": 0.6954,
+ "step": 2226
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.394703752752474e-07,
+ "loss": 0.6774,
+ "step": 2227
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.349273696541625e-07,
+ "loss": 0.6292,
+ "step": 2228
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.304000299494961e-07,
+ "loss": 0.6201,
+ "step": 2229
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.258883637345914e-07,
+ "loss": 0.6545,
+ "step": 2230
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.213923785565723e-07,
+ "loss": 0.6802,
+ "step": 2231
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.169120819363406e-07,
+ "loss": 0.6837,
+ "step": 2232
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.124474813685422e-07,
+ "loss": 0.6231,
+ "step": 2233
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.079985843215719e-07,
+ "loss": 0.6897,
+ "step": 2234
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.0356539823756e-07,
+ "loss": 0.6794,
+ "step": 2235
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.991479305323433e-07,
+ "loss": 0.6965,
+ "step": 2236
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.947461885954798e-07,
+ "loss": 0.6398,
+ "step": 2237
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.903601797902126e-07,
+ "loss": 0.728,
+ "step": 2238
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.859899114534662e-07,
+ "loss": 0.6618,
+ "step": 2239
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.816353908958361e-07,
+ "loss": 0.6866,
+ "step": 2240
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.772966254015755e-07,
+ "loss": 0.648,
+ "step": 2241
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.729736222285832e-07,
+ "loss": 0.7032,
+ "step": 2242
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.686663886083877e-07,
+ "loss": 0.6447,
+ "step": 2243
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.643749317461422e-07,
+ "loss": 0.7009,
+ "step": 2244
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.600992588206011e-07,
+ "loss": 0.6617,
+ "step": 2245
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.558393769841286e-07,
+ "loss": 0.6219,
+ "step": 2246
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.51595293362659e-07,
+ "loss": 0.7517,
+ "step": 2247
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.473670150557076e-07,
+ "loss": 0.6393,
+ "step": 2248
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.431545491363488e-07,
+ "loss": 0.6904,
+ "step": 2249
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.389579026512059e-07,
+ "loss": 0.6536,
+ "step": 2250
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.347770826204401e-07,
+ "loss": 0.7413,
+ "step": 2251
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.306120960377381e-07,
+ "loss": 0.6713,
+ "step": 2252
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.264629498702966e-07,
+ "loss": 0.6387,
+ "step": 2253
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.223296510588182e-07,
+ "loss": 0.6021,
+ "step": 2254
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.182122065174988e-07,
+ "loss": 0.6287,
+ "step": 2255
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.141106231340065e-07,
+ "loss": 0.619,
+ "step": 2256
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.100249077694797e-07,
+ "loss": 0.6626,
+ "step": 2257
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.059550672585145e-07,
+ "loss": 0.6304,
+ "step": 2258
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.019011084091496e-07,
+ "loss": 0.6665,
+ "step": 2259
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.978630380028582e-07,
+ "loss": 0.6583,
+ "step": 2260
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.938408627945335e-07,
+ "loss": 0.6312,
+ "step": 2261
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.898345895124801e-07,
+ "loss": 0.7087,
+ "step": 2262
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.858442248583995e-07,
+ "loss": 0.6372,
+ "step": 2263
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.818697755073876e-07,
+ "loss": 0.6578,
+ "step": 2264
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.779112481079107e-07,
+ "loss": 0.5806,
+ "step": 2265
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.7396864928180296e-07,
+ "loss": 0.6246,
+ "step": 2266
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.700419856242555e-07,
+ "loss": 0.6739,
+ "step": 2267
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.6613126370379627e-07,
+ "loss": 0.7274,
+ "step": 2268
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.6223649006229553e-07,
+ "loss": 0.7161,
+ "step": 2269
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.583576712149385e-07,
+ "loss": 0.5911,
+ "step": 2270
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.5449481365022143e-07,
+ "loss": 0.6127,
+ "step": 2271
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.5064792382994216e-07,
+ "loss": 0.6637,
+ "step": 2272
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.468170081891898e-07,
+ "loss": 0.6858,
+ "step": 2273
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.4300207313632713e-07,
+ "loss": 0.6407,
+ "step": 2274
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.392031250529871e-07,
+ "loss": 0.6047,
+ "step": 2275
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.3542017029406083e-07,
+ "loss": 0.6109,
+ "step": 2276
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.316532151876807e-07,
+ "loss": 0.7114,
+ "step": 2277
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.279022660352228e-07,
+ "loss": 0.7097,
+ "step": 2278
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.2416732911128247e-07,
+ "loss": 0.7146,
+ "step": 2279
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.2044841066367084e-07,
+ "loss": 0.6759,
+ "step": 2280
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.167455169134027e-07,
+ "loss": 0.6919,
+ "step": 2281
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.130586540546899e-07,
+ "loss": 0.712,
+ "step": 2282
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.093878282549246e-07,
+ "loss": 0.664,
+ "step": 2283
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.057330456546737e-07,
+ "loss": 0.7283,
+ "step": 2284
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.0209431236766793e-07,
+ "loss": 0.6375,
+ "step": 2285
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.9847163448078373e-07,
+ "loss": 0.6879,
+ "step": 2286
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.9486501805405253e-07,
+ "loss": 0.6658,
+ "step": 2287
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.9127446912062606e-07,
+ "loss": 0.6092,
+ "step": 2288
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.876999936867865e-07,
+ "loss": 0.6272,
+ "step": 2289
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.8414159773192294e-07,
+ "loss": 0.6363,
+ "step": 2290
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.8059928720852957e-07,
+ "loss": 0.6303,
+ "step": 2291
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.770730680421919e-07,
+ "loss": 0.6455,
+ "step": 2292
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.735629461315804e-07,
+ "loss": 0.6015,
+ "step": 2293
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.7006892734843145e-07,
+ "loss": 0.6662,
+ "step": 2294
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.6659101753754975e-07,
+ "loss": 0.6541,
+ "step": 2295
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.6312922251679373e-07,
+ "loss": 0.6433,
+ "step": 2296
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.5968354807705994e-07,
+ "loss": 0.6884,
+ "step": 2297
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.5625399998228116e-07,
+ "loss": 0.6326,
+ "step": 2298
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.528405839694149e-07,
+ "loss": 0.6382,
+ "step": 2299
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.4944330574843144e-07,
+ "loss": 0.6756,
+ "step": 2300
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.46062171002306e-07,
+ "loss": 0.7071,
+ "step": 2301
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.426971853870109e-07,
+ "loss": 0.5923,
+ "step": 2302
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.3934835453149797e-07,
+ "loss": 0.6816,
+ "step": 2303
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.360156840377027e-07,
+ "loss": 0.6557,
+ "step": 2304
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.3269917948052454e-07,
+ "loss": 0.7046,
+ "step": 2305
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.293988464078202e-07,
+ "loss": 0.6715,
+ "step": 2306
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.2611469034039334e-07,
+ "loss": 0.6726,
+ "step": 2307
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.2284671677199065e-07,
+ "loss": 0.6662,
+ "step": 2308
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.1959493116928473e-07,
+ "loss": 0.5986,
+ "step": 2309
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.163593389718711e-07,
+ "loss": 0.6844,
+ "step": 2310
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.131399455922568e-07,
+ "loss": 0.6355,
+ "step": 2311
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.099367564158484e-07,
+ "loss": 0.5717,
+ "step": 2312
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.0674977680095086e-07,
+ "loss": 0.6716,
+ "step": 2313
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.03579012078753e-07,
+ "loss": 0.6626,
+ "step": 2314
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.004244675533141e-07,
+ "loss": 0.6379,
+ "step": 2315
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 2.972861485015666e-07,
+ "loss": 0.6879,
+ "step": 2316
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 2.941640601732976e-07,
+ "loss": 0.6097,
+ "step": 2317
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.910582077911428e-07,
+ "loss": 0.5749,
+ "step": 2318
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.8796859655058184e-07,
+ "loss": 0.7483,
+ "step": 2319
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.8489523161992385e-07,
+ "loss": 0.6272,
+ "step": 2320
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.818381181402985e-07,
+ "loss": 0.6379,
+ "step": 2321
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.787972612256551e-07,
+ "loss": 0.6888,
+ "step": 2322
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.7577266596274577e-07,
+ "loss": 0.648,
+ "step": 2323
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.727643374111222e-07,
+ "loss": 0.6322,
+ "step": 2324
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.697722806031211e-07,
+ "loss": 0.5976,
+ "step": 2325
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.667965005438644e-07,
+ "loss": 0.666,
+ "step": 2326
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.6383700221124355e-07,
+ "loss": 0.6301,
+ "step": 2327
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.6089379055591613e-07,
+ "loss": 0.6396,
+ "step": 2328
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.579668705012917e-07,
+ "loss": 0.661,
+ "step": 2329
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.5505624694353027e-07,
+ "loss": 0.6398,
+ "step": 2330
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.5216192475152945e-07,
+ "loss": 0.6717,
+ "step": 2331
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.492839087669197e-07,
+ "loss": 0.6877,
+ "step": 2332
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.464222038040509e-07,
+ "loss": 0.7216,
+ "step": 2333
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.435768146499939e-07,
+ "loss": 0.6168,
+ "step": 2334
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.407477460645191e-07,
+ "loss": 0.6851,
+ "step": 2335
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.3793500278009974e-07,
+ "loss": 0.6754,
+ "step": 2336
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.3513858950190206e-07,
+ "loss": 0.659,
+ "step": 2337
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.3235851090777084e-07,
+ "loss": 0.6741,
+ "step": 2338
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.2959477164822942e-07,
+ "loss": 0.5922,
+ "step": 2339
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.2684737634646737e-07,
+ "loss": 0.7035,
+ "step": 2340
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.2411632959833285e-07,
+ "loss": 0.6523,
+ "step": 2341
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.2140163597233033e-07,
+ "loss": 0.6883,
+ "step": 2342
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.1870330000960504e-07,
+ "loss": 0.7065,
+ "step": 2343
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.1602132622393745e-07,
+ "loss": 0.7037,
+ "step": 2344
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.1335571910174214e-07,
+ "loss": 0.6573,
+ "step": 2345
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.1070648310205443e-07,
+ "loss": 0.7099,
+ "step": 2346
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.0807362265651943e-07,
+ "loss": 0.6879,
+ "step": 2347
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.0545714216939515e-07,
+ "loss": 0.668,
+ "step": 2348
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.0285704601753608e-07,
+ "loss": 0.6342,
+ "step": 2349
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.0027333855038967e-07,
+ "loss": 0.6833,
+ "step": 2350
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.9770602408998642e-07,
+ "loss": 0.6296,
+ "step": 2351
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.9515510693093875e-07,
+ "loss": 0.7657,
+ "step": 2352
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.9262059134042443e-07,
+ "loss": 0.7501,
+ "step": 2353
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.9010248155818755e-07,
+ "loss": 0.6626,
+ "step": 2354
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.8760078179653196e-07,
+ "loss": 0.5684,
+ "step": 2355
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.8511549624030346e-07,
+ "loss": 0.6633,
+ "step": 2356
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.8264662904689533e-07,
+ "loss": 0.6257,
+ "step": 2357
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.8019418434623405e-07,
+ "loss": 0.659,
+ "step": 2358
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.7775816624077458e-07,
+ "loss": 0.6905,
+ "step": 2359
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.753385788054962e-07,
+ "loss": 0.6208,
+ "step": 2360
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.7293542608788905e-07,
+ "loss": 0.6461,
+ "step": 2361
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.7054871210795188e-07,
+ "loss": 0.6978,
+ "step": 2362
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.6817844085818768e-07,
+ "loss": 0.6073,
+ "step": 2363
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.6582461630359149e-07,
+ "loss": 0.6644,
+ "step": 2364
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.6348724238164583e-07,
+ "loss": 0.6375,
+ "step": 2365
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.6116632300231638e-07,
+ "loss": 0.6882,
+ "step": 2366
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.58861862048042e-07,
+ "loss": 0.6882,
+ "step": 2367
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.5657386337373237e-07,
+ "loss": 0.6964,
+ "step": 2368
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.5430233080675593e-07,
+ "loss": 0.7335,
+ "step": 2369
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.520472681469376e-07,
+ "loss": 0.6552,
+ "step": 2370
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.4980867916655205e-07,
+ "loss": 0.5718,
+ "step": 2371
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.475865676103161e-07,
+ "loss": 0.684,
+ "step": 2372
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.4538093719538404e-07,
+ "loss": 0.6861,
+ "step": 2373
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.431917916113401e-07,
+ "loss": 0.6919,
+ "step": 2374
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.4101913452019277e-07,
+ "loss": 0.6003,
+ "step": 2375
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.3886296955636481e-07,
+ "loss": 0.7007,
+ "step": 2376
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.3672330032669767e-07,
+ "loss": 0.6853,
+ "step": 2377
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.3460013041043606e-07,
+ "loss": 0.5375,
+ "step": 2378
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.324934633592201e-07,
+ "loss": 0.6534,
+ "step": 2379
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.304033026970908e-07,
+ "loss": 0.6732,
+ "step": 2380
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.283296519204713e-07,
+ "loss": 0.6643,
+ "step": 2381
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.2627251449817247e-07,
+ "loss": 0.665,
+ "step": 2382
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.2423189387137713e-07,
+ "loss": 0.6163,
+ "step": 2383
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.2220779345364143e-07,
+ "loss": 0.6731,
+ "step": 2384
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.2020021663088244e-07,
+ "loss": 0.7193,
+ "step": 2385
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1820916676138384e-07,
+ "loss": 0.6111,
+ "step": 2386
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1623464717577804e-07,
+ "loss": 0.686,
+ "step": 2387
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1427666117704406e-07,
+ "loss": 0.7192,
+ "step": 2388
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1233521204050634e-07,
+ "loss": 0.746,
+ "step": 2389
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1041030301382705e-07,
+ "loss": 0.6617,
+ "step": 2390
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.085019373169971e-07,
+ "loss": 0.6608,
+ "step": 2391
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.0661011814233624e-07,
+ "loss": 0.6668,
+ "step": 2392
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 1.0473484865448524e-07,
+ "loss": 0.664,
+ "step": 2393
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 1.0287613199039592e-07,
+ "loss": 0.6965,
+ "step": 2394
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 1.0103397125933778e-07,
+ "loss": 0.6824,
+ "step": 2395
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.920836954288137e-08,
+ "loss": 0.6283,
+ "step": 2396
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.739932989489498e-08,
+ "loss": 0.5374,
+ "step": 2397
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.56068553415479e-08,
+ "loss": 0.684,
+ "step": 2398
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.383094888129274e-08,
+ "loss": 0.6537,
+ "step": 2399
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.207161348487315e-08,
+ "loss": 0.6553,
+ "step": 2400
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.032885209530717e-08,
+ "loss": 0.6023,
+ "step": 2401
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.860266762789283e-08,
+ "loss": 0.5616,
+ "step": 2402
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.689306297019362e-08,
+ "loss": 0.69,
+ "step": 2403
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.520004098204193e-08,
+ "loss": 0.5669,
+ "step": 2404
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.352360449552787e-08,
+ "loss": 0.6283,
+ "step": 2405
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.186375631499821e-08,
+ "loss": 0.662,
+ "step": 2406
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.0220499217053e-08,
+ "loss": 0.7026,
+ "step": 2407
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.859383595053449e-08,
+ "loss": 0.6369,
+ "step": 2408
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.69837692365305e-08,
+ "loss": 0.6336,
+ "step": 2409
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.539030176836549e-08,
+ "loss": 0.6336,
+ "step": 2410
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.381343621159275e-08,
+ "loss": 0.6453,
+ "step": 2411
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.2253175203999e-08,
+ "loss": 0.6473,
+ "step": 2412
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.070952135559195e-08,
+ "loss": 0.5641,
+ "step": 2413
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 6.918247724859939e-08,
+ "loss": 0.6278,
+ "step": 2414
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 6.767204543746463e-08,
+ "loss": 0.6486,
+ "step": 2415
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 6.617822844884325e-08,
+ "loss": 0.6329,
+ "step": 2416
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 6.470102878159301e-08,
+ "loss": 0.6609,
+ "step": 2417
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 6.324044890677839e-08,
+ "loss": 0.6526,
+ "step": 2418
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 6.179649126766274e-08,
+ "loss": 0.6036,
+ "step": 2419
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 6.036915827969947e-08,
+ "loss": 0.6614,
+ "step": 2420
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.895845233053643e-08,
+ "loss": 0.6672,
+ "step": 2421
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.7564375780004843e-08,
+ "loss": 0.6606,
+ "step": 2422
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.618693096011929e-08,
+ "loss": 0.6781,
+ "step": 2423
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.482612017507327e-08,
+ "loss": 0.5706,
+ "step": 2424
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.348194570123588e-08,
+ "loss": 0.7157,
+ "step": 2425
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.2154409787141815e-08,
+ "loss": 0.6054,
+ "step": 2426
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.084351465350024e-08,
+ "loss": 0.6804,
+ "step": 2427
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.954926249317815e-08,
+ "loss": 0.6925,
+ "step": 2428
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.8271655471202605e-08,
+ "loss": 0.7092,
+ "step": 2429
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.701069572475958e-08,
+ "loss": 0.6486,
+ "step": 2430
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.5766385363184006e-08,
+ "loss": 0.6973,
+ "step": 2431
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.453872646796309e-08,
+ "loss": 0.6327,
+ "step": 2432
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.3327721092726314e-08,
+ "loss": 0.7145,
+ "step": 2433
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.213337126324768e-08,
+ "loss": 0.6288,
+ "step": 2434
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.0955678977436796e-08,
+ "loss": 0.5696,
+ "step": 2435
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.979464620534113e-08,
+ "loss": 0.6202,
+ "step": 2436
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.8650274889139306e-08,
+ "loss": 0.6699,
+ "step": 2437
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.752256694313783e-08,
+ "loss": 0.6042,
+ "step": 2438
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.641152425376992e-08,
+ "loss": 0.6439,
+ "step": 2439
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.531714867959113e-08,
+ "loss": 0.6521,
+ "step": 2440
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.423944205127372e-08,
+ "loss": 0.5987,
+ "step": 2441
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.3178406171608946e-08,
+ "loss": 0.6039,
+ "step": 2442
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 3.213404281550148e-08,
+ "loss": 0.6815,
+ "step": 2443
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 3.1106353729962734e-08,
+ "loss": 0.6873,
+ "step": 2444
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 3.009534063411534e-08,
+ "loss": 0.6788,
+ "step": 2445
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.910100521918313e-08,
+ "loss": 0.6704,
+ "step": 2446
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.812334914849335e-08,
+ "loss": 0.6691,
+ "step": 2447
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.716237405747113e-08,
+ "loss": 0.6163,
+ "step": 2448
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.6218081553638363e-08,
+ "loss": 0.6384,
+ "step": 2449
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.5290473216610378e-08,
+ "loss": 0.6341,
+ "step": 2450
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.4379550598092604e-08,
+ "loss": 0.6459,
+ "step": 2451
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.3485315221877246e-08,
+ "loss": 0.6754,
+ "step": 2452
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.2607768583843282e-08,
+ "loss": 0.6113,
+ "step": 2453
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.1746912151955346e-08,
+ "loss": 0.723,
+ "step": 2454
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.090274736625486e-08,
+ "loss": 0.6899,
+ "step": 2455
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.0075275638862247e-08,
+ "loss": 0.6525,
+ "step": 2456
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.9264498353974704e-08,
+ "loss": 0.6442,
+ "step": 2457
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.8470416867861775e-08,
+ "loss": 0.6604,
+ "step": 2458
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.769303250886534e-08,
+ "loss": 0.6239,
+ "step": 2459
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.6932346577397396e-08,
+ "loss": 0.6802,
+ "step": 2460
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.6188360345932297e-08,
+ "loss": 0.6145,
+ "step": 2461
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.546107505901451e-08,
+ "loss": 0.6744,
+ "step": 2462
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.4750491933247513e-08,
+ "loss": 0.7096,
+ "step": 2463
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.4056612157297145e-08,
+ "loss": 0.6737,
+ "step": 2464
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.3379436891886034e-08,
+ "loss": 0.6118,
+ "step": 2465
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.2718967269795823e-08,
+ "loss": 0.5911,
+ "step": 2466
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.2075204395860518e-08,
+ "loss": 0.7145,
+ "step": 2467
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.1448149346969806e-08,
+ "loss": 0.6964,
+ "step": 2468
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.083780317206129e-08,
+ "loss": 0.6643,
+ "step": 2469
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.0244166892124929e-08,
+ "loss": 0.6715,
+ "step": 2470
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 9.667241500196378e-09,
+ "loss": 0.6742,
+ "step": 2471
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 9.107027961356985e-09,
+ "loss": 0.6682,
+ "step": 2472
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 8.563527212734902e-09,
+ "loss": 0.6754,
+ "step": 2473
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 8.036740163498425e-09,
+ "loss": 0.6764,
+ "step": 2474
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 7.526667694858214e-09,
+ "loss": 0.6687,
+ "step": 2475
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 7.033310660065074e-09,
+ "loss": 0.657,
+ "step": 2476
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 6.556669884408839e-09,
+ "loss": 0.5867,
+ "step": 2477
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 6.09674616521283e-09,
+ "loss": 0.6945,
+ "step": 2478
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 5.653540271841618e-09,
+ "loss": 0.6655,
+ "step": 2479
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 5.2270529456888155e-09,
+ "loss": 0.7287,
+ "step": 2480
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 4.817284900183738e-09,
+ "loss": 0.6885,
+ "step": 2481
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 4.424236820789185e-09,
+ "loss": 0.6974,
+ "step": 2482
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 4.047909364994773e-09,
+ "loss": 0.7312,
+ "step": 2483
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 3.688303162322493e-09,
+ "loss": 0.7359,
+ "step": 2484
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 3.3454188143233758e-09,
+ "loss": 0.6452,
+ "step": 2485
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 3.019256894575273e-09,
+ "loss": 0.649,
+ "step": 2486
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 2.7098179486806376e-09,
+ "loss": 0.675,
+ "step": 2487
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 2.4171024942720723e-09,
+ "loss": 0.6524,
+ "step": 2488
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 2.14111102100345e-09,
+ "loss": 0.6171,
+ "step": 2489
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.881843990554355e-09,
+ "loss": 0.7171,
+ "step": 2490
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.6393018366278601e-09,
+ "loss": 0.6227,
+ "step": 2491
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.4134849649471982e-09,
+ "loss": 0.6985,
+ "step": 2492
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 1.2043937532613126e-09,
+ "loss": 0.6793,
+ "step": 2493
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 1.0120285513381957e-09,
+ "loss": 0.6288,
+ "step": 2494
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 8.363896809659989e-10,
+ "loss": 0.7372,
+ "step": 2495
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 6.774774359541436e-10,
+ "loss": 0.6397,
+ "step": 2496
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 5.352920821311002e-10,
+ "loss": 0.6625,
+ "step": 2497
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 4.0983385734660875e-10,
+ "loss": 0.6531,
+ "step": 2498
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 3.011029714650171e-10,
+ "loss": 0.634,
+ "step": 2499
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 2.090996063741635e-10,
+ "loss": 0.6628,
+ "step": 2500
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 1.3382391597538403e-10,
+ "loss": 0.5861,
+ "step": 2501
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 7.527602619017415e-11,
+ "loss": 0.6826,
+ "step": 2502
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 3.3456034959078456e-11,
+ "loss": 0.5939,
+ "step": 2503
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 8.364012237249698e-12,
+ "loss": 0.5965,
+ "step": 2504
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.0,
+ "loss": 0.6425,
+ "step": 2505
+ },
+ {
+ "epoch": 1.0,
+ "step": 2505,
+ "total_flos": 455294069243904.0,
+ "train_loss": 0.7084222881379956,
+ "train_runtime": 10664.7748,
+ "train_samples_per_second": 30.058,
+ "train_steps_per_second": 0.235
+ }
+ ],
+ "logging_steps": 1.0,
+ "max_steps": 2505,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 1,
+ "save_steps": 50000,
+ "total_flos": 455294069243904.0,
+ "train_batch_size": 16,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/ICM-LLaVA-v1.6-13B/training_args.bin b/ICM-LLaVA-v1.6-13B/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..efaec2ab999d29d1901a90378f559f599c9c5b70
--- /dev/null
+++ b/ICM-LLaVA-v1.6-13B/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5dddce588ec43d6961fa8cf9d006d10e267bcc8f88bc821b790ddbc7253c029b
+size 6776
diff --git a/ICM-LLaVA-v1.6-7B/config.json b/ICM-LLaVA-v1.6-7B/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..dbe0c165e82639ec618788c237ccd09315bc235a
--- /dev/null
+++ b/ICM-LLaVA-v1.6-7B/config.json
@@ -0,0 +1,73 @@
+{
+ "_name_or_path": "liuhaotian/llava-v1.6-vicuna-7b",
+ "architectures": [
+ "LlavaLlamaForCausalLM"
+ ],
+ "attention_bias": false,
+ "attention_dropout": 0.0,
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "freeze_mm_mlp_adapter": false,
+ "freeze_mm_vision_resampler": false,
+ "hidden_act": "silu",
+ "hidden_size": 4096,
+ "image_aspect_ratio": "pad",
+ "image_crop_resolution": 224,
+ "image_grid_pinpoints": [
+ [
+ 336,
+ 672
+ ],
+ [
+ 672,
+ 336
+ ],
+ [
+ 672,
+ 672
+ ],
+ [
+ 1008,
+ 336
+ ],
+ [
+ 336,
+ 1008
+ ]
+ ],
+ "image_split_resolution": 224,
+ "initializer_range": 0.02,
+ "intermediate_size": 11008,
+ "max_position_embeddings": 4096,
+ "mm_hidden_size": 1024,
+ "mm_patch_merge_type": "flat",
+ "mm_projector_lr": null,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_resampler_type": null,
+ "mm_use_im_patch_token": false,
+ "mm_use_im_start_end": false,
+ "mm_vision_select_feature": "patch",
+ "mm_vision_select_layer": -2,
+ "mm_vision_tower": "openai/clip-vit-large-patch14-336",
+ "mm_vision_tower_lr": 2e-06,
+ "model_type": "llava_llama",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "pad_token_id": 0,
+ "pretraining_tp": 1,
+ "rms_norm_eps": 1e-05,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "tokenizer_model_max_length": 2048,
+ "tokenizer_padding_side": "right",
+ "torch_dtype": "bfloat16",
+ "transformers_version": "4.37.2",
+ "tune_mm_mlp_adapter": false,
+ "tune_mm_vision_resampler": false,
+ "unfreeze_mm_vision_tower": true,
+ "use_cache": true,
+ "use_mm_proj": true,
+ "vocab_size": 32000
+}
diff --git a/ICM-LLaVA-v1.6-7B/generation_config.json b/ICM-LLaVA-v1.6-7B/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..9f5b631657a21d1f230063134b29e5189407aa12
--- /dev/null
+++ b/ICM-LLaVA-v1.6-7B/generation_config.json
@@ -0,0 +1,8 @@
+{
+ "attn_implementation": "sdpa",
+ "bos_token_id": 1,
+ "eos_token_id": 2,
+ "max_length": 4096,
+ "pad_token_id": 0,
+ "transformers_version": "4.37.2"
+}
diff --git a/ICM-LLaVA-v1.6-7B/model-00001-of-00003.safetensors b/ICM-LLaVA-v1.6-7B/model-00001-of-00003.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..8e185629fdb3d93c532fe7be6e60b1571c3a566f
--- /dev/null
+++ b/ICM-LLaVA-v1.6-7B/model-00001-of-00003.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:e23eea5c6dda06921d4f9c634ec2ea841ccdc14e3e96eaf61390e895dc974bc3
+size 4938993632
diff --git a/ICM-LLaVA-v1.6-7B/model-00002-of-00003.safetensors b/ICM-LLaVA-v1.6-7B/model-00002-of-00003.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..79a5b44d5acef9aa46fa637cf893b266a6a3453f
--- /dev/null
+++ b/ICM-LLaVA-v1.6-7B/model-00002-of-00003.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d07f2fe1d821b5d1c3f10f358453b3b8817eab106501d81798bc770ef762938a
+size 4947390880
diff --git a/ICM-LLaVA-v1.6-7B/model-00003-of-00003.safetensors b/ICM-LLaVA-v1.6-7B/model-00003-of-00003.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..004f5d0b35be4e27cf91ff01fcca80a6d1b261db
--- /dev/null
+++ b/ICM-LLaVA-v1.6-7B/model-00003-of-00003.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a692209c2a2374d85bdd01bae2787e9150e875806c8c2a52a3cf9d72bf3cb0cc
+size 4239524944
diff --git a/ICM-LLaVA-v1.6-7B/model.safetensors.index.json b/ICM-LLaVA-v1.6-7B/model.safetensors.index.json
new file mode 100644
index 0000000000000000000000000000000000000000..b526abddee37a3410b3a82ab0fb967fa1f10eb9c
--- /dev/null
+++ b/ICM-LLaVA-v1.6-7B/model.safetensors.index.json
@@ -0,0 +1,694 @@
+{
+ "metadata": {
+ "total_size": 14125813760
+ },
+ "weight_map": {
+ "lm_head.weight": "model-00003-of-00003.safetensors",
+ "model.embed_tokens.weight": "model-00001-of-00003.safetensors",
+ "model.image_newline": "model-00001-of-00003.safetensors",
+ "model.layers.0.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.0.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.1.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.10.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.11.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.11.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.12.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.12.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.13.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.14.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.15.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.16.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.17.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.18.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.19.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.2.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.2.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.20.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.20.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.21.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.input_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.mlp.down_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.post_attention_layernorm.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.22.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.23.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.23.mlp.gate_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.mlp.up_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.23.self_attn.k_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.self_attn.o_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.self_attn.q_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.23.self_attn.v_proj.weight": "model-00002-of-00003.safetensors",
+ "model.layers.24.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.24.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.25.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.26.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.27.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.28.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.29.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.3.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.3.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.30.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.30.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.input_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.mlp.down_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.mlp.gate_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.mlp.up_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.post_attention_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.o_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.31.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.layers.4.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.4.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.5.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.6.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.7.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.8.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.input_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.mlp.down_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.mlp.gate_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.mlp.up_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.post_attention_layernorm.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.k_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.o_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.q_proj.weight": "model-00001-of-00003.safetensors",
+ "model.layers.9.self_attn.v_proj.weight": "model-00001-of-00003.safetensors",
+ "model.mm_projector.0.bias": "model-00003-of-00003.safetensors",
+ "model.mm_projector.0.weight": "model-00003-of-00003.safetensors",
+ "model.mm_projector.2.bias": "model-00003-of-00003.safetensors",
+ "model.mm_projector.2.weight": "model-00003-of-00003.safetensors",
+ "model.norm.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.embeddings.class_embedding": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.embeddings.patch_embedding.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.embeddings.position_embedding.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.0.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.1.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.10.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.11.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.12.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.13.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.14.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.15.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.16.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.17.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.18.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.19.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.2.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.20.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.21.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.22.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.23.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.3.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.4.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.5.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.6.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.7.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.8.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.layer_norm2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc1.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc1.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc2.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.mlp.fc2.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.k_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.out_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.q_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.encoder.layers.9.self_attn.v_proj.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.post_layernorm.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.post_layernorm.weight": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.pre_layrnorm.bias": "model-00003-of-00003.safetensors",
+ "model.vision_tower.vision_tower.vision_model.pre_layrnorm.weight": "model-00003-of-00003.safetensors"
+ }
+}
diff --git a/ICM-LLaVA-v1.6-7B/runs/Jan20_09-00-59_hk01dgx042/events.out.tfevents.1737335428.hk01dgx042.3861440.0 b/ICM-LLaVA-v1.6-7B/runs/Jan20_09-00-59_hk01dgx042/events.out.tfevents.1737335428.hk01dgx042.3861440.0
new file mode 100644
index 0000000000000000000000000000000000000000..2b6d265e781d54720f634e205fe2ae48197e1af3
--- /dev/null
+++ b/ICM-LLaVA-v1.6-7B/runs/Jan20_09-00-59_hk01dgx042/events.out.tfevents.1737335428.hk01dgx042.3861440.0
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:11d45387f4d741e33120451ece3b2bc509c0bf88a710a28c0a41af608a3bf511
+size 399109
diff --git a/ICM-LLaVA-v1.6-7B/special_tokens_map.json b/ICM-LLaVA-v1.6-7B/special_tokens_map.json
new file mode 100644
index 0000000000000000000000000000000000000000..14761dcf1466dc232bd41de9c21d4c617b15755e
--- /dev/null
+++ b/ICM-LLaVA-v1.6-7B/special_tokens_map.json
@@ -0,0 +1,24 @@
+{
+ "bos_token": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "eos_token": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ },
+ "pad_token": "<unk>",
+ "unk_token": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false
+ }
+}
diff --git a/ICM-LLaVA-v1.6-7B/tokenizer.model b/ICM-LLaVA-v1.6-7B/tokenizer.model
new file mode 100644
index 0000000000000000000000000000000000000000..6c00c742ce03c627d6cd5b795984876fa49fa899
--- /dev/null
+++ b/ICM-LLaVA-v1.6-7B/tokenizer.model
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
+size 499723
diff --git a/ICM-LLaVA-v1.6-7B/tokenizer_config.json b/ICM-LLaVA-v1.6-7B/tokenizer_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..2d53c0f8edb049fa98763ee75652fafa68bf7f42
--- /dev/null
+++ b/ICM-LLaVA-v1.6-7B/tokenizer_config.json
@@ -0,0 +1,42 @@
+{
+ "add_bos_token": true,
+ "add_eos_token": false,
+ "added_tokens_decoder": {
+ "0": {
+ "content": "<unk>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "1": {
+ "content": "<s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ },
+ "2": {
+ "content": "</s>",
+ "lstrip": false,
+ "normalized": false,
+ "rstrip": false,
+ "single_word": false,
+ "special": true
+ }
+ },
+ "bos_token": "<s>",
+ "clean_up_tokenization_spaces": false,
+ "eos_token": "</s>",
+ "legacy": false,
+ "model_max_length": 2048,
+ "pad_token": "<unk>",
+ "padding_side": "right",
+ "sp_model_kwargs": {},
+ "spaces_between_special_tokens": false,
+ "tokenizer_class": "LlamaTokenizer",
+ "unk_token": "<unk>",
+ "use_default_system_prompt": false
+}
diff --git a/ICM-LLaVA-v1.6-7B/trainer_state.json b/ICM-LLaVA-v1.6-7B/trainer_state.json
new file mode 100644
index 0000000000000000000000000000000000000000..9f60833b5224d72e55b78a65e6b29a2247d48ecd
--- /dev/null
+++ b/ICM-LLaVA-v1.6-7B/trainer_state.json
@@ -0,0 +1,15060 @@
+{
+ "best_metric": null,
+ "best_model_checkpoint": null,
+ "epoch": 1.0,
+ "eval_steps": 500,
+ "global_step": 2505,
+ "is_hyper_param_search": false,
+ "is_local_process_zero": true,
+ "is_world_process_zero": true,
+ "log_history": [
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.6315789473684213e-07,
+ "loss": 1.3278,
+ "step": 1
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 5.263157894736843e-07,
+ "loss": 1.4264,
+ "step": 2
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 7.894736842105263e-07,
+ "loss": 1.34,
+ "step": 3
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.0526315789473685e-06,
+ "loss": 1.2429,
+ "step": 4
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.3157894736842106e-06,
+ "loss": 1.3609,
+ "step": 5
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.5789473684210526e-06,
+ "loss": 1.3431,
+ "step": 6
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 1.8421052631578948e-06,
+ "loss": 1.3332,
+ "step": 7
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.105263157894737e-06,
+ "loss": 1.2221,
+ "step": 8
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.368421052631579e-06,
+ "loss": 1.2047,
+ "step": 9
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.631578947368421e-06,
+ "loss": 1.1103,
+ "step": 10
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 2.8947368421052634e-06,
+ "loss": 1.1253,
+ "step": 11
+ },
+ {
+ "epoch": 0.0,
+ "learning_rate": 3.157894736842105e-06,
+ "loss": 1.0961,
+ "step": 12
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.421052631578948e-06,
+ "loss": 0.9348,
+ "step": 13
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.6842105263157896e-06,
+ "loss": 0.9805,
+ "step": 14
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 3.947368421052632e-06,
+ "loss": 0.9165,
+ "step": 15
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.210526315789474e-06,
+ "loss": 1.0089,
+ "step": 16
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.473684210526316e-06,
+ "loss": 0.9608,
+ "step": 17
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 4.736842105263158e-06,
+ "loss": 0.8427,
+ "step": 18
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5e-06,
+ "loss": 0.9188,
+ "step": 19
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.263157894736842e-06,
+ "loss": 0.9612,
+ "step": 20
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.526315789473685e-06,
+ "loss": 0.8958,
+ "step": 21
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 5.789473684210527e-06,
+ "loss": 0.8499,
+ "step": 22
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.0526315789473685e-06,
+ "loss": 0.9264,
+ "step": 23
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.31578947368421e-06,
+ "loss": 0.8155,
+ "step": 24
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.578947368421054e-06,
+ "loss": 0.8222,
+ "step": 25
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 6.842105263157896e-06,
+ "loss": 0.8816,
+ "step": 26
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.1052631578947375e-06,
+ "loss": 0.8932,
+ "step": 27
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.368421052631579e-06,
+ "loss": 0.8398,
+ "step": 28
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.631578947368423e-06,
+ "loss": 0.8775,
+ "step": 29
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 7.894736842105265e-06,
+ "loss": 0.8774,
+ "step": 30
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.157894736842106e-06,
+ "loss": 0.8957,
+ "step": 31
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.421052631578948e-06,
+ "loss": 0.8374,
+ "step": 32
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.68421052631579e-06,
+ "loss": 0.8443,
+ "step": 33
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 8.947368421052632e-06,
+ "loss": 0.8535,
+ "step": 34
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.210526315789474e-06,
+ "loss": 0.8095,
+ "step": 35
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.473684210526315e-06,
+ "loss": 0.7999,
+ "step": 36
+ },
+ {
+ "epoch": 0.01,
+ "learning_rate": 9.736842105263159e-06,
+ "loss": 0.665,
+ "step": 37
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1e-05,
+ "loss": 0.7839,
+ "step": 38
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0263157894736844e-05,
+ "loss": 0.8421,
+ "step": 39
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0526315789473684e-05,
+ "loss": 0.8224,
+ "step": 40
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.0789473684210528e-05,
+ "loss": 0.7589,
+ "step": 41
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.105263157894737e-05,
+ "loss": 0.8174,
+ "step": 42
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1315789473684212e-05,
+ "loss": 0.7783,
+ "step": 43
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1578947368421053e-05,
+ "loss": 0.7085,
+ "step": 44
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.1842105263157895e-05,
+ "loss": 0.7278,
+ "step": 45
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2105263157894737e-05,
+ "loss": 0.8025,
+ "step": 46
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.236842105263158e-05,
+ "loss": 0.8135,
+ "step": 47
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.263157894736842e-05,
+ "loss": 0.8356,
+ "step": 48
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.2894736842105264e-05,
+ "loss": 0.8227,
+ "step": 49
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3157894736842108e-05,
+ "loss": 0.834,
+ "step": 50
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3421052631578948e-05,
+ "loss": 0.8234,
+ "step": 51
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3684210526315791e-05,
+ "loss": 0.6931,
+ "step": 52
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.3947368421052631e-05,
+ "loss": 0.7776,
+ "step": 53
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4210526315789475e-05,
+ "loss": 0.7881,
+ "step": 54
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4473684210526317e-05,
+ "loss": 0.7127,
+ "step": 55
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.4736842105263159e-05,
+ "loss": 0.7625,
+ "step": 56
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5000000000000002e-05,
+ "loss": 0.7945,
+ "step": 57
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5263157894736846e-05,
+ "loss": 0.7825,
+ "step": 58
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.5526315789473686e-05,
+ "loss": 0.7735,
+ "step": 59
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.578947368421053e-05,
+ "loss": 0.7742,
+ "step": 60
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.605263157894737e-05,
+ "loss": 0.7952,
+ "step": 61
+ },
+ {
+ "epoch": 0.02,
+ "learning_rate": 1.6315789473684213e-05,
+ "loss": 0.7857,
+ "step": 62
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.6578947368421053e-05,
+ "loss": 0.7018,
+ "step": 63
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.6842105263157896e-05,
+ "loss": 0.7071,
+ "step": 64
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7105263157894737e-05,
+ "loss": 0.77,
+ "step": 65
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.736842105263158e-05,
+ "loss": 0.77,
+ "step": 66
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.763157894736842e-05,
+ "loss": 0.7569,
+ "step": 67
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.7894736842105264e-05,
+ "loss": 0.7979,
+ "step": 68
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8157894736842107e-05,
+ "loss": 0.8176,
+ "step": 69
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.8421052631578947e-05,
+ "loss": 0.7484,
+ "step": 70
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.868421052631579e-05,
+ "loss": 0.7701,
+ "step": 71
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.894736842105263e-05,
+ "loss": 0.8091,
+ "step": 72
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9210526315789474e-05,
+ "loss": 0.7026,
+ "step": 73
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9473684210526318e-05,
+ "loss": 0.7088,
+ "step": 74
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9736842105263158e-05,
+ "loss": 0.7974,
+ "step": 75
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 2e-05,
+ "loss": 0.7834,
+ "step": 76
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999991635987763e-05,
+ "loss": 0.7535,
+ "step": 77
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999966543965042e-05,
+ "loss": 0.7698,
+ "step": 78
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999924723973812e-05,
+ "loss": 0.7521,
+ "step": 79
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999866176084026e-05,
+ "loss": 0.7368,
+ "step": 80
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999790900393628e-05,
+ "loss": 0.7427,
+ "step": 81
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999698897028537e-05,
+ "loss": 0.7611,
+ "step": 82
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999590166142656e-05,
+ "loss": 0.7592,
+ "step": 83
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.999946470791787e-05,
+ "loss": 0.8147,
+ "step": 84
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999322522564048e-05,
+ "loss": 0.8183,
+ "step": 85
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9999163610319035e-05,
+ "loss": 0.7512,
+ "step": 86
+ },
+ {
+ "epoch": 0.03,
+ "learning_rate": 1.9998987971448664e-05,
+ "loss": 0.7583,
+ "step": 87
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998795606246738e-05,
+ "loss": 0.7338,
+ "step": 88
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998586515035053e-05,
+ "loss": 0.8245,
+ "step": 89
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9998360698163375e-05,
+ "loss": 0.7334,
+ "step": 90
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999811815600945e-05,
+ "loss": 0.7368,
+ "step": 91
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9997858888978997e-05,
+ "loss": 0.7343,
+ "step": 92
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999758289750573e-05,
+ "loss": 0.7862,
+ "step": 93
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999729018205132e-05,
+ "loss": 0.7504,
+ "step": 94
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9996980743105427e-05,
+ "loss": 0.8627,
+ "step": 95
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999665458118568e-05,
+ "loss": 0.8035,
+ "step": 96
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999631169683768e-05,
+ "loss": 0.7763,
+ "step": 97
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9995952090635007e-05,
+ "loss": 0.8153,
+ "step": 98
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9995575763179213e-05,
+ "loss": 0.7422,
+ "step": 99
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999518271509982e-05,
+ "loss": 0.775,
+ "step": 100
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999477294705431e-05,
+ "loss": 0.7909,
+ "step": 101
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999434645972816e-05,
+ "loss": 0.7661,
+ "step": 102
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.999390325383479e-05,
+ "loss": 0.7945,
+ "step": 103
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9993443330115592e-05,
+ "loss": 0.8495,
+ "step": 104
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9992966689339936e-05,
+ "loss": 0.7199,
+ "step": 105
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9992473332305145e-05,
+ "loss": 0.7933,
+ "step": 106
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9991963259836504e-05,
+ "loss": 0.7092,
+ "step": 107
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9991436472787267e-05,
+ "loss": 0.7955,
+ "step": 108
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9990892972038646e-05,
+ "loss": 0.7667,
+ "step": 109
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9990332758499805e-05,
+ "loss": 0.8184,
+ "step": 110
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.9989755833107875e-05,
+ "loss": 0.6962,
+ "step": 111
+ },
+ {
+ "epoch": 0.04,
+ "learning_rate": 1.998916219682794e-05,
+ "loss": 0.8129,
+ "step": 112
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998855185065303e-05,
+ "loss": 0.7626,
+ "step": 113
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998792479560414e-05,
+ "loss": 0.8003,
+ "step": 114
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9987281032730206e-05,
+ "loss": 0.7497,
+ "step": 115
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9986620563108117e-05,
+ "loss": 0.7248,
+ "step": 116
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9985943387842704e-05,
+ "loss": 0.8085,
+ "step": 117
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9985249508066754e-05,
+ "loss": 0.8019,
+ "step": 118
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9984538924940987e-05,
+ "loss": 0.7606,
+ "step": 119
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998381163965407e-05,
+ "loss": 0.8274,
+ "step": 120
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9983067653422603e-05,
+ "loss": 0.8386,
+ "step": 121
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9982306967491136e-05,
+ "loss": 0.7535,
+ "step": 122
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.998152958313214e-05,
+ "loss": 0.7517,
+ "step": 123
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9980735501646026e-05,
+ "loss": 0.7414,
+ "step": 124
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997992472436114e-05,
+ "loss": 0.8093,
+ "step": 125
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9979097252633748e-05,
+ "loss": 0.7453,
+ "step": 126
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9978253087848046e-05,
+ "loss": 0.7822,
+ "step": 127
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9977392231416158e-05,
+ "loss": 0.7894,
+ "step": 128
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9976514684778124e-05,
+ "loss": 0.795,
+ "step": 129
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9975620449401913e-05,
+ "loss": 0.7371,
+ "step": 130
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997470952678339e-05,
+ "loss": 0.7532,
+ "step": 131
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9973781918446363e-05,
+ "loss": 0.7764,
+ "step": 132
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9972837625942533e-05,
+ "loss": 0.7578,
+ "step": 133
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997187665085151e-05,
+ "loss": 0.7535,
+ "step": 134
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.997089899478082e-05,
+ "loss": 0.7727,
+ "step": 135
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9969904659365887e-05,
+ "loss": 0.7547,
+ "step": 136
+ },
+ {
+ "epoch": 0.05,
+ "learning_rate": 1.9968893646270037e-05,
+ "loss": 0.8014,
+ "step": 137
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.99678659571845e-05,
+ "loss": 0.7865,
+ "step": 138
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9966821593828393e-05,
+ "loss": 0.8029,
+ "step": 139
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996576055794873e-05,
+ "loss": 0.7342,
+ "step": 140
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996468285132041e-05,
+ "loss": 0.7565,
+ "step": 141
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9963588475746233e-05,
+ "loss": 0.8076,
+ "step": 142
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9962477433056864e-05,
+ "loss": 0.7731,
+ "step": 143
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996134972511086e-05,
+ "loss": 0.7815,
+ "step": 144
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.996020535379466e-05,
+ "loss": 0.7504,
+ "step": 145
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9959044321022563e-05,
+ "loss": 0.7896,
+ "step": 146
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9957866628736756e-05,
+ "loss": 0.7712,
+ "step": 147
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9956672278907273e-05,
+ "loss": 0.8211,
+ "step": 148
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9955461273532037e-05,
+ "loss": 0.7356,
+ "step": 149
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9954233614636817e-05,
+ "loss": 0.7845,
+ "step": 150
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.995298930427524e-05,
+ "loss": 0.7656,
+ "step": 151
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.99517283445288e-05,
+ "loss": 0.7484,
+ "step": 152
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9950450737506825e-05,
+ "loss": 0.7295,
+ "step": 153
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9949156485346502e-05,
+ "loss": 0.7757,
+ "step": 154
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994784559021286e-05,
+ "loss": 0.8091,
+ "step": 155
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9946518054298768e-05,
+ "loss": 0.787,
+ "step": 156
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994517387982493e-05,
+ "loss": 0.7568,
+ "step": 157
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9943813069039883e-05,
+ "loss": 0.7924,
+ "step": 158
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.994243562422e-05,
+ "loss": 0.746,
+ "step": 159
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9941041547669467e-05,
+ "loss": 0.7858,
+ "step": 160
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.9939630841720302e-05,
+ "loss": 0.7516,
+ "step": 161
+ },
+ {
+ "epoch": 0.06,
+ "learning_rate": 1.993820350873234e-05,
+ "loss": 0.6892,
+ "step": 162
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.993675955109322e-05,
+ "loss": 0.724,
+ "step": 163
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.993529897121841e-05,
+ "loss": 0.7709,
+ "step": 164
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.993382177155116e-05,
+ "loss": 0.7063,
+ "step": 165
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.993232795456254e-05,
+ "loss": 0.7249,
+ "step": 166
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9930817522751403e-05,
+ "loss": 0.7328,
+ "step": 167
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9929290478644412e-05,
+ "loss": 0.7756,
+ "step": 168
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9927746824796003e-05,
+ "loss": 0.7708,
+ "step": 169
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992618656378841e-05,
+ "loss": 0.8001,
+ "step": 170
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9924609698231637e-05,
+ "loss": 0.7606,
+ "step": 171
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.992301623076347e-05,
+ "loss": 0.751,
+ "step": 172
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9921406164049467e-05,
+ "loss": 0.7608,
+ "step": 173
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991977950078295e-05,
+ "loss": 0.7706,
+ "step": 174
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9918136243685003e-05,
+ "loss": 0.7519,
+ "step": 175
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9916476395504473e-05,
+ "loss": 0.7338,
+ "step": 176
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991479995901796e-05,
+ "loss": 0.7371,
+ "step": 177
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991310693702981e-05,
+ "loss": 0.7872,
+ "step": 178
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.991139733237211e-05,
+ "loss": 0.7705,
+ "step": 179
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9909671147904693e-05,
+ "loss": 0.7396,
+ "step": 180
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9907928386515126e-05,
+ "loss": 0.8279,
+ "step": 181
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.990616905111871e-05,
+ "loss": 0.7834,
+ "step": 182
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9904393144658455e-05,
+ "loss": 0.7308,
+ "step": 183
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9902600670105107e-05,
+ "loss": 0.7474,
+ "step": 184
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9900791630457122e-05,
+ "loss": 0.7379,
+ "step": 185
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9898966028740662e-05,
+ "loss": 0.8033,
+ "step": 186
+ },
+ {
+ "epoch": 0.07,
+ "learning_rate": 1.9897123868009606e-05,
+ "loss": 0.6775,
+ "step": 187
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9895265151345516e-05,
+ "loss": 0.829,
+ "step": 188
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9893389881857665e-05,
+ "loss": 0.7769,
+ "step": 189
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9891498062683005e-05,
+ "loss": 0.7526,
+ "step": 190
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9889589696986173e-05,
+ "loss": 0.8029,
+ "step": 191
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9887664787959495e-05,
+ "loss": 0.6893,
+ "step": 192
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.988572333882296e-05,
+ "loss": 0.6915,
+ "step": 193
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9883765352824224e-05,
+ "loss": 0.7772,
+ "step": 194
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9881790833238615e-05,
+ "loss": 0.7542,
+ "step": 195
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9879799783369117e-05,
+ "loss": 0.7547,
+ "step": 196
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987779220654636e-05,
+ "loss": 0.7448,
+ "step": 197
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9875768106128627e-05,
+ "loss": 0.7162,
+ "step": 198
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987372748550183e-05,
+ "loss": 0.7463,
+ "step": 199
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.987167034807953e-05,
+ "loss": 0.7069,
+ "step": 200
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9869596697302912e-05,
+ "loss": 0.728,
+ "step": 201
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9867506536640782e-05,
+ "loss": 0.7538,
+ "step": 202
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9865399869589565e-05,
+ "loss": 0.6684,
+ "step": 203
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9863276699673305e-05,
+ "loss": 0.7547,
+ "step": 204
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9861137030443638e-05,
+ "loss": 0.7663,
+ "step": 205
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985898086547981e-05,
+ "loss": 0.7111,
+ "step": 206
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9856808208388663e-05,
+ "loss": 0.7971,
+ "step": 207
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985461906280462e-05,
+ "loss": 0.7301,
+ "step": 208
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9852413432389685e-05,
+ "loss": 0.7432,
+ "step": 209
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.985019132083345e-05,
+ "loss": 0.7777,
+ "step": 210
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9847952731853067e-05,
+ "loss": 0.7958,
+ "step": 211
+ },
+ {
+ "epoch": 0.08,
+ "learning_rate": 1.9845697669193248e-05,
+ "loss": 0.7768,
+ "step": 212
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.984342613662627e-05,
+ "loss": 0.7467,
+ "step": 213
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.984113813795196e-05,
+ "loss": 0.7353,
+ "step": 214
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9838833676997687e-05,
+ "loss": 0.7854,
+ "step": 215
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9836512757618355e-05,
+ "loss": 0.7192,
+ "step": 216
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.983417538369641e-05,
+ "loss": 0.7221,
+ "step": 217
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9831821559141817e-05,
+ "loss": 0.8203,
+ "step": 218
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9829451287892048e-05,
+ "loss": 0.7121,
+ "step": 219
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9827064573912113e-05,
+ "loss": 0.8048,
+ "step": 220
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9824661421194506e-05,
+ "loss": 0.6961,
+ "step": 221
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9822241833759226e-05,
+ "loss": 0.7349,
+ "step": 222
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9819805815653768e-05,
+ "loss": 0.7438,
+ "step": 223
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9817353370953105e-05,
+ "loss": 0.8341,
+ "step": 224
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9814884503759698e-05,
+ "loss": 0.8117,
+ "step": 225
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9812399218203467e-05,
+ "loss": 0.7921,
+ "step": 226
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9809897518441813e-05,
+ "loss": 0.7657,
+ "step": 227
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.980737940865958e-05,
+ "loss": 0.762,
+ "step": 228
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9804844893069063e-05,
+ "loss": 0.8049,
+ "step": 229
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9802293975910016e-05,
+ "loss": 0.7253,
+ "step": 230
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.979972666144961e-05,
+ "loss": 0.7999,
+ "step": 231
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9797142953982463e-05,
+ "loss": 0.737,
+ "step": 232
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9794542857830605e-05,
+ "loss": 0.7392,
+ "step": 233
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.979192637734348e-05,
+ "loss": 0.7584,
+ "step": 234
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9789293516897947e-05,
+ "loss": 0.7863,
+ "step": 235
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9786644280898258e-05,
+ "loss": 0.757,
+ "step": 236
+ },
+ {
+ "epoch": 0.09,
+ "learning_rate": 1.9783978673776067e-05,
+ "loss": 0.7025,
+ "step": 237
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9781296699990398e-05,
+ "loss": 0.7393,
+ "step": 238
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.977859836402767e-05,
+ "loss": 0.7237,
+ "step": 239
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9775883670401667e-05,
+ "loss": 0.7309,
+ "step": 240
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9773152623653535e-05,
+ "loss": 0.7592,
+ "step": 241
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.977040522835177e-05,
+ "loss": 0.8258,
+ "step": 242
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9767641489092232e-05,
+ "loss": 0.6867,
+ "step": 243
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.97648614104981e-05,
+ "loss": 0.7393,
+ "step": 244
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.97620649972199e-05,
+ "loss": 0.7252,
+ "step": 245
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9759252253935485e-05,
+ "loss": 0.7487,
+ "step": 246
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.975642318535001e-05,
+ "loss": 0.7086,
+ "step": 247
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.975357779619595e-05,
+ "loss": 0.8465,
+ "step": 248
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9750716091233083e-05,
+ "loss": 0.7299,
+ "step": 249
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.974783807524847e-05,
+ "loss": 0.8084,
+ "step": 250
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.974494375305647e-05,
+ "loss": 0.7474,
+ "step": 251
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.974203312949871e-05,
+ "loss": 0.7297,
+ "step": 252
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9739106209444087e-05,
+ "loss": 0.7383,
+ "step": 253
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9736162997788757e-05,
+ "loss": 0.721,
+ "step": 254
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9733203499456136e-05,
+ "loss": 0.7258,
+ "step": 255
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9730227719396883e-05,
+ "loss": 0.7628,
+ "step": 256
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9727235662588882e-05,
+ "loss": 0.7301,
+ "step": 257
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9724227334037255e-05,
+ "loss": 0.6904,
+ "step": 258
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9721202738774346e-05,
+ "loss": 0.7733,
+ "step": 259
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9718161881859703e-05,
+ "loss": 0.8125,
+ "step": 260
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9715104768380078e-05,
+ "loss": 0.7403,
+ "step": 261
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.971203140344942e-05,
+ "loss": 0.786,
+ "step": 262
+ },
+ {
+ "epoch": 0.1,
+ "learning_rate": 1.9708941792208857e-05,
+ "loss": 0.7585,
+ "step": 263
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9705835939826704e-05,
+ "loss": 0.7617,
+ "step": 264
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9702713851498436e-05,
+ "loss": 0.7378,
+ "step": 265
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9699575532446688e-05,
+ "loss": 0.7349,
+ "step": 266
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.969642098792125e-05,
+ "loss": 0.6457,
+ "step": 267
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9693250223199053e-05,
+ "loss": 0.7271,
+ "step": 268
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9690063243584153e-05,
+ "loss": 0.7913,
+ "step": 269
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9686860054407745e-05,
+ "loss": 0.7387,
+ "step": 270
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.968364066102813e-05,
+ "loss": 0.7346,
+ "step": 271
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9680405068830716e-05,
+ "loss": 0.8078,
+ "step": 272
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.967715328322801e-05,
+ "loss": 0.6948,
+ "step": 273
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.967388530965961e-05,
+ "loss": 0.777,
+ "step": 274
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.967060115359218e-05,
+ "loss": 0.7895,
+ "step": 275
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9667300820519478e-05,
+ "loss": 0.7989,
+ "step": 276
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.96639843159623e-05,
+ "loss": 0.7681,
+ "step": 277
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9660651645468504e-05,
+ "loss": 0.7821,
+ "step": 278
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.965730281461299e-05,
+ "loss": 0.7032,
+ "step": 279
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9653937828997694e-05,
+ "loss": 0.7108,
+ "step": 280
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.965055669425157e-05,
+ "loss": 0.761,
+ "step": 281
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9647159416030588e-05,
+ "loss": 0.7838,
+ "step": 282
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9643746000017722e-05,
+ "loss": 0.775,
+ "step": 283
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9640316451922943e-05,
+ "loss": 0.7452,
+ "step": 284
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9636870777483207e-05,
+ "loss": 0.7426,
+ "step": 285
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9633408982462453e-05,
+ "loss": 0.727,
+ "step": 286
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9629931072651572e-05,
+ "loss": 0.7401,
+ "step": 287
+ },
+ {
+ "epoch": 0.11,
+ "learning_rate": 1.9626437053868423e-05,
+ "loss": 0.7729,
+ "step": 288
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.962292693195781e-05,
+ "loss": 0.7306,
+ "step": 289
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9619400712791473e-05,
+ "loss": 0.7302,
+ "step": 290
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.961585840226808e-05,
+ "loss": 0.759,
+ "step": 291
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9612300006313216e-05,
+ "loss": 0.7353,
+ "step": 292
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9608725530879376e-05,
+ "loss": 0.7382,
+ "step": 293
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.960513498194595e-05,
+ "loss": 0.7032,
+ "step": 294
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9601528365519218e-05,
+ "loss": 0.7124,
+ "step": 295
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9597905687632336e-05,
+ "loss": 0.737,
+ "step": 296
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.959426695434533e-05,
+ "loss": 0.7795,
+ "step": 297
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9590612171745076e-05,
+ "loss": 0.798,
+ "step": 298
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.958694134594531e-05,
+ "loss": 0.7565,
+ "step": 299
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.95832544830866e-05,
+ "loss": 0.7563,
+ "step": 300
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9579551589336333e-05,
+ "loss": 0.751,
+ "step": 301
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9575832670888723e-05,
+ "loss": 0.7304,
+ "step": 302
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.957209773396478e-05,
+ "loss": 0.796,
+ "step": 303
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.956834678481232e-05,
+ "loss": 0.7291,
+ "step": 304
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9564579829705945e-05,
+ "loss": 0.7744,
+ "step": 305
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9560796874947016e-05,
+ "loss": 0.6635,
+ "step": 306
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9556997926863676e-05,
+ "loss": 0.7361,
+ "step": 307
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9553182991810812e-05,
+ "loss": 0.6839,
+ "step": 308
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.954935207617006e-05,
+ "loss": 0.756,
+ "step": 309
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.954550518634978e-05,
+ "loss": 0.7671,
+ "step": 310
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9541642328785065e-05,
+ "loss": 0.7566,
+ "step": 311
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9537763509937707e-05,
+ "loss": 0.764,
+ "step": 312
+ },
+ {
+ "epoch": 0.12,
+ "learning_rate": 1.9533868736296205e-05,
+ "loss": 0.7101,
+ "step": 313
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9529958014375748e-05,
+ "loss": 0.6967,
+ "step": 314
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.95260313507182e-05,
+ "loss": 0.7406,
+ "step": 315
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.952208875189209e-05,
+ "loss": 0.7519,
+ "step": 316
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9518130224492612e-05,
+ "loss": 0.714,
+ "step": 317
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9514155775141602e-05,
+ "loss": 0.7515,
+ "step": 318
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9510165410487524e-05,
+ "loss": 0.7191,
+ "step": 319
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.950615913720547e-05,
+ "loss": 0.704,
+ "step": 320
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9502136961997144e-05,
+ "loss": 0.7599,
+ "step": 321
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9498098891590855e-05,
+ "loss": 0.7826,
+ "step": 322
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9494044932741488e-05,
+ "loss": 0.7501,
+ "step": 323
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.948997509223052e-05,
+ "loss": 0.7208,
+ "step": 324
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9485889376865998e-05,
+ "loss": 0.6783,
+ "step": 325
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9481787793482503e-05,
+ "loss": 0.7321,
+ "step": 326
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9477670348941182e-05,
+ "loss": 0.7559,
+ "step": 327
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9473537050129704e-05,
+ "loss": 0.812,
+ "step": 328
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9469387903962263e-05,
+ "loss": 0.7698,
+ "step": 329
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.946522291737956e-05,
+ "loss": 0.658,
+ "step": 330
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9461042097348796e-05,
+ "loss": 0.773,
+ "step": 331
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9456845450863654e-05,
+ "loss": 0.7677,
+ "step": 332
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9452632984944295e-05,
+ "loss": 0.7432,
+ "step": 333
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9448404706637344e-05,
+ "loss": 0.7416,
+ "step": 334
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9444160623015877e-05,
+ "loss": 0.7716,
+ "step": 335
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.94399007411794e-05,
+ "loss": 0.8182,
+ "step": 336
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9435625068253863e-05,
+ "loss": 0.78,
+ "step": 337
+ },
+ {
+ "epoch": 0.13,
+ "learning_rate": 1.9431333611391615e-05,
+ "loss": 0.7344,
+ "step": 338
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.942702637777142e-05,
+ "loss": 0.8004,
+ "step": 339
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9422703374598425e-05,
+ "loss": 0.7734,
+ "step": 340
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9418364609104166e-05,
+ "loss": 0.736,
+ "step": 341
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9414010088546535e-05,
+ "loss": 0.7432,
+ "step": 342
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.940963982020979e-05,
+ "loss": 0.7679,
+ "step": 343
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9405253811404522e-05,
+ "loss": 0.8006,
+ "step": 344
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9400852069467657e-05,
+ "loss": 0.7697,
+ "step": 345
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9396434601762444e-05,
+ "loss": 0.7075,
+ "step": 346
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.939200141567843e-05,
+ "loss": 0.739,
+ "step": 347
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.938755251863146e-05,
+ "loss": 0.7132,
+ "step": 348
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9383087918063662e-05,
+ "loss": 0.7647,
+ "step": 349
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.937860762144343e-05,
+ "loss": 0.6678,
+ "step": 350
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9374111636265412e-05,
+ "loss": 0.6744,
+ "step": 351
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9369599970050506e-05,
+ "loss": 0.7645,
+ "step": 352
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9365072630345838e-05,
+ "loss": 0.717,
+ "step": 353
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9360529624724754e-05,
+ "loss": 0.7083,
+ "step": 354
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9355970960786803e-05,
+ "loss": 0.7598,
+ "step": 355
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.935139664615773e-05,
+ "loss": 0.7078,
+ "step": 356
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.934680668848946e-05,
+ "loss": 0.7911,
+ "step": 357
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9342201095460083e-05,
+ "loss": 0.7303,
+ "step": 358
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.933757987477385e-05,
+ "loss": 0.7327,
+ "step": 359
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.933294303416115e-05,
+ "loss": 0.7987,
+ "step": 360
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9328290581378497e-05,
+ "loss": 0.74,
+ "step": 361
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.932362252420853e-05,
+ "loss": 0.7082,
+ "step": 362
+ },
+ {
+ "epoch": 0.14,
+ "learning_rate": 1.9318938870459984e-05,
+ "loss": 0.7192,
+ "step": 363
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9314239627967688e-05,
+ "loss": 0.7877,
+ "step": 364
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9309524804592545e-05,
+ "loss": 0.7585,
+ "step": 365
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9304794408221524e-05,
+ "loss": 0.6973,
+ "step": 366
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9300048446767643e-05,
+ "loss": 0.7342,
+ "step": 367
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.929528692816996e-05,
+ "loss": 0.7235,
+ "step": 368
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.929050986039355e-05,
+ "loss": 0.7652,
+ "step": 369
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9285717251429507e-05,
+ "loss": 0.7564,
+ "step": 370
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.928090910929492e-05,
+ "loss": 0.7638,
+ "step": 371
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.927608544203286e-05,
+ "loss": 0.7482,
+ "step": 372
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9271246257712368e-05,
+ "loss": 0.7062,
+ "step": 373
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.926639156442845e-05,
+ "loss": 0.7443,
+ "step": 374
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9261521370302035e-05,
+ "loss": 0.7719,
+ "step": 375
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.925663568348001e-05,
+ "loss": 0.7318,
+ "step": 376
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9251734512135155e-05,
+ "loss": 0.7063,
+ "step": 377
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9246817864466166e-05,
+ "loss": 0.814,
+ "step": 378
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9241885748697618e-05,
+ "loss": 0.748,
+ "step": 379
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9236938173079974e-05,
+ "loss": 0.7601,
+ "step": 380
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9231975145889543e-05,
+ "loss": 0.8213,
+ "step": 381
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9226996675428497e-05,
+ "loss": 0.794,
+ "step": 382
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9222002770024827e-05,
+ "loss": 0.7542,
+ "step": 383
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.921699343803235e-05,
+ "loss": 0.7133,
+ "step": 384
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9211968687830693e-05,
+ "loss": 0.6643,
+ "step": 385
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.9206928527825268e-05,
+ "loss": 0.7552,
+ "step": 386
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.920187296644727e-05,
+ "loss": 0.7372,
+ "step": 387
+ },
+ {
+ "epoch": 0.15,
+ "learning_rate": 1.919680201215365e-05,
+ "loss": 0.741,
+ "step": 388
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9191715673427113e-05,
+ "loss": 0.649,
+ "step": 389
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.91866139587761e-05,
+ "loss": 0.704,
+ "step": 390
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.918149687673478e-05,
+ "loss": 0.7599,
+ "step": 391
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9176364435863005e-05,
+ "loss": 0.7196,
+ "step": 392
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9171216644746345e-05,
+ "loss": 0.7109,
+ "step": 393
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9166053511996035e-05,
+ "loss": 0.6849,
+ "step": 394
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9160875046248974e-05,
+ "loss": 0.6786,
+ "step": 395
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.915568125616772e-05,
+ "loss": 0.6608,
+ "step": 396
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9150472150440443e-05,
+ "loss": 0.7156,
+ "step": 397
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9145247737780963e-05,
+ "loss": 0.6825,
+ "step": 398
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9140008026928683e-05,
+ "loss": 0.7512,
+ "step": 399
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9134753026648605e-05,
+ "loss": 0.8204,
+ "step": 400
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9129482745731308e-05,
+ "loss": 0.821,
+ "step": 401
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9124197192992927e-05,
+ "loss": 0.6878,
+ "step": 402
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.911889637727515e-05,
+ "loss": 0.7774,
+ "step": 403
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9113580307445196e-05,
+ "loss": 0.7435,
+ "step": 404
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9108248992395797e-05,
+ "loss": 0.7764,
+ "step": 405
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.910290244104519e-05,
+ "loss": 0.706,
+ "step": 406
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9097540662337102e-05,
+ "loss": 0.7264,
+ "step": 407
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9092163665240726e-05,
+ "loss": 0.729,
+ "step": 408
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9086771458750716e-05,
+ "loss": 0.7105,
+ "step": 409
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9081364051887173e-05,
+ "loss": 0.7846,
+ "step": 410
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9075941453695617e-05,
+ "loss": 0.7441,
+ "step": 411
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9070503673246982e-05,
+ "loss": 0.7402,
+ "step": 412
+ },
+ {
+ "epoch": 0.16,
+ "learning_rate": 1.9065050719637604e-05,
+ "loss": 0.7853,
+ "step": 413
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9059582601989196e-05,
+ "loss": 0.7181,
+ "step": 414
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9054099329448835e-05,
+ "loss": 0.7434,
+ "step": 415
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.904860091118896e-05,
+ "loss": 0.7242,
+ "step": 416
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.904308735640733e-05,
+ "loss": 0.7389,
+ "step": 417
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9037558674327036e-05,
+ "loss": 0.7467,
+ "step": 418
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9032014874196476e-05,
+ "loss": 0.658,
+ "step": 419
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9026455965289326e-05,
+ "loss": 0.7672,
+ "step": 420
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9020881956904543e-05,
+ "loss": 0.758,
+ "step": 421
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.901529285836635e-05,
+ "loss": 0.6795,
+ "step": 422
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.900968867902419e-05,
+ "loss": 0.7556,
+ "step": 423
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.9004069428252765e-05,
+ "loss": 0.7542,
+ "step": 424
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.899843511545196e-05,
+ "loss": 0.7258,
+ "step": 425
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8992785750046866e-05,
+ "loss": 0.7832,
+ "step": 426
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.898712134148776e-05,
+ "loss": 0.7079,
+ "step": 427
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8981441899250082e-05,
+ "loss": 0.8208,
+ "step": 428
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.897574743283441e-05,
+ "loss": 0.7155,
+ "step": 429
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8970037951766468e-05,
+ "loss": 0.761,
+ "step": 430
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.896431346559708e-05,
+ "loss": 0.6913,
+ "step": 431
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.895857398390219e-05,
+ "loss": 0.7586,
+ "step": 432
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.895281951628281e-05,
+ "loss": 0.8129,
+ "step": 433
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8947050072365033e-05,
+ "loss": 0.7142,
+ "step": 434
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.89412656618e-05,
+ "loss": 0.7592,
+ "step": 435
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8935466294263876e-05,
+ "loss": 0.7026,
+ "step": 436
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.8929651979457874e-05,
+ "loss": 0.7668,
+ "step": 437
+ },
+ {
+ "epoch": 0.17,
+ "learning_rate": 1.892382272710818e-05,
+ "loss": 0.7904,
+ "step": 438
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.891797854696599e-05,
+ "loss": 0.6975,
+ "step": 439
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.891211944880746e-05,
+ "loss": 0.6818,
+ "step": 440
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.89062454424337e-05,
+ "loss": 0.7532,
+ "step": 441
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8900356537670768e-05,
+ "loss": 0.741,
+ "step": 442
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8894452744369632e-05,
+ "loss": 0.7386,
+ "step": 443
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8888534072406182e-05,
+ "loss": 0.6238,
+ "step": 444
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.888260053168118e-05,
+ "loss": 0.7396,
+ "step": 445
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8876652132120267e-05,
+ "loss": 0.7695,
+ "step": 446
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.887068888367394e-05,
+ "loss": 0.7542,
+ "step": 447
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8864710796317537e-05,
+ "loss": 0.7169,
+ "step": 448
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.885871788005122e-05,
+ "loss": 0.7246,
+ "step": 449
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8852710144899948e-05,
+ "loss": 0.7823,
+ "step": 450
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.884668760091348e-05,
+ "loss": 0.779,
+ "step": 451
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8840650258166343e-05,
+ "loss": 0.6892,
+ "step": 452
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8834598126757812e-05,
+ "loss": 0.7124,
+ "step": 453
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8828531216811912e-05,
+ "loss": 0.6769,
+ "step": 454
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.882244953847739e-05,
+ "loss": 0.7964,
+ "step": 455
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8816353101927686e-05,
+ "loss": 0.7336,
+ "step": 456
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.881024191736093e-05,
+ "loss": 0.7643,
+ "step": 457
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8804115994999933e-05,
+ "loss": 0.7057,
+ "step": 458
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8797975345092153e-05,
+ "loss": 0.7075,
+ "step": 459
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8791819977909682e-05,
+ "loss": 0.761,
+ "step": 460
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8785649903749236e-05,
+ "loss": 0.73,
+ "step": 461
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.8779465132932124e-05,
+ "loss": 0.7415,
+ "step": 462
+ },
+ {
+ "epoch": 0.18,
+ "learning_rate": 1.877326567580425e-05,
+ "loss": 0.6845,
+ "step": 463
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8767051542736084e-05,
+ "loss": 0.7186,
+ "step": 464
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8760822744122637e-05,
+ "loss": 0.7765,
+ "step": 465
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8754579290383465e-05,
+ "loss": 0.7513,
+ "step": 466
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8748321191962625e-05,
+ "loss": 0.7378,
+ "step": 467
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8742048459328684e-05,
+ "loss": 0.6771,
+ "step": 468
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8735761102974684e-05,
+ "loss": 0.7933,
+ "step": 469
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.872945913341813e-05,
+ "loss": 0.7251,
+ "step": 470
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8723142561200973e-05,
+ "loss": 0.7034,
+ "step": 471
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.871681139688959e-05,
+ "loss": 0.7615,
+ "step": 472
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8710465651074766e-05,
+ "loss": 0.7281,
+ "step": 473
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8704105334371683e-05,
+ "loss": 0.711,
+ "step": 474
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8697730457419893e-05,
+ "loss": 0.7163,
+ "step": 475
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8691341030883306e-05,
+ "loss": 0.7463,
+ "step": 476
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.868493706545017e-05,
+ "loss": 0.6915,
+ "step": 477
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8678518571833054e-05,
+ "loss": 0.7136,
+ "step": 478
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.867208556076883e-05,
+ "loss": 0.7688,
+ "step": 479
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.866563804301866e-05,
+ "loss": 0.7246,
+ "step": 480
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8659176029367957e-05,
+ "loss": 0.7658,
+ "step": 481
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8652699530626398e-05,
+ "loss": 0.7479,
+ "step": 482
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8646208557627888e-05,
+ "loss": 0.7162,
+ "step": 483
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8639703121230542e-05,
+ "loss": 0.8049,
+ "step": 484
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8633183232316668e-05,
+ "loss": 0.7076,
+ "step": 485
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8626648901792753e-05,
+ "loss": 0.73,
+ "step": 486
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8620100140589443e-05,
+ "loss": 0.7241,
+ "step": 487
+ },
+ {
+ "epoch": 0.19,
+ "learning_rate": 1.8613536959661518e-05,
+ "loss": 0.7428,
+ "step": 488
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8606959369987885e-05,
+ "loss": 0.742,
+ "step": 489
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.860036738257155e-05,
+ "loss": 0.7701,
+ "step": 490
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8593761008439614e-05,
+ "loss": 0.707,
+ "step": 491
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8587140258643225e-05,
+ "loss": 0.758,
+ "step": 492
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8580505144257595e-05,
+ "loss": 0.7171,
+ "step": 493
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.857385567638196e-05,
+ "loss": 0.7736,
+ "step": 494
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8567191866139564e-05,
+ "loss": 0.7574,
+ "step": 495
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8560513724677644e-05,
+ "loss": 0.722,
+ "step": 496
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8553821263167417e-05,
+ "loss": 0.7483,
+ "step": 497
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8547114492804042e-05,
+ "loss": 0.7371,
+ "step": 498
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8540393424806627e-05,
+ "loss": 0.7309,
+ "step": 499
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8533658070418186e-05,
+ "loss": 0.7407,
+ "step": 500
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.852690844090564e-05,
+ "loss": 0.7683,
+ "step": 501
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8520144547559783e-05,
+ "loss": 0.7944,
+ "step": 502
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8513366401695277e-05,
+ "loss": 0.764,
+ "step": 503
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8506574014650613e-05,
+ "loss": 0.6717,
+ "step": 504
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.849976739778812e-05,
+ "loss": 0.717,
+ "step": 505
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.849294656249392e-05,
+ "loss": 0.7918,
+ "step": 506
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8486111520177925e-05,
+ "loss": 0.733,
+ "step": 507
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8479262282273805e-05,
+ "loss": 0.7766,
+ "step": 508
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8472398860238988e-05,
+ "loss": 0.793,
+ "step": 509
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8465521265554623e-05,
+ "loss": 0.6814,
+ "step": 510
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8458629509725565e-05,
+ "loss": 0.7911,
+ "step": 511
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.845172360428036e-05,
+ "loss": 0.7566,
+ "step": 512
+ },
+ {
+ "epoch": 0.2,
+ "learning_rate": 1.8444803560771222e-05,
+ "loss": 0.7517,
+ "step": 513
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8437869390774022e-05,
+ "loss": 0.7042,
+ "step": 514
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8430921105888254e-05,
+ "loss": 0.6989,
+ "step": 515
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.842395871773702e-05,
+ "loss": 0.7495,
+ "step": 516
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.841698223796703e-05,
+ "loss": 0.7341,
+ "step": 517
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8409991678248548e-05,
+ "loss": 0.6794,
+ "step": 518
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8402987050275408e-05,
+ "loss": 0.6824,
+ "step": 519
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.839596836576496e-05,
+ "loss": 0.7188,
+ "step": 520
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.838893563645808e-05,
+ "loss": 0.6905,
+ "step": 521
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8381888874119135e-05,
+ "loss": 0.7466,
+ "step": 522
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.837482809053597e-05,
+ "loss": 0.7712,
+ "step": 523
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8367753297519874e-05,
+ "loss": 0.6934,
+ "step": 524
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8360664506905587e-05,
+ "loss": 0.7006,
+ "step": 525
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8353561730551243e-05,
+ "loss": 0.7504,
+ "step": 526
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8346444980338394e-05,
+ "loss": 0.7613,
+ "step": 527
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8339314268171953e-05,
+ "loss": 0.7335,
+ "step": 528
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8332169605980193e-05,
+ "loss": 0.7342,
+ "step": 529
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8325011005714726e-05,
+ "loss": 0.6304,
+ "step": 530
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8317838479350473e-05,
+ "loss": 0.74,
+ "step": 531
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.831065203888565e-05,
+ "loss": 0.7669,
+ "step": 532
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.830345169634176e-05,
+ "loss": 0.6751,
+ "step": 533
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8296237463763543e-05,
+ "loss": 0.7026,
+ "step": 534
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8289009353218995e-05,
+ "loss": 0.6695,
+ "step": 535
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8281767376799315e-05,
+ "loss": 0.722,
+ "step": 536
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.82745115466189e-05,
+ "loss": 0.714,
+ "step": 537
+ },
+ {
+ "epoch": 0.21,
+ "learning_rate": 1.8267241874815312e-05,
+ "loss": 0.7616,
+ "step": 538
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8259958373549285e-05,
+ "loss": 0.7431,
+ "step": 539
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8252661055004672e-05,
+ "loss": 0.763,
+ "step": 540
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.824534993138845e-05,
+ "loss": 0.6588,
+ "step": 541
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.823802501493068e-05,
+ "loss": 0.6979,
+ "step": 542
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8230686317884502e-05,
+ "loss": 0.7634,
+ "step": 543
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8223333852526107e-05,
+ "loss": 0.7769,
+ "step": 544
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8215967631154718e-05,
+ "loss": 0.6473,
+ "step": 545
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8208587666092566e-05,
+ "loss": 0.7221,
+ "step": 546
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8201193969684875e-05,
+ "loss": 0.6316,
+ "step": 547
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.819378655429984e-05,
+ "loss": 0.7503,
+ "step": 548
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8186365432328605e-05,
+ "loss": 0.7571,
+ "step": 549
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8178930616185236e-05,
+ "loss": 0.6551,
+ "step": 550
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8171482118306712e-05,
+ "loss": 0.7076,
+ "step": 551
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8164019951152903e-05,
+ "loss": 0.7201,
+ "step": 552
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8156544127206536e-05,
+ "loss": 0.6466,
+ "step": 553
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8149054658973194e-05,
+ "loss": 0.7187,
+ "step": 554
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.814155155898127e-05,
+ "loss": 0.7567,
+ "step": 555
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8134034839781972e-05,
+ "loss": 0.7637,
+ "step": 556
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8126504513949287e-05,
+ "loss": 0.7026,
+ "step": 557
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.811896059407996e-05,
+ "loss": 0.7503,
+ "step": 558
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.811140309279348e-05,
+ "loss": 0.7236,
+ "step": 559
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8103832022732056e-05,
+ "loss": 0.7574,
+ "step": 560
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8096247396560585e-05,
+ "loss": 0.7162,
+ "step": 561
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8088649226966658e-05,
+ "loss": 0.7208,
+ "step": 562
+ },
+ {
+ "epoch": 0.22,
+ "learning_rate": 1.8081037526660505e-05,
+ "loss": 0.7285,
+ "step": 563
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8073412308375e-05,
+ "loss": 0.7129,
+ "step": 564
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8065773584865623e-05,
+ "loss": 0.7848,
+ "step": 565
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.805812136891046e-05,
+ "loss": 0.7196,
+ "step": 566
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8050455673310142e-05,
+ "loss": 0.7233,
+ "step": 567
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.804277651088787e-05,
+ "loss": 0.7768,
+ "step": 568
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8035083894489364e-05,
+ "loss": 0.7033,
+ "step": 569
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8027377836982857e-05,
+ "loss": 0.7374,
+ "step": 570
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8019658351259056e-05,
+ "loss": 0.717,
+ "step": 571
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8011925450231135e-05,
+ "loss": 0.7364,
+ "step": 572
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.8004179146834712e-05,
+ "loss": 0.7795,
+ "step": 573
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7996419454027828e-05,
+ "loss": 0.7178,
+ "step": 574
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.79886463847909e-05,
+ "loss": 0.7197,
+ "step": 575
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7980859952126755e-05,
+ "loss": 0.7345,
+ "step": 576
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7973060169060546e-05,
+ "loss": 0.7669,
+ "step": 577
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.796524704863977e-05,
+ "loss": 0.6962,
+ "step": 578
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.795742060393424e-05,
+ "loss": 0.6952,
+ "step": 579
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7949580848036045e-05,
+ "loss": 0.6288,
+ "step": 580
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7941727794059553e-05,
+ "loss": 0.7703,
+ "step": 581
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7933861455141365e-05,
+ "loss": 0.6873,
+ "step": 582
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.792598184444032e-05,
+ "loss": 0.663,
+ "step": 583
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7918088975137445e-05,
+ "loss": 0.7452,
+ "step": 584
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7910182860435954e-05,
+ "loss": 0.7534,
+ "step": 585
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7902263513561214e-05,
+ "loss": 0.7354,
+ "step": 586
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.7894330947760728e-05,
+ "loss": 0.7902,
+ "step": 587
+ },
+ {
+ "epoch": 0.23,
+ "learning_rate": 1.788638517630411e-05,
+ "loss": 0.7369,
+ "step": 588
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7878426212483066e-05,
+ "loss": 0.7358,
+ "step": 589
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7870454069611375e-05,
+ "loss": 0.7771,
+ "step": 590
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.786246876102485e-05,
+ "loss": 0.656,
+ "step": 591
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7854470300081338e-05,
+ "loss": 0.7169,
+ "step": 592
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7846458700160685e-05,
+ "loss": 0.6286,
+ "step": 593
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7838433974664714e-05,
+ "loss": 0.7738,
+ "step": 594
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7830396137017202e-05,
+ "loss": 0.7582,
+ "step": 595
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7822345200663874e-05,
+ "loss": 0.7393,
+ "step": 596
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7814281179072345e-05,
+ "loss": 0.7096,
+ "step": 597
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.780620408573213e-05,
+ "loss": 0.6843,
+ "step": 598
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7798113934154623e-05,
+ "loss": 0.7575,
+ "step": 599
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7790010737873035e-05,
+ "loss": 0.688,
+ "step": 600
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.778189451044242e-05,
+ "loss": 0.7294,
+ "step": 601
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.777376526543962e-05,
+ "loss": 0.7799,
+ "step": 602
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.776562301646326e-05,
+ "loss": 0.7112,
+ "step": 603
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.775746777713371e-05,
+ "loss": 0.7384,
+ "step": 604
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7749299561093076e-05,
+ "loss": 0.7376,
+ "step": 605
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7741118382005172e-05,
+ "loss": 0.7321,
+ "step": 606
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7732924253555487e-05,
+ "loss": 0.6928,
+ "step": 607
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.772471718945119e-05,
+ "loss": 0.6811,
+ "step": 608
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7716497203421072e-05,
+ "loss": 0.7019,
+ "step": 609
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7708264309215543e-05,
+ "loss": 0.6797,
+ "step": 610
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.770001852060661e-05,
+ "loss": 0.7278,
+ "step": 611
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7691759851387852e-05,
+ "loss": 0.7607,
+ "step": 612
+ },
+ {
+ "epoch": 0.24,
+ "learning_rate": 1.7683488315374386e-05,
+ "loss": 0.677,
+ "step": 613
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7675203926402863e-05,
+ "loss": 0.7376,
+ "step": 614
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7666906698331426e-05,
+ "loss": 0.7472,
+ "step": 615
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.76585966450397e-05,
+ "loss": 0.6612,
+ "step": 616
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.765027378042876e-05,
+ "loss": 0.6598,
+ "step": 617
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.764193811842112e-05,
+ "loss": 0.7401,
+ "step": 618
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.763358967296069e-05,
+ "loss": 0.7545,
+ "step": 619
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.762522845801277e-05,
+ "loss": 0.7671,
+ "step": 620
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7616854487564024e-05,
+ "loss": 0.7466,
+ "step": 621
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7608467775622448e-05,
+ "loss": 0.6709,
+ "step": 622
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7600068336217355e-05,
+ "loss": 0.7305,
+ "step": 623
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.759165618339935e-05,
+ "loss": 0.7245,
+ "step": 624
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7583231331240297e-05,
+ "loss": 0.7318,
+ "step": 625
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7574793793833312e-05,
+ "loss": 0.7379,
+ "step": 626
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7566343585292732e-05,
+ "loss": 0.7605,
+ "step": 627
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7557880719754082e-05,
+ "loss": 0.6775,
+ "step": 628
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7549405211374072e-05,
+ "loss": 0.7228,
+ "step": 629
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7540917074330543e-05,
+ "loss": 0.7659,
+ "step": 630
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7532416322822473e-05,
+ "loss": 0.7667,
+ "step": 631
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7523902971069944e-05,
+ "loss": 0.6815,
+ "step": 632
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7515377033314113e-05,
+ "loss": 0.7059,
+ "step": 633
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7506838523817183e-05,
+ "loss": 0.795,
+ "step": 634
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7498287456862398e-05,
+ "loss": 0.7417,
+ "step": 635
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7489723846754e-05,
+ "loss": 0.6858,
+ "step": 636
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7481147707817227e-05,
+ "loss": 0.6627,
+ "step": 637
+ },
+ {
+ "epoch": 0.25,
+ "learning_rate": 1.7472559054398256e-05,
+ "loss": 0.7482,
+ "step": 638
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.746395790086421e-05,
+ "loss": 0.6898,
+ "step": 639
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7455344261603116e-05,
+ "loss": 0.6523,
+ "step": 640
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7446718151023894e-05,
+ "loss": 0.7312,
+ "step": 641
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7438079583556326e-05,
+ "loss": 0.6984,
+ "step": 642
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7429428573651023e-05,
+ "loss": 0.6524,
+ "step": 643
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.742076513577942e-05,
+ "loss": 0.7135,
+ "step": 644
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7412089284433734e-05,
+ "loss": 0.7082,
+ "step": 645
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7403401034126954e-05,
+ "loss": 0.731,
+ "step": 646
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7394700399392804e-05,
+ "loss": 0.751,
+ "step": 647
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7385987394785723e-05,
+ "loss": 0.7843,
+ "step": 648
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7377262034880854e-05,
+ "loss": 0.7209,
+ "step": 649
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7368524334273998e-05,
+ "loss": 0.7101,
+ "step": 650
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7359774307581602e-05,
+ "loss": 0.7177,
+ "step": 651
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7351011969440728e-05,
+ "loss": 0.7787,
+ "step": 652
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.734223733450904e-05,
+ "loss": 0.7481,
+ "step": 653
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.733345041746477e-05,
+ "loss": 0.7154,
+ "step": 654
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.732465123300669e-05,
+ "loss": 0.6664,
+ "step": 655
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7315839795854103e-05,
+ "loss": 0.7251,
+ "step": 656
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.73070161207468e-05,
+ "loss": 0.7233,
+ "step": 657
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7298180222445043e-05,
+ "loss": 0.724,
+ "step": 658
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7289332115729554e-05,
+ "loss": 0.705,
+ "step": 659
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7280471815401453e-05,
+ "loss": 0.6835,
+ "step": 660
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7271599336282285e-05,
+ "loss": 0.74,
+ "step": 661
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7262714693213948e-05,
+ "loss": 0.6952,
+ "step": 662
+ },
+ {
+ "epoch": 0.26,
+ "learning_rate": 1.7253817901058698e-05,
+ "loss": 0.7798,
+ "step": 663
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7244908974699112e-05,
+ "loss": 0.6579,
+ "step": 664
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7235987929038056e-05,
+ "loss": 0.7098,
+ "step": 665
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7227054778998682e-05,
+ "loss": 0.69,
+ "step": 666
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.721810953952439e-05,
+ "loss": 0.7221,
+ "step": 667
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.720915222557879e-05,
+ "loss": 0.704,
+ "step": 668
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7200182852145705e-05,
+ "loss": 0.624,
+ "step": 669
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7191201434229118e-05,
+ "loss": 0.7107,
+ "step": 670
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7182207986853176e-05,
+ "loss": 0.7488,
+ "step": 671
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7173202525062133e-05,
+ "loss": 0.6899,
+ "step": 672
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.716418506392035e-05,
+ "loss": 0.7479,
+ "step": 673
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7155155618512255e-05,
+ "loss": 0.7212,
+ "step": 674
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7146114203942332e-05,
+ "loss": 0.74,
+ "step": 675
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7137060835335076e-05,
+ "loss": 0.7779,
+ "step": 676
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.712799552783499e-05,
+ "loss": 0.7303,
+ "step": 677
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7118918296606536e-05,
+ "loss": 0.7146,
+ "step": 678
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7109829156834133e-05,
+ "loss": 0.6549,
+ "step": 679
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7100728123722115e-05,
+ "loss": 0.7206,
+ "step": 680
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7091615212494714e-05,
+ "loss": 0.7516,
+ "step": 681
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7082490438396027e-05,
+ "loss": 0.7719,
+ "step": 682
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.707335381669e-05,
+ "loss": 0.7464,
+ "step": 683
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7064205362660397e-05,
+ "loss": 0.7458,
+ "step": 684
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7055045091610772e-05,
+ "loss": 0.7825,
+ "step": 685
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.7045873018864452e-05,
+ "loss": 0.6619,
+ "step": 686
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.70366891597645e-05,
+ "loss": 0.7329,
+ "step": 687
+ },
+ {
+ "epoch": 0.27,
+ "learning_rate": 1.70274935296737e-05,
+ "loss": 0.7504,
+ "step": 688
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.701828614397452e-05,
+ "loss": 0.7381,
+ "step": 689
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.7009067018069106e-05,
+ "loss": 0.6408,
+ "step": 690
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6999836167379228e-05,
+ "loss": 0.6997,
+ "step": 691
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6990593607346276e-05,
+ "loss": 0.7199,
+ "step": 692
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.698133935343123e-05,
+ "loss": 0.7359,
+ "step": 693
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6972073421114626e-05,
+ "loss": 0.7051,
+ "step": 694
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6962795825896537e-05,
+ "loss": 0.6857,
+ "step": 695
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.695350658329655e-05,
+ "loss": 0.7659,
+ "step": 696
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6944205708853735e-05,
+ "loss": 0.7108,
+ "step": 697
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6934893218126607e-05,
+ "loss": 0.6986,
+ "step": 698
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6925569126693135e-05,
+ "loss": 0.6986,
+ "step": 699
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6916233450150675e-05,
+ "loss": 0.7506,
+ "step": 700
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6906886204115974e-05,
+ "loss": 0.7299,
+ "step": 701
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6897527404225128e-05,
+ "loss": 0.7058,
+ "step": 702
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6888157066133555e-05,
+ "loss": 0.767,
+ "step": 703
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6878775205515983e-05,
+ "loss": 0.6931,
+ "step": 704
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6869381838066412e-05,
+ "loss": 0.6984,
+ "step": 705
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.685997697949809e-05,
+ "loss": 0.7397,
+ "step": 706
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6850560645543485e-05,
+ "loss": 0.7057,
+ "step": 707
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.684113285195427e-05,
+ "loss": 0.7594,
+ "step": 708
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6831693614501272e-05,
+ "loss": 0.6686,
+ "step": 709
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6822242948974475e-05,
+ "loss": 0.7159,
+ "step": 710
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6812780871182976e-05,
+ "loss": 0.6054,
+ "step": 711
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6803307396954962e-05,
+ "loss": 0.7088,
+ "step": 712
+ },
+ {
+ "epoch": 0.28,
+ "learning_rate": 1.6793822542137683e-05,
+ "loss": 0.6949,
+ "step": 713
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6784326322597427e-05,
+ "loss": 0.6904,
+ "step": 714
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6774818754219495e-05,
+ "loss": 0.7089,
+ "step": 715
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.676529985290817e-05,
+ "loss": 0.6212,
+ "step": 716
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.675576963458669e-05,
+ "loss": 0.7531,
+ "step": 717
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.674622811519723e-05,
+ "loss": 0.7467,
+ "step": 718
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.673667531070087e-05,
+ "loss": 0.8059,
+ "step": 719
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.672711123707756e-05,
+ "loss": 0.7808,
+ "step": 720
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6717535910326108e-05,
+ "loss": 0.6887,
+ "step": 721
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6707949346464147e-05,
+ "loss": 0.7268,
+ "step": 722
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.66983515615281e-05,
+ "loss": 0.6902,
+ "step": 723
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6688742571573167e-05,
+ "loss": 0.7593,
+ "step": 724
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6679122392673284e-05,
+ "loss": 0.6757,
+ "step": 725
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6669491040921123e-05,
+ "loss": 0.6641,
+ "step": 726
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.665984853242802e-05,
+ "loss": 0.7411,
+ "step": 727
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6650194883323995e-05,
+ "loss": 0.6634,
+ "step": 728
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.664053010975769e-05,
+ "loss": 0.7874,
+ "step": 729
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6630854227896364e-05,
+ "loss": 0.7315,
+ "step": 730
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6621167253925857e-05,
+ "loss": 0.7053,
+ "step": 731
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6611469204050566e-05,
+ "loss": 0.7273,
+ "step": 732
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6601760094493408e-05,
+ "loss": 0.7291,
+ "step": 733
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6592039941495803e-05,
+ "loss": 0.7074,
+ "step": 734
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6582308761317652e-05,
+ "loss": 0.7419,
+ "step": 735
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6572566570237293e-05,
+ "loss": 0.6849,
+ "step": 736
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.656281338455149e-05,
+ "loss": 0.6904,
+ "step": 737
+ },
+ {
+ "epoch": 0.29,
+ "learning_rate": 1.6553049220575392e-05,
+ "loss": 0.7739,
+ "step": 738
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6543274094642522e-05,
+ "loss": 0.7094,
+ "step": 739
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6533488023104732e-05,
+ "loss": 0.7876,
+ "step": 740
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6523691022332184e-05,
+ "loss": 0.6795,
+ "step": 741
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6513883108713326e-05,
+ "loss": 0.8014,
+ "step": 742
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6504064298654864e-05,
+ "loss": 0.7374,
+ "step": 743
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6494234608581722e-05,
+ "loss": 0.7254,
+ "step": 744
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6484394054937033e-05,
+ "loss": 0.6805,
+ "step": 745
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.64745426541821e-05,
+ "loss": 0.7587,
+ "step": 746
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6464680422796367e-05,
+ "loss": 0.6785,
+ "step": 747
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.64548073772774e-05,
+ "loss": 0.7103,
+ "step": 748
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6444923534140856e-05,
+ "loss": 0.6803,
+ "step": 749
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.643502890992045e-05,
+ "loss": 0.8047,
+ "step": 750
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.642512352116793e-05,
+ "loss": 0.7263,
+ "step": 751
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.641520738445306e-05,
+ "loss": 0.7351,
+ "step": 752
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6405280516363576e-05,
+ "loss": 0.6882,
+ "step": 753
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.639534293350517e-05,
+ "loss": 0.7189,
+ "step": 754
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6385394652501444e-05,
+ "loss": 0.6965,
+ "step": 755
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.637543568999392e-05,
+ "loss": 0.7047,
+ "step": 756
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6365466062641974e-05,
+ "loss": 0.7005,
+ "step": 757
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6355485787122814e-05,
+ "loss": 0.7951,
+ "step": 758
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6345494880131476e-05,
+ "loss": 0.6721,
+ "step": 759
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.633549335838077e-05,
+ "loss": 0.711,
+ "step": 760
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.632548123860127e-05,
+ "loss": 0.686,
+ "step": 761
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6315458537541272e-05,
+ "loss": 0.6824,
+ "step": 762
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6305425271966772e-05,
+ "loss": 0.7385,
+ "step": 763
+ },
+ {
+ "epoch": 0.3,
+ "learning_rate": 1.6295381458661447e-05,
+ "loss": 0.6968,
+ "step": 764
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6285327114426608e-05,
+ "loss": 0.7117,
+ "step": 765
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.627526225608119e-05,
+ "loss": 0.7209,
+ "step": 766
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6265186900461715e-05,
+ "loss": 0.661,
+ "step": 767
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6255101064422252e-05,
+ "loss": 0.606,
+ "step": 768
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6245004764834423e-05,
+ "loss": 0.6404,
+ "step": 769
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6234898018587336e-05,
+ "loss": 0.7529,
+ "step": 770
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6224780842587586e-05,
+ "loss": 0.7078,
+ "step": 771
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6214653253759206e-05,
+ "loss": 0.7056,
+ "step": 772
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6204515269043653e-05,
+ "loss": 0.7808,
+ "step": 773
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.619436690539977e-05,
+ "loss": 0.7293,
+ "step": 774
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6184208179803772e-05,
+ "loss": 0.7309,
+ "step": 775
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.617403910924919e-05,
+ "loss": 0.6725,
+ "step": 776
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6163859710746878e-05,
+ "loss": 0.7228,
+ "step": 777
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6153670001324955e-05,
+ "loss": 0.6878,
+ "step": 778
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6143469998028792e-05,
+ "loss": 0.7109,
+ "step": 779
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.613325971792098e-05,
+ "loss": 0.6732,
+ "step": 780
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6123039178081303e-05,
+ "loss": 0.7588,
+ "step": 781
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.61128083956067e-05,
+ "loss": 0.7235,
+ "step": 782
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.610256738761125e-05,
+ "loss": 0.7364,
+ "step": 783
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.609231617122614e-05,
+ "loss": 0.7147,
+ "step": 784
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6082054763599627e-05,
+ "loss": 0.6878,
+ "step": 785
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.607178318189702e-05,
+ "loss": 0.7373,
+ "step": 786
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6061501443300644e-05,
+ "loss": 0.7497,
+ "step": 787
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.605120956500982e-05,
+ "loss": 0.6688,
+ "step": 788
+ },
+ {
+ "epoch": 0.31,
+ "learning_rate": 1.6040907564240823e-05,
+ "loss": 0.7833,
+ "step": 789
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.6030595458226872e-05,
+ "loss": 0.6807,
+ "step": 790
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.6020273264218076e-05,
+ "loss": 0.6615,
+ "step": 791
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.600994099948143e-05,
+ "loss": 0.6854,
+ "step": 792
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5999598681300766e-05,
+ "loss": 0.7447,
+ "step": 793
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5989246326976746e-05,
+ "loss": 0.6892,
+ "step": 794
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5978883953826814e-05,
+ "loss": 0.6998,
+ "step": 795
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.596851157918517e-05,
+ "loss": 0.7184,
+ "step": 796
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5958129220402745e-05,
+ "loss": 0.7148,
+ "step": 797
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.594773689484718e-05,
+ "loss": 0.694,
+ "step": 798
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.593733461990278e-05,
+ "loss": 0.758,
+ "step": 799
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5926922412970495e-05,
+ "loss": 0.653,
+ "step": 800
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5916500291467892e-05,
+ "loss": 0.7461,
+ "step": 801
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5906068272829122e-05,
+ "loss": 0.747,
+ "step": 802
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.589562637450489e-05,
+ "loss": 0.6559,
+ "step": 803
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5885174613962427e-05,
+ "loss": 0.7928,
+ "step": 804
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5874713008685464e-05,
+ "loss": 0.7488,
+ "step": 805
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5864241576174205e-05,
+ "loss": 0.6615,
+ "step": 806
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5853760333945282e-05,
+ "loss": 0.6597,
+ "step": 807
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5843269299531748e-05,
+ "loss": 0.676,
+ "step": 808
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.583276849048303e-05,
+ "loss": 0.7231,
+ "step": 809
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5822257924364902e-05,
+ "loss": 0.675,
+ "step": 810
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.581173761875947e-05,
+ "loss": 0.6511,
+ "step": 811
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.580120759126512e-05,
+ "loss": 0.7408,
+ "step": 812
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.579066785949652e-05,
+ "loss": 0.6735,
+ "step": 813
+ },
+ {
+ "epoch": 0.32,
+ "learning_rate": 1.5780118441084544e-05,
+ "loss": 0.7221,
+ "step": 814
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5769559353676297e-05,
+ "loss": 0.7179,
+ "step": 815
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5758990614935042e-05,
+ "loss": 0.674,
+ "step": 816
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5748412242540193e-05,
+ "loss": 0.7702,
+ "step": 817
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5737824254187275e-05,
+ "loss": 0.7155,
+ "step": 818
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5727226667587898e-05,
+ "loss": 0.7146,
+ "step": 819
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5716619500469735e-05,
+ "loss": 0.7073,
+ "step": 820
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5706002770576478e-05,
+ "loss": 0.7464,
+ "step": 821
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5695376495667828e-05,
+ "loss": 0.7111,
+ "step": 822
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.568474069351943e-05,
+ "loss": 0.7569,
+ "step": 823
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5674095381922886e-05,
+ "loss": 0.773,
+ "step": 824
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5663440578685706e-05,
+ "loss": 0.721,
+ "step": 825
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.565277630163126e-05,
+ "loss": 0.6997,
+ "step": 826
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.564210256859878e-05,
+ "loss": 0.7547,
+ "step": 827
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.563141939744332e-05,
+ "loss": 0.6469,
+ "step": 828
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.56207268060357e-05,
+ "loss": 0.7547,
+ "step": 829
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5610024812262526e-05,
+ "loss": 0.6557,
+ "step": 830
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.559931343402612e-05,
+ "loss": 0.7523,
+ "step": 831
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5588592689244492e-05,
+ "loss": 0.7843,
+ "step": 832
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5577862595851335e-05,
+ "loss": 0.6991,
+ "step": 833
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.556712317179598e-05,
+ "loss": 0.6982,
+ "step": 834
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5556374435043353e-05,
+ "loss": 0.6998,
+ "step": 835
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5545616403573977e-05,
+ "loss": 0.7265,
+ "step": 836
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5534849095383902e-05,
+ "loss": 0.7243,
+ "step": 837
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5524072528484716e-05,
+ "loss": 0.7012,
+ "step": 838
+ },
+ {
+ "epoch": 0.33,
+ "learning_rate": 1.5513286720903488e-05,
+ "loss": 0.6841,
+ "step": 839
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5502491690682732e-05,
+ "loss": 0.7127,
+ "step": 840
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5491687455880414e-05,
+ "loss": 0.6614,
+ "step": 841
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5480874034569876e-05,
+ "loss": 0.7025,
+ "step": 842
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.547005144483984e-05,
+ "loss": 0.6808,
+ "step": 843
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5459219704794358e-05,
+ "loss": 0.6096,
+ "step": 844
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5448378832552796e-05,
+ "loss": 0.7217,
+ "step": 845
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5437528846249783e-05,
+ "loss": 0.7117,
+ "step": 846
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.542666976403521e-05,
+ "loss": 0.6824,
+ "step": 847
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.541580160407417e-05,
+ "loss": 0.7198,
+ "step": 848
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5404924384546955e-05,
+ "loss": 0.7591,
+ "step": 849
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5394038123649e-05,
+ "loss": 0.6404,
+ "step": 850
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5383142839590872e-05,
+ "loss": 0.7637,
+ "step": 851
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5372238550598225e-05,
+ "loss": 0.6484,
+ "step": 852
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.536132527491178e-05,
+ "loss": 0.6782,
+ "step": 853
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.53504030307873e-05,
+ "loss": 0.7377,
+ "step": 854
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5339471836495524e-05,
+ "loss": 0.7339,
+ "step": 855
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5328531710322198e-05,
+ "loss": 0.7107,
+ "step": 856
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5317582670567986e-05,
+ "loss": 0.7274,
+ "step": 857
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5306624735548465e-05,
+ "loss": 0.707,
+ "step": 858
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.52956579235941e-05,
+ "loss": 0.7398,
+ "step": 859
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.52846822530502e-05,
+ "loss": 0.7255,
+ "step": 860
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.527369774227689e-05,
+ "loss": 0.763,
+ "step": 861
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.526270440964909e-05,
+ "loss": 0.7146,
+ "step": 862
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5251702273556475e-05,
+ "loss": 0.7257,
+ "step": 863
+ },
+ {
+ "epoch": 0.34,
+ "learning_rate": 1.5240691352403441e-05,
+ "loss": 0.7373,
+ "step": 864
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5229671664609088e-05,
+ "loss": 0.7568,
+ "step": 865
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.521864322860717e-05,
+ "loss": 0.6856,
+ "step": 866
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5207606062846092e-05,
+ "loss": 0.7851,
+ "step": 867
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5196560185788842e-05,
+ "loss": 0.6876,
+ "step": 868
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5185505615912998e-05,
+ "loss": 0.7136,
+ "step": 869
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.517444237171067e-05,
+ "loss": 0.7548,
+ "step": 870
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5163370471688471e-05,
+ "loss": 0.7294,
+ "step": 871
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5152289934367516e-05,
+ "loss": 0.645,
+ "step": 872
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5141200778283345e-05,
+ "loss": 0.6467,
+ "step": 873
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5130103021985929e-05,
+ "loss": 0.7789,
+ "step": 874
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5118996684039621e-05,
+ "loss": 0.7124,
+ "step": 875
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5107881783023138e-05,
+ "loss": 0.6568,
+ "step": 876
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.50967583375295e-05,
+ "loss": 0.7246,
+ "step": 877
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5085626366166039e-05,
+ "loss": 0.7525,
+ "step": 878
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5074485887554351e-05,
+ "loss": 0.6689,
+ "step": 879
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5063336920330244e-05,
+ "loss": 0.7083,
+ "step": 880
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5052179483143752e-05,
+ "loss": 0.7408,
+ "step": 881
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5041013594659051e-05,
+ "loss": 0.7544,
+ "step": 882
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5029839273554469e-05,
+ "loss": 0.7016,
+ "step": 883
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5018656538522442e-05,
+ "loss": 0.7271,
+ "step": 884
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.5007465408269477e-05,
+ "loss": 0.6163,
+ "step": 885
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4996265901516116e-05,
+ "loss": 0.6774,
+ "step": 886
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4985058036996929e-05,
+ "loss": 0.7443,
+ "step": 887
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4973841833460457e-05,
+ "loss": 0.6873,
+ "step": 888
+ },
+ {
+ "epoch": 0.35,
+ "learning_rate": 1.4962617309669192e-05,
+ "loss": 0.7031,
+ "step": 889
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4951384484399542e-05,
+ "loss": 0.6881,
+ "step": 890
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4940143376441812e-05,
+ "loss": 0.7069,
+ "step": 891
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.492889400460015e-05,
+ "loss": 0.6763,
+ "step": 892
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.491763638769253e-05,
+ "loss": 0.7011,
+ "step": 893
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.490637054455073e-05,
+ "loss": 0.695,
+ "step": 894
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4895096494020274e-05,
+ "loss": 0.6922,
+ "step": 895
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4883814254960418e-05,
+ "loss": 0.7161,
+ "step": 896
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4872523846244127e-05,
+ "loss": 0.6947,
+ "step": 897
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4861225286758018e-05,
+ "loss": 0.7732,
+ "step": 898
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4849918595402353e-05,
+ "loss": 0.6959,
+ "step": 899
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4838603791090989e-05,
+ "loss": 0.7235,
+ "step": 900
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4827280892751364e-05,
+ "loss": 0.6179,
+ "step": 901
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4815949919324443e-05,
+ "loss": 0.7104,
+ "step": 902
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4804610889764712e-05,
+ "loss": 0.7206,
+ "step": 903
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4793263823040127e-05,
+ "loss": 0.7685,
+ "step": 904
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4781908738132085e-05,
+ "loss": 0.6827,
+ "step": 905
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4770545654035403e-05,
+ "loss": 0.7053,
+ "step": 906
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4759174589758272e-05,
+ "loss": 0.667,
+ "step": 907
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4747795564322243e-05,
+ "loss": 0.7323,
+ "step": 908
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.473640859676217e-05,
+ "loss": 0.6712,
+ "step": 909
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4725013706126207e-05,
+ "loss": 0.7165,
+ "step": 910
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.471361091147575e-05,
+ "loss": 0.7363,
+ "step": 911
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4702200231885425e-05,
+ "loss": 0.749,
+ "step": 912
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4690781686443044e-05,
+ "loss": 0.6708,
+ "step": 913
+ },
+ {
+ "epoch": 0.36,
+ "learning_rate": 1.4679355294249575e-05,
+ "loss": 0.7017,
+ "step": 914
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4667921074419118e-05,
+ "loss": 0.7179,
+ "step": 915
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4656479046078862e-05,
+ "loss": 0.6854,
+ "step": 916
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4645029228369062e-05,
+ "loss": 0.7164,
+ "step": 917
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4633571640442998e-05,
+ "loss": 0.7216,
+ "step": 918
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4622106301466956e-05,
+ "loss": 0.6957,
+ "step": 919
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4610633230620179e-05,
+ "loss": 0.7225,
+ "step": 920
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4599152447094846e-05,
+ "loss": 0.6873,
+ "step": 921
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4587663970096048e-05,
+ "loss": 0.6542,
+ "step": 922
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4576167818841732e-05,
+ "loss": 0.8246,
+ "step": 923
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4564664012562686e-05,
+ "loss": 0.7557,
+ "step": 924
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4553152570502506e-05,
+ "loss": 0.7099,
+ "step": 925
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4541633511917567e-05,
+ "loss": 0.6801,
+ "step": 926
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.453010685607697e-05,
+ "loss": 0.7334,
+ "step": 927
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.451857262226254e-05,
+ "loss": 0.6867,
+ "step": 928
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4507030829768765e-05,
+ "loss": 0.7394,
+ "step": 929
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4495481497902789e-05,
+ "loss": 0.7298,
+ "step": 930
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4483924645984359e-05,
+ "loss": 0.716,
+ "step": 931
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4472360293345807e-05,
+ "loss": 0.7513,
+ "step": 932
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4460788459332011e-05,
+ "loss": 0.6896,
+ "step": 933
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.444920916330036e-05,
+ "loss": 0.6696,
+ "step": 934
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4437622424620733e-05,
+ "loss": 0.6802,
+ "step": 935
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4426028262675451e-05,
+ "loss": 0.708,
+ "step": 936
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.441442669685926e-05,
+ "loss": 0.7535,
+ "step": 937
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.440281774657929e-05,
+ "loss": 0.6487,
+ "step": 938
+ },
+ {
+ "epoch": 0.37,
+ "learning_rate": 1.4391201431255014e-05,
+ "loss": 0.6999,
+ "step": 939
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4379577770318234e-05,
+ "loss": 0.6494,
+ "step": 940
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.436794678321304e-05,
+ "loss": 0.72,
+ "step": 941
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4356308489395783e-05,
+ "loss": 0.7062,
+ "step": 942
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4344662908335018e-05,
+ "loss": 0.7004,
+ "step": 943
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4333010059511505e-05,
+ "loss": 0.7117,
+ "step": 944
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4321349962418163e-05,
+ "loss": 0.7065,
+ "step": 945
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4309682636560027e-05,
+ "loss": 0.65,
+ "step": 946
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4298008101454226e-05,
+ "loss": 0.7101,
+ "step": 947
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4286326376629956e-05,
+ "loss": 0.7097,
+ "step": 948
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4274637481628431e-05,
+ "loss": 0.7271,
+ "step": 949
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4262941436002862e-05,
+ "loss": 0.7428,
+ "step": 950
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.425123825931843e-05,
+ "loss": 0.7166,
+ "step": 951
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.423952797115223e-05,
+ "loss": 0.693,
+ "step": 952
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4227810591093262e-05,
+ "loss": 0.6703,
+ "step": 953
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4216086138742395e-05,
+ "loss": 0.7456,
+ "step": 954
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4204354633712312e-05,
+ "loss": 0.6491,
+ "step": 955
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4192616095627506e-05,
+ "loss": 0.7326,
+ "step": 956
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4180870544124236e-05,
+ "loss": 0.7075,
+ "step": 957
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.416911799885049e-05,
+ "loss": 0.7633,
+ "step": 958
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4157358479465949e-05,
+ "loss": 0.7491,
+ "step": 959
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4145592005641967e-05,
+ "loss": 0.7696,
+ "step": 960
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.413381859706153e-05,
+ "loss": 0.6538,
+ "step": 961
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.412203827341923e-05,
+ "loss": 0.7178,
+ "step": 962
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4110251054421218e-05,
+ "loss": 0.6657,
+ "step": 963
+ },
+ {
+ "epoch": 0.38,
+ "learning_rate": 1.4098456959785178e-05,
+ "loss": 0.7018,
+ "step": 964
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4086656009240308e-05,
+ "loss": 0.662,
+ "step": 965
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4074848222527262e-05,
+ "loss": 0.7544,
+ "step": 966
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4063033619398136e-05,
+ "loss": 0.7763,
+ "step": 967
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4051212219616427e-05,
+ "loss": 0.6973,
+ "step": 968
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4039384042957003e-05,
+ "loss": 0.703,
+ "step": 969
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4027549109206064e-05,
+ "loss": 0.768,
+ "step": 970
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4015707438161122e-05,
+ "loss": 0.6641,
+ "step": 971
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.4003859049630943e-05,
+ "loss": 0.6927,
+ "step": 972
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.399200396343555e-05,
+ "loss": 0.6963,
+ "step": 973
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3980142199406155e-05,
+ "loss": 0.6638,
+ "step": 974
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3968273777385146e-05,
+ "loss": 0.7212,
+ "step": 975
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3956398717226055e-05,
+ "loss": 0.6794,
+ "step": 976
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.39445170387935e-05,
+ "loss": 0.6819,
+ "step": 977
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3932628761963192e-05,
+ "loss": 0.7442,
+ "step": 978
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3920733906621861e-05,
+ "loss": 0.7256,
+ "step": 979
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3908832492667261e-05,
+ "loss": 0.6193,
+ "step": 980
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3896924540008098e-05,
+ "loss": 0.7534,
+ "step": 981
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3885010068564026e-05,
+ "loss": 0.6721,
+ "step": 982
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3873089098265606e-05,
+ "loss": 0.7531,
+ "step": 983
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.386116164905426e-05,
+ "loss": 0.7573,
+ "step": 984
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3849227740882257e-05,
+ "loss": 0.6604,
+ "step": 985
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3837287393712666e-05,
+ "loss": 0.6975,
+ "step": 986
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3825340627519337e-05,
+ "loss": 0.7443,
+ "step": 987
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3813387462286839e-05,
+ "loss": 0.7458,
+ "step": 988
+ },
+ {
+ "epoch": 0.39,
+ "learning_rate": 1.3801427918010461e-05,
+ "loss": 0.7775,
+ "step": 989
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.378946201469616e-05,
+ "loss": 0.7355,
+ "step": 990
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3777489772360521e-05,
+ "loss": 0.7355,
+ "step": 991
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3765511211030753e-05,
+ "loss": 0.7361,
+ "step": 992
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3753526350744611e-05,
+ "loss": 0.8037,
+ "step": 993
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3741535211550402e-05,
+ "loss": 0.7805,
+ "step": 994
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3729537813506932e-05,
+ "loss": 0.6536,
+ "step": 995
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3717534176683485e-05,
+ "loss": 0.6813,
+ "step": 996
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3705524321159764e-05,
+ "loss": 0.7755,
+ "step": 997
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.369350826702589e-05,
+ "loss": 0.6744,
+ "step": 998
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3681486034382347e-05,
+ "loss": 0.7199,
+ "step": 999
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3669457643339956e-05,
+ "loss": 0.6169,
+ "step": 1000
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3657423114019837e-05,
+ "loss": 0.7275,
+ "step": 1001
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3645382466553381e-05,
+ "loss": 0.7462,
+ "step": 1002
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3633335721082215e-05,
+ "loss": 0.6837,
+ "step": 1003
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.362128289775816e-05,
+ "loss": 0.6826,
+ "step": 1004
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3609224016743213e-05,
+ "loss": 0.7712,
+ "step": 1005
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3597159098209493e-05,
+ "loss": 0.7181,
+ "step": 1006
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3585088162339233e-05,
+ "loss": 0.7408,
+ "step": 1007
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.357301122932472e-05,
+ "loss": 0.7074,
+ "step": 1008
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3560928319368278e-05,
+ "loss": 0.6742,
+ "step": 1009
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3548839452682225e-05,
+ "loss": 0.689,
+ "step": 1010
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.353674464948885e-05,
+ "loss": 0.6838,
+ "step": 1011
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3524643930020374e-05,
+ "loss": 0.6906,
+ "step": 1012
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.35125373145189e-05,
+ "loss": 0.7641,
+ "step": 1013
+ },
+ {
+ "epoch": 0.4,
+ "learning_rate": 1.3500424823236413e-05,
+ "loss": 0.6556,
+ "step": 1014
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3488306476434714e-05,
+ "loss": 0.7266,
+ "step": 1015
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3476182294385398e-05,
+ "loss": 0.7978,
+ "step": 1016
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3464052297369838e-05,
+ "loss": 0.7037,
+ "step": 1017
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3451916505679115e-05,
+ "loss": 0.6686,
+ "step": 1018
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.343977493961401e-05,
+ "loss": 0.7411,
+ "step": 1019
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3427627619484968e-05,
+ "loss": 0.7245,
+ "step": 1020
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3415474565612059e-05,
+ "loss": 0.7194,
+ "step": 1021
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3403315798324935e-05,
+ "loss": 0.6111,
+ "step": 1022
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3391151337962813e-05,
+ "loss": 0.6907,
+ "step": 1023
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3378981204874432e-05,
+ "loss": 0.66,
+ "step": 1024
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3366805419418026e-05,
+ "loss": 0.6434,
+ "step": 1025
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3354624001961267e-05,
+ "loss": 0.6933,
+ "step": 1026
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3342436972881272e-05,
+ "loss": 0.6465,
+ "step": 1027
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3330244352564527e-05,
+ "loss": 0.7129,
+ "step": 1028
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3318046161406876e-05,
+ "loss": 0.6547,
+ "step": 1029
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3305842419813488e-05,
+ "loss": 0.7377,
+ "step": 1030
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3293633148198812e-05,
+ "loss": 0.6683,
+ "step": 1031
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3281418366986535e-05,
+ "loss": 0.6383,
+ "step": 1032
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3269198096609592e-05,
+ "loss": 0.7036,
+ "step": 1033
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3256972357510068e-05,
+ "loss": 0.7526,
+ "step": 1034
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.324474117013921e-05,
+ "loss": 0.5901,
+ "step": 1035
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3232504554957385e-05,
+ "loss": 0.7318,
+ "step": 1036
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.322026253243403e-05,
+ "loss": 0.721,
+ "step": 1037
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.3208015123047623e-05,
+ "loss": 0.7119,
+ "step": 1038
+ },
+ {
+ "epoch": 0.41,
+ "learning_rate": 1.319576234728567e-05,
+ "loss": 0.6459,
+ "step": 1039
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3183504225644643e-05,
+ "loss": 0.7144,
+ "step": 1040
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3171240778629952e-05,
+ "loss": 0.6991,
+ "step": 1041
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3158972026755927e-05,
+ "loss": 0.6391,
+ "step": 1042
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3146697990545762e-05,
+ "loss": 0.6687,
+ "step": 1043
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3134418690531498e-05,
+ "loss": 0.7103,
+ "step": 1044
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3122134147253975e-05,
+ "loss": 0.769,
+ "step": 1045
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3109844381262816e-05,
+ "loss": 0.7016,
+ "step": 1046
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3097549413116358e-05,
+ "loss": 0.7064,
+ "step": 1047
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3085249263381661e-05,
+ "loss": 0.7504,
+ "step": 1048
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3072943952634446e-05,
+ "loss": 0.7082,
+ "step": 1049
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3060633501459065e-05,
+ "loss": 0.6378,
+ "step": 1050
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3048317930448472e-05,
+ "loss": 0.7196,
+ "step": 1051
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3035997260204187e-05,
+ "loss": 0.7887,
+ "step": 1052
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.3023671511336253e-05,
+ "loss": 0.6578,
+ "step": 1053
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.301134070446321e-05,
+ "loss": 0.7222,
+ "step": 1054
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2999004860212071e-05,
+ "loss": 0.635,
+ "step": 1055
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2986663999218264e-05,
+ "loss": 0.7335,
+ "step": 1056
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2974318142125604e-05,
+ "loss": 0.5873,
+ "step": 1057
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2961967309586279e-05,
+ "loss": 0.7762,
+ "step": 1058
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2949611522260788e-05,
+ "loss": 0.7179,
+ "step": 1059
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2937250800817922e-05,
+ "loss": 0.6169,
+ "step": 1060
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2924885165934728e-05,
+ "loss": 0.6037,
+ "step": 1061
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2912514638296472e-05,
+ "loss": 0.7387,
+ "step": 1062
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.2900139238596598e-05,
+ "loss": 0.7469,
+ "step": 1063
+ },
+ {
+ "epoch": 0.42,
+ "learning_rate": 1.288775898753671e-05,
+ "loss": 0.7598,
+ "step": 1064
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.287537390582652e-05,
+ "loss": 0.6809,
+ "step": 1065
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.286298401418382e-05,
+ "loss": 0.679,
+ "step": 1066
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2850589333334455e-05,
+ "loss": 0.6083,
+ "step": 1067
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2838189884012275e-05,
+ "loss": 0.7084,
+ "step": 1068
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2825785686959113e-05,
+ "loss": 0.7492,
+ "step": 1069
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2813376762924735e-05,
+ "loss": 0.7134,
+ "step": 1070
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2800963132666825e-05,
+ "loss": 0.6954,
+ "step": 1071
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.278854481695093e-05,
+ "loss": 0.6654,
+ "step": 1072
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.277612183655044e-05,
+ "loss": 0.6763,
+ "step": 1073
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.276369421224655e-05,
+ "loss": 0.7339,
+ "step": 1074
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2751261964828214e-05,
+ "loss": 0.7597,
+ "step": 1075
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.273882511509213e-05,
+ "loss": 0.5866,
+ "step": 1076
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.272638368384269e-05,
+ "loss": 0.7166,
+ "step": 1077
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2713937691891953e-05,
+ "loss": 0.7093,
+ "step": 1078
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2701487160059601e-05,
+ "loss": 0.7235,
+ "step": 1079
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2689032109172914e-05,
+ "loss": 0.6356,
+ "step": 1080
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2676572560066736e-05,
+ "loss": 0.7843,
+ "step": 1081
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2664108533583428e-05,
+ "loss": 0.6625,
+ "step": 1082
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2651640050572845e-05,
+ "loss": 0.7296,
+ "step": 1083
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2639167131892294e-05,
+ "loss": 0.6955,
+ "step": 1084
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2626689798406506e-05,
+ "loss": 0.6903,
+ "step": 1085
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2614208070987592e-05,
+ "loss": 0.629,
+ "step": 1086
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.2601721970515024e-05,
+ "loss": 0.7304,
+ "step": 1087
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.258923151787557e-05,
+ "loss": 0.6512,
+ "step": 1088
+ },
+ {
+ "epoch": 0.43,
+ "learning_rate": 1.25767367339633e-05,
+ "loss": 0.7502,
+ "step": 1089
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2564237639679512e-05,
+ "loss": 0.7133,
+ "step": 1090
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2551734255932728e-05,
+ "loss": 0.6705,
+ "step": 1091
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2539226603638633e-05,
+ "loss": 0.6991,
+ "step": 1092
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.252671470372006e-05,
+ "loss": 0.6887,
+ "step": 1093
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2514198577106948e-05,
+ "loss": 0.6877,
+ "step": 1094
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.25016782447363e-05,
+ "loss": 0.6943,
+ "step": 1095
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2489153727552164e-05,
+ "loss": 0.7548,
+ "step": 1096
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.247662504650558e-05,
+ "loss": 0.7514,
+ "step": 1097
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2464092222554554e-05,
+ "loss": 0.6414,
+ "step": 1098
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2451555276664032e-05,
+ "loss": 0.6847,
+ "step": 1099
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2439014229805836e-05,
+ "loss": 0.7201,
+ "step": 1100
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2426469102958671e-05,
+ "loss": 0.7557,
+ "step": 1101
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2413919917108053e-05,
+ "loss": 0.6389,
+ "step": 1102
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2401366693246289e-05,
+ "loss": 0.6397,
+ "step": 1103
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2388809452372442e-05,
+ "loss": 0.6744,
+ "step": 1104
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2376248215492297e-05,
+ "loss": 0.7155,
+ "step": 1105
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2363683003618322e-05,
+ "loss": 0.7283,
+ "step": 1106
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.235111383776963e-05,
+ "loss": 0.6579,
+ "step": 1107
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.233854073897196e-05,
+ "loss": 0.7328,
+ "step": 1108
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2325963728257616e-05,
+ "loss": 0.7098,
+ "step": 1109
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2313382826665456e-05,
+ "loss": 0.6046,
+ "step": 1110
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2300798055240836e-05,
+ "loss": 0.6732,
+ "step": 1111
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2288209435035605e-05,
+ "loss": 0.7059,
+ "step": 1112
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2275616987108028e-05,
+ "loss": 0.6711,
+ "step": 1113
+ },
+ {
+ "epoch": 0.44,
+ "learning_rate": 1.2263020732522786e-05,
+ "loss": 0.6965,
+ "step": 1114
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2250420692350921e-05,
+ "loss": 0.6542,
+ "step": 1115
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2237816887669817e-05,
+ "loss": 0.7208,
+ "step": 1116
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2225209339563144e-05,
+ "loss": 0.6147,
+ "step": 1117
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2212598069120845e-05,
+ "loss": 0.6811,
+ "step": 1118
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2199983097439079e-05,
+ "loss": 0.6177,
+ "step": 1119
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2187364445620203e-05,
+ "loss": 0.626,
+ "step": 1120
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2174742134772726e-05,
+ "loss": 0.6581,
+ "step": 1121
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2162116186011285e-05,
+ "loss": 0.713,
+ "step": 1122
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2149486620456599e-05,
+ "loss": 0.6635,
+ "step": 1123
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2136853459235427e-05,
+ "loss": 0.6372,
+ "step": 1124
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2124216723480565e-05,
+ "loss": 0.7154,
+ "step": 1125
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2111576434330767e-05,
+ "loss": 0.6859,
+ "step": 1126
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.209893261293074e-05,
+ "loss": 0.6481,
+ "step": 1127
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2086285280431103e-05,
+ "loss": 0.6611,
+ "step": 1128
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2073634457988342e-05,
+ "loss": 0.7246,
+ "step": 1129
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2060980166764782e-05,
+ "loss": 0.6693,
+ "step": 1130
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2048322427928556e-05,
+ "loss": 0.7048,
+ "step": 1131
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2035661262653562e-05,
+ "loss": 0.6535,
+ "step": 1132
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2022996692119424e-05,
+ "loss": 0.7585,
+ "step": 1133
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.2010328737511466e-05,
+ "loss": 0.6413,
+ "step": 1134
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1997657420020678e-05,
+ "loss": 0.7346,
+ "step": 1135
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1984982760843666e-05,
+ "loss": 0.6788,
+ "step": 1136
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1972304781182636e-05,
+ "loss": 0.7235,
+ "step": 1137
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.195962350224534e-05,
+ "loss": 0.6626,
+ "step": 1138
+ },
+ {
+ "epoch": 0.45,
+ "learning_rate": 1.1946938945245052e-05,
+ "loss": 0.6636,
+ "step": 1139
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1934251131400532e-05,
+ "loss": 0.6689,
+ "step": 1140
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1921560081935986e-05,
+ "loss": 0.6384,
+ "step": 1141
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1908865818081032e-05,
+ "loss": 0.6983,
+ "step": 1142
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1896168361070664e-05,
+ "loss": 0.7709,
+ "step": 1143
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1883467732145224e-05,
+ "loss": 0.6267,
+ "step": 1144
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1870763952550347e-05,
+ "loss": 0.7241,
+ "step": 1145
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1858057043536955e-05,
+ "loss": 0.6788,
+ "step": 1146
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1845347026361192e-05,
+ "loss": 0.7652,
+ "step": 1147
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1832633922284407e-05,
+ "loss": 0.7095,
+ "step": 1148
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1819917752573109e-05,
+ "loss": 0.6432,
+ "step": 1149
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1807198538498944e-05,
+ "loss": 0.7124,
+ "step": 1150
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.179447630133864e-05,
+ "loss": 0.6719,
+ "step": 1151
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1781751062373982e-05,
+ "loss": 0.6332,
+ "step": 1152
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1769022842891794e-05,
+ "loss": 0.6962,
+ "step": 1153
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1756291664183858e-05,
+ "loss": 0.712,
+ "step": 1154
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1743557547546928e-05,
+ "loss": 0.6591,
+ "step": 1155
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1730820514282669e-05,
+ "loss": 0.702,
+ "step": 1156
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1718080585697617e-05,
+ "loss": 0.7442,
+ "step": 1157
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1705337783103155e-05,
+ "loss": 0.727,
+ "step": 1158
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1692592127815475e-05,
+ "loss": 0.7378,
+ "step": 1159
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1679843641155544e-05,
+ "loss": 0.6883,
+ "step": 1160
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1667092344449053e-05,
+ "loss": 0.7024,
+ "step": 1161
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1654338259026413e-05,
+ "loss": 0.6769,
+ "step": 1162
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1641581406222682e-05,
+ "loss": 0.6521,
+ "step": 1163
+ },
+ {
+ "epoch": 0.46,
+ "learning_rate": 1.1628821807377557e-05,
+ "loss": 0.7504,
+ "step": 1164
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1616059483835325e-05,
+ "loss": 0.6452,
+ "step": 1165
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1603294456944835e-05,
+ "loss": 0.6708,
+ "step": 1166
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1590526748059453e-05,
+ "loss": 0.7028,
+ "step": 1167
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1577756378537033e-05,
+ "loss": 0.6441,
+ "step": 1168
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1564983369739884e-05,
+ "loss": 0.6893,
+ "step": 1169
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.155220774303472e-05,
+ "loss": 0.5931,
+ "step": 1170
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1539429519792647e-05,
+ "loss": 0.648,
+ "step": 1171
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1526648721389105e-05,
+ "loss": 0.7424,
+ "step": 1172
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1513865369203844e-05,
+ "loss": 0.6437,
+ "step": 1173
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1501079484620887e-05,
+ "loss": 0.7256,
+ "step": 1174
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1488291089028501e-05,
+ "loss": 0.775,
+ "step": 1175
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1475500203819135e-05,
+ "loss": 0.7039,
+ "step": 1176
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1462706850389413e-05,
+ "loss": 0.6894,
+ "step": 1177
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1449911050140097e-05,
+ "loss": 0.6686,
+ "step": 1178
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1437112824476025e-05,
+ "loss": 0.7134,
+ "step": 1179
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.14243121948061e-05,
+ "loss": 0.7235,
+ "step": 1180
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1411509182543253e-05,
+ "loss": 0.7287,
+ "step": 1181
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1398703809104392e-05,
+ "loss": 0.6872,
+ "step": 1182
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1385896095910374e-05,
+ "loss": 0.6998,
+ "step": 1183
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1373086064385974e-05,
+ "loss": 0.7422,
+ "step": 1184
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1360273735959848e-05,
+ "loss": 0.7095,
+ "step": 1185
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1347459132064485e-05,
+ "loss": 0.6294,
+ "step": 1186
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1334642274136186e-05,
+ "loss": 0.702,
+ "step": 1187
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.1321823183615027e-05,
+ "loss": 0.6915,
+ "step": 1188
+ },
+ {
+ "epoch": 0.47,
+ "learning_rate": 1.130900188194481e-05,
+ "loss": 0.7723,
+ "step": 1189
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1296178390573038e-05,
+ "loss": 0.7402,
+ "step": 1190
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1283352730950886e-05,
+ "loss": 0.698,
+ "step": 1191
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1270524924533146e-05,
+ "loss": 0.6819,
+ "step": 1192
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1257694992778199e-05,
+ "loss": 0.6634,
+ "step": 1193
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1244862957147993e-05,
+ "loss": 0.6872,
+ "step": 1194
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1232028839107985e-05,
+ "loss": 0.6897,
+ "step": 1195
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1219192660127117e-05,
+ "loss": 0.6707,
+ "step": 1196
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1206354441677782e-05,
+ "loss": 0.7205,
+ "step": 1197
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1193514205235789e-05,
+ "loss": 0.6724,
+ "step": 1198
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1180671972280311e-05,
+ "loss": 0.7152,
+ "step": 1199
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1167827764293867e-05,
+ "loss": 0.6799,
+ "step": 1200
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.115498160276228e-05,
+ "loss": 0.7171,
+ "step": 1201
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1142133509174644e-05,
+ "loss": 0.7057,
+ "step": 1202
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1129283505023273e-05,
+ "loss": 0.6679,
+ "step": 1203
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1116431611803696e-05,
+ "loss": 0.623,
+ "step": 1204
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1103577851014587e-05,
+ "loss": 0.671,
+ "step": 1205
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.109072224415774e-05,
+ "loss": 0.6218,
+ "step": 1206
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1077864812738061e-05,
+ "loss": 0.6662,
+ "step": 1207
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1065005578263483e-05,
+ "loss": 0.7244,
+ "step": 1208
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1052144562244967e-05,
+ "loss": 0.7162,
+ "step": 1209
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1039281786196453e-05,
+ "loss": 0.6849,
+ "step": 1210
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1026417271634821e-05,
+ "loss": 0.7178,
+ "step": 1211
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1013551040079866e-05,
+ "loss": 0.684,
+ "step": 1212
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.1000683113054251e-05,
+ "loss": 0.7168,
+ "step": 1213
+ },
+ {
+ "epoch": 0.48,
+ "learning_rate": 1.0987813512083476e-05,
+ "loss": 0.7618,
+ "step": 1214
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0974942258695838e-05,
+ "loss": 0.6845,
+ "step": 1215
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0962069374422407e-05,
+ "loss": 0.664,
+ "step": 1216
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0949194880796967e-05,
+ "loss": 0.6924,
+ "step": 1217
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0936318799356009e-05,
+ "loss": 0.6956,
+ "step": 1218
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0923441151638676e-05,
+ "loss": 0.7458,
+ "step": 1219
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.091056195918672e-05,
+ "loss": 0.7355,
+ "step": 1220
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0897681243544492e-05,
+ "loss": 0.7088,
+ "step": 1221
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0884799026258879e-05,
+ "loss": 0.6995,
+ "step": 1222
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0871915328879294e-05,
+ "loss": 0.6767,
+ "step": 1223
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.085903017295761e-05,
+ "loss": 0.6943,
+ "step": 1224
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0846143580048152e-05,
+ "loss": 0.6029,
+ "step": 1225
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.083325557170764e-05,
+ "loss": 0.7587,
+ "step": 1226
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0820366169495171e-05,
+ "loss": 0.7338,
+ "step": 1227
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0807475394972164e-05,
+ "loss": 0.7718,
+ "step": 1228
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0794583269702341e-05,
+ "loss": 0.7292,
+ "step": 1229
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0781689815251682e-05,
+ "loss": 0.7948,
+ "step": 1230
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0768795053188379e-05,
+ "loss": 0.6816,
+ "step": 1231
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0755899005082837e-05,
+ "loss": 0.6713,
+ "step": 1232
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0743001692507586e-05,
+ "loss": 0.6755,
+ "step": 1233
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0730103137037284e-05,
+ "loss": 0.6305,
+ "step": 1234
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0717203360248669e-05,
+ "loss": 0.5938,
+ "step": 1235
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.070430238372052e-05,
+ "loss": 0.674,
+ "step": 1236
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0691400229033617e-05,
+ "loss": 0.6196,
+ "step": 1237
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.067849691777072e-05,
+ "loss": 0.7014,
+ "step": 1238
+ },
+ {
+ "epoch": 0.49,
+ "learning_rate": 1.0665592471516518e-05,
+ "loss": 0.6862,
+ "step": 1239
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0652686911857599e-05,
+ "loss": 0.7666,
+ "step": 1240
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0639780260382417e-05,
+ "loss": 0.6846,
+ "step": 1241
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.062687253868125e-05,
+ "loss": 0.7008,
+ "step": 1242
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0613963768346167e-05,
+ "loss": 0.5661,
+ "step": 1243
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.060105397097099e-05,
+ "loss": 0.7456,
+ "step": 1244
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0588143168151257e-05,
+ "loss": 0.6914,
+ "step": 1245
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0575231381484195e-05,
+ "loss": 0.6311,
+ "step": 1246
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0562318632568667e-05,
+ "loss": 0.7017,
+ "step": 1247
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0549404943005157e-05,
+ "loss": 0.7072,
+ "step": 1248
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0536490334395713e-05,
+ "loss": 0.6966,
+ "step": 1249
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0523574828343923e-05,
+ "loss": 0.7291,
+ "step": 1250
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.051065844645488e-05,
+ "loss": 0.6558,
+ "step": 1251
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.049774121033514e-05,
+ "loss": 0.7328,
+ "step": 1252
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0484823141592682e-05,
+ "loss": 0.6775,
+ "step": 1253
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0471904261836888e-05,
+ "loss": 0.6362,
+ "step": 1254
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.045898459267849e-05,
+ "loss": 0.6987,
+ "step": 1255
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0446064155729538e-05,
+ "loss": 0.6874,
+ "step": 1256
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0433142972603378e-05,
+ "loss": 0.674,
+ "step": 1257
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0420221064914592e-05,
+ "loss": 0.6982,
+ "step": 1258
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0407298454278983e-05,
+ "loss": 0.7226,
+ "step": 1259
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0394375162313519e-05,
+ "loss": 0.6538,
+ "step": 1260
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0381451210636321e-05,
+ "loss": 0.7002,
+ "step": 1261
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0368526620866604e-05,
+ "loss": 0.694,
+ "step": 1262
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0355601414624653e-05,
+ "loss": 0.7046,
+ "step": 1263
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0342675613531785e-05,
+ "loss": 0.6118,
+ "step": 1264
+ },
+ {
+ "epoch": 0.5,
+ "learning_rate": 1.0329749239210311e-05,
+ "loss": 0.7287,
+ "step": 1265
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0316822313283504e-05,
+ "loss": 0.6131,
+ "step": 1266
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0303894857375557e-05,
+ "loss": 0.7405,
+ "step": 1267
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0290966893111547e-05,
+ "loss": 0.687,
+ "step": 1268
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0278038442117405e-05,
+ "loss": 0.6308,
+ "step": 1269
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0265109526019878e-05,
+ "loss": 0.7379,
+ "step": 1270
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.025218016644649e-05,
+ "loss": 0.7181,
+ "step": 1271
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0239250385025496e-05,
+ "loss": 0.6575,
+ "step": 1272
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0226320203385877e-05,
+ "loss": 0.6439,
+ "step": 1273
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0213389643157268e-05,
+ "loss": 0.694,
+ "step": 1274
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.020045872596994e-05,
+ "loss": 0.6829,
+ "step": 1275
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0187527473454764e-05,
+ "loss": 0.7181,
+ "step": 1276
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0174595907243172e-05,
+ "loss": 0.6897,
+ "step": 1277
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.016166404896712e-05,
+ "loss": 0.5888,
+ "step": 1278
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.014873192025905e-05,
+ "loss": 0.7406,
+ "step": 1279
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0135799542751861e-05,
+ "loss": 0.6921,
+ "step": 1280
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0122866938078863e-05,
+ "loss": 0.6279,
+ "step": 1281
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0109934127873751e-05,
+ "loss": 0.6221,
+ "step": 1282
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0097001133770561e-05,
+ "loss": 0.7319,
+ "step": 1283
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0084067977403637e-05,
+ "loss": 0.7252,
+ "step": 1284
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0071134680407593e-05,
+ "loss": 0.5705,
+ "step": 1285
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0058201264417285e-05,
+ "loss": 0.7031,
+ "step": 1286
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0045267751067758e-05,
+ "loss": 0.6709,
+ "step": 1287
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0032334161994225e-05,
+ "loss": 0.7739,
+ "step": 1288
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0019400518832028e-05,
+ "loss": 0.6752,
+ "step": 1289
+ },
+ {
+ "epoch": 0.51,
+ "learning_rate": 1.0006466843216593e-05,
+ "loss": 0.6813,
+ "step": 1290
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.993533156783409e-06,
+ "loss": 0.6199,
+ "step": 1291
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.980599481167977e-06,
+ "loss": 0.6959,
+ "step": 1292
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.967665838005777e-06,
+ "loss": 0.7096,
+ "step": 1293
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.954732248932243e-06,
+ "loss": 0.6722,
+ "step": 1294
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.941798735582718e-06,
+ "loss": 0.7425,
+ "step": 1295
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.92886531959241e-06,
+ "loss": 0.6747,
+ "step": 1296
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.915932022596368e-06,
+ "loss": 0.6449,
+ "step": 1297
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.90299886622944e-06,
+ "loss": 0.6156,
+ "step": 1298
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.890065872126252e-06,
+ "loss": 0.7066,
+ "step": 1299
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.877133061921138e-06,
+ "loss": 0.6898,
+ "step": 1300
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.864200457248144e-06,
+ "loss": 0.6227,
+ "step": 1301
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.851268079740951e-06,
+ "loss": 0.6608,
+ "step": 1302
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.838335951032883e-06,
+ "loss": 0.6861,
+ "step": 1303
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.82540409275683e-06,
+ "loss": 0.6155,
+ "step": 1304
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.81247252654524e-06,
+ "loss": 0.6514,
+ "step": 1305
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.799541274030065e-06,
+ "loss": 0.6746,
+ "step": 1306
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.786610356842734e-06,
+ "loss": 0.6353,
+ "step": 1307
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.773679796614124e-06,
+ "loss": 0.6465,
+ "step": 1308
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.760749614974506e-06,
+ "loss": 0.6705,
+ "step": 1309
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.747819833553516e-06,
+ "loss": 0.6071,
+ "step": 1310
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.734890473980122e-06,
+ "loss": 0.6591,
+ "step": 1311
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.721961557882597e-06,
+ "loss": 0.6533,
+ "step": 1312
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.709033106888456e-06,
+ "loss": 0.6904,
+ "step": 1313
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.696105142624448e-06,
+ "loss": 0.7109,
+ "step": 1314
+ },
+ {
+ "epoch": 0.52,
+ "learning_rate": 9.683177686716501e-06,
+ "loss": 0.6985,
+ "step": 1315
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.670250760789692e-06,
+ "loss": 0.6449,
+ "step": 1316
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.657324386468217e-06,
+ "loss": 0.7731,
+ "step": 1317
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.644398585375352e-06,
+ "loss": 0.7565,
+ "step": 1318
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.6314733791334e-06,
+ "loss": 0.6766,
+ "step": 1319
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.61854878936368e-06,
+ "loss": 0.7088,
+ "step": 1320
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.605624837686483e-06,
+ "loss": 0.7412,
+ "step": 1321
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.592701545721022e-06,
+ "loss": 0.6407,
+ "step": 1322
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.57977893508541e-06,
+ "loss": 0.6667,
+ "step": 1323
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.566857027396627e-06,
+ "loss": 0.6161,
+ "step": 1324
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.553935844270464e-06,
+ "loss": 0.73,
+ "step": 1325
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.541015407321514e-06,
+ "loss": 0.7163,
+ "step": 1326
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.528095738163117e-06,
+ "loss": 0.6461,
+ "step": 1327
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.515176858407323e-06,
+ "loss": 0.6627,
+ "step": 1328
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.502258789664865e-06,
+ "loss": 0.6587,
+ "step": 1329
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.489341553545121e-06,
+ "loss": 0.7315,
+ "step": 1330
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.47642517165608e-06,
+ "loss": 0.6971,
+ "step": 1331
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.46350966560429e-06,
+ "loss": 0.5892,
+ "step": 1332
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.450595056994848e-06,
+ "loss": 0.6187,
+ "step": 1333
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.437681367431335e-06,
+ "loss": 0.6176,
+ "step": 1334
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.424768618515809e-06,
+ "loss": 0.6944,
+ "step": 1335
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.411856831848745e-06,
+ "loss": 0.7163,
+ "step": 1336
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.398946029029015e-06,
+ "loss": 0.7479,
+ "step": 1337
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.386036231653836e-06,
+ "loss": 0.7053,
+ "step": 1338
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.37312746131875e-06,
+ "loss": 0.7168,
+ "step": 1339
+ },
+ {
+ "epoch": 0.53,
+ "learning_rate": 9.360219739617585e-06,
+ "loss": 0.6272,
+ "step": 1340
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.347313088142403e-06,
+ "loss": 0.5695,
+ "step": 1341
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.334407528483487e-06,
+ "loss": 0.7178,
+ "step": 1342
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.321503082229281e-06,
+ "loss": 0.6882,
+ "step": 1343
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.308599770966385e-06,
+ "loss": 0.6831,
+ "step": 1344
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.295697616279483e-06,
+ "loss": 0.6954,
+ "step": 1345
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.282796639751333e-06,
+ "loss": 0.6829,
+ "step": 1346
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.269896862962718e-06,
+ "loss": 0.6946,
+ "step": 1347
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.256998307492415e-06,
+ "loss": 0.7201,
+ "step": 1348
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.244100994917167e-06,
+ "loss": 0.7074,
+ "step": 1349
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.231204946811624e-06,
+ "loss": 0.5658,
+ "step": 1350
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.218310184748325e-06,
+ "loss": 0.6174,
+ "step": 1351
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.20541673029766e-06,
+ "loss": 0.6075,
+ "step": 1352
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.192524605027838e-06,
+ "loss": 0.6826,
+ "step": 1353
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.179633830504832e-06,
+ "loss": 0.7402,
+ "step": 1354
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.166744428292362e-06,
+ "loss": 0.7304,
+ "step": 1355
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.153856419951854e-06,
+ "loss": 0.7681,
+ "step": 1356
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.140969827042392e-06,
+ "loss": 0.7307,
+ "step": 1357
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.128084671120709e-06,
+ "loss": 0.6811,
+ "step": 1358
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.115200973741123e-06,
+ "loss": 0.6835,
+ "step": 1359
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.102318756455513e-06,
+ "loss": 0.7455,
+ "step": 1360
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.089438040813282e-06,
+ "loss": 0.6563,
+ "step": 1361
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.076558848361328e-06,
+ "loss": 0.6963,
+ "step": 1362
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.063681200643993e-06,
+ "loss": 0.678,
+ "step": 1363
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.050805119203035e-06,
+ "loss": 0.695,
+ "step": 1364
+ },
+ {
+ "epoch": 0.54,
+ "learning_rate": 9.037930625577598e-06,
+ "loss": 0.6701,
+ "step": 1365
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 9.025057741304164e-06,
+ "loss": 0.6227,
+ "step": 1366
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 9.012186487916526e-06,
+ "loss": 0.6916,
+ "step": 1367
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.999316886945752e-06,
+ "loss": 0.7148,
+ "step": 1368
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.986448959920137e-06,
+ "loss": 0.6444,
+ "step": 1369
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.97358272836518e-06,
+ "loss": 0.6287,
+ "step": 1370
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.96071821380355e-06,
+ "loss": 0.7121,
+ "step": 1371
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.947855437755036e-06,
+ "loss": 0.655,
+ "step": 1372
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.934994421736519e-06,
+ "loss": 0.6663,
+ "step": 1373
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.922135187261944e-06,
+ "loss": 0.7075,
+ "step": 1374
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.90927775584226e-06,
+ "loss": 0.6859,
+ "step": 1375
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.896422148985418e-06,
+ "loss": 0.685,
+ "step": 1376
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.883568388196308e-06,
+ "loss": 0.6769,
+ "step": 1377
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.87071649497673e-06,
+ "loss": 0.6598,
+ "step": 1378
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.85786649082536e-06,
+ "loss": 0.6714,
+ "step": 1379
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.845018397237722e-06,
+ "loss": 0.7623,
+ "step": 1380
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.832172235706137e-06,
+ "loss": 0.7481,
+ "step": 1381
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.819328027719692e-06,
+ "loss": 0.6767,
+ "step": 1382
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.806485794764215e-06,
+ "loss": 0.6987,
+ "step": 1383
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.793645558322218e-06,
+ "loss": 0.6866,
+ "step": 1384
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.780807339872886e-06,
+ "loss": 0.7552,
+ "step": 1385
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.76797116089202e-06,
+ "loss": 0.6944,
+ "step": 1386
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.755137042852012e-06,
+ "loss": 0.6545,
+ "step": 1387
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.742305007221801e-06,
+ "loss": 0.6239,
+ "step": 1388
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.729475075466856e-06,
+ "loss": 0.7273,
+ "step": 1389
+ },
+ {
+ "epoch": 0.55,
+ "learning_rate": 8.716647269049115e-06,
+ "loss": 0.6991,
+ "step": 1390
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.703821609426963e-06,
+ "loss": 0.6366,
+ "step": 1391
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.690998118055193e-06,
+ "loss": 0.6912,
+ "step": 1392
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.678176816384975e-06,
+ "loss": 0.7038,
+ "step": 1393
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.665357725863816e-06,
+ "loss": 0.723,
+ "step": 1394
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.65254086793552e-06,
+ "loss": 0.6831,
+ "step": 1395
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.639726264040159e-06,
+ "loss": 0.7173,
+ "step": 1396
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.626913935614026e-06,
+ "loss": 0.6047,
+ "step": 1397
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.61410390408963e-06,
+ "loss": 0.7391,
+ "step": 1398
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.601296190895611e-06,
+ "loss": 0.6778,
+ "step": 1399
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.588490817456749e-06,
+ "loss": 0.7399,
+ "step": 1400
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.575687805193902e-06,
+ "loss": 0.6995,
+ "step": 1401
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.562887175523977e-06,
+ "loss": 0.6926,
+ "step": 1402
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.550088949859906e-06,
+ "loss": 0.7275,
+ "step": 1403
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.53729314961059e-06,
+ "loss": 0.571,
+ "step": 1404
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.52449979618087e-06,
+ "loss": 0.6719,
+ "step": 1405
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.511708910971506e-06,
+ "loss": 0.6956,
+ "step": 1406
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.498920515379112e-06,
+ "loss": 0.7332,
+ "step": 1407
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.486134630796157e-06,
+ "loss": 0.6756,
+ "step": 1408
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.4733512786109e-06,
+ "loss": 0.6242,
+ "step": 1409
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.460570480207358e-06,
+ "loss": 0.6607,
+ "step": 1410
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.447792256965282e-06,
+ "loss": 0.6453,
+ "step": 1411
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.43501663026012e-06,
+ "loss": 0.6704,
+ "step": 1412
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.42224362146297e-06,
+ "loss": 0.6794,
+ "step": 1413
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.409473251940549e-06,
+ "loss": 0.6451,
+ "step": 1414
+ },
+ {
+ "epoch": 0.56,
+ "learning_rate": 8.39670554305517e-06,
+ "loss": 0.6886,
+ "step": 1415
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.383940516164677e-06,
+ "loss": 0.6867,
+ "step": 1416
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.371178192622445e-06,
+ "loss": 0.7037,
+ "step": 1417
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.358418593777322e-06,
+ "loss": 0.6562,
+ "step": 1418
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.345661740973592e-06,
+ "loss": 0.6922,
+ "step": 1419
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.332907655550948e-06,
+ "loss": 0.6447,
+ "step": 1420
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.32015635884446e-06,
+ "loss": 0.644,
+ "step": 1421
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.307407872184527e-06,
+ "loss": 0.6597,
+ "step": 1422
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.294662216896848e-06,
+ "loss": 0.6446,
+ "step": 1423
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.281919414302388e-06,
+ "loss": 0.6044,
+ "step": 1424
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.269179485717331e-06,
+ "loss": 0.7023,
+ "step": 1425
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.256442452453073e-06,
+ "loss": 0.6322,
+ "step": 1426
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.243708335816145e-06,
+ "loss": 0.6898,
+ "step": 1427
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.230977157108213e-06,
+ "loss": 0.6239,
+ "step": 1428
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.218248937626018e-06,
+ "loss": 0.7108,
+ "step": 1429
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.205523698661365e-06,
+ "loss": 0.7001,
+ "step": 1430
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.19280146150106e-06,
+ "loss": 0.6177,
+ "step": 1431
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.180082247426895e-06,
+ "loss": 0.6976,
+ "step": 1432
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.167366077715598e-06,
+ "loss": 0.6767,
+ "step": 1433
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.15465297363881e-06,
+ "loss": 0.7108,
+ "step": 1434
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.141942956463049e-06,
+ "loss": 0.6884,
+ "step": 1435
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.129236047449654e-06,
+ "loss": 0.6661,
+ "step": 1436
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.116532267854781e-06,
+ "loss": 0.608,
+ "step": 1437
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.103831638929337e-06,
+ "loss": 0.7119,
+ "step": 1438
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.091134181918971e-06,
+ "loss": 0.62,
+ "step": 1439
+ },
+ {
+ "epoch": 0.57,
+ "learning_rate": 8.078439918064016e-06,
+ "loss": 0.7315,
+ "step": 1440
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.065748868599471e-06,
+ "loss": 0.7078,
+ "step": 1441
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.053061054754952e-06,
+ "loss": 0.617,
+ "step": 1442
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.04037649775466e-06,
+ "loss": 0.7068,
+ "step": 1443
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.027695218817366e-06,
+ "loss": 0.6359,
+ "step": 1444
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.015017239156338e-06,
+ "loss": 0.6716,
+ "step": 1445
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 8.002342579979327e-06,
+ "loss": 0.6653,
+ "step": 1446
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.989671262488534e-06,
+ "loss": 0.681,
+ "step": 1447
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.977003307880579e-06,
+ "loss": 0.6882,
+ "step": 1448
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.964338737346441e-06,
+ "loss": 0.688,
+ "step": 1449
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.951677572071446e-06,
+ "loss": 0.7347,
+ "step": 1450
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.939019833235223e-06,
+ "loss": 0.6556,
+ "step": 1451
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.926365542011662e-06,
+ "loss": 0.6874,
+ "step": 1452
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.9137147195689e-06,
+ "loss": 0.6848,
+ "step": 1453
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.901067387069264e-06,
+ "loss": 0.6858,
+ "step": 1454
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.888423565669236e-06,
+ "loss": 0.6915,
+ "step": 1455
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.875783276519435e-06,
+ "loss": 0.7157,
+ "step": 1456
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.863146540764572e-06,
+ "loss": 0.7022,
+ "step": 1457
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.850513379543405e-06,
+ "loss": 0.6625,
+ "step": 1458
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.837883813988717e-06,
+ "loss": 0.6191,
+ "step": 1459
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.825257865227278e-06,
+ "loss": 0.6586,
+ "step": 1460
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.812635554379802e-06,
+ "loss": 0.728,
+ "step": 1461
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.800016902560924e-06,
+ "loss": 0.7171,
+ "step": 1462
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.787401930879159e-06,
+ "loss": 0.5983,
+ "step": 1463
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.774790660436857e-06,
+ "loss": 0.6429,
+ "step": 1464
+ },
+ {
+ "epoch": 0.58,
+ "learning_rate": 7.762183112330182e-06,
+ "loss": 0.6565,
+ "step": 1465
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.74957930764908e-06,
+ "loss": 0.7233,
+ "step": 1466
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.736979267477218e-06,
+ "loss": 0.7542,
+ "step": 1467
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.724383012891976e-06,
+ "loss": 0.6649,
+ "step": 1468
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.7117905649644e-06,
+ "loss": 0.6504,
+ "step": 1469
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.699201944759164e-06,
+ "loss": 0.6728,
+ "step": 1470
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.686617173334547e-06,
+ "loss": 0.7351,
+ "step": 1471
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.674036271742388e-06,
+ "loss": 0.6684,
+ "step": 1472
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.661459261028045e-06,
+ "loss": 0.7533,
+ "step": 1473
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.648886162230373e-06,
+ "loss": 0.635,
+ "step": 1474
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.63631699638168e-06,
+ "loss": 0.6895,
+ "step": 1475
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.623751784507707e-06,
+ "loss": 0.663,
+ "step": 1476
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.6111905476275606e-06,
+ "loss": 0.6833,
+ "step": 1477
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.598633306753715e-06,
+ "loss": 0.7049,
+ "step": 1478
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.586080082891949e-06,
+ "loss": 0.6729,
+ "step": 1479
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.57353089704133e-06,
+ "loss": 0.6382,
+ "step": 1480
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.5609857701941645e-06,
+ "loss": 0.6811,
+ "step": 1481
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.548444723335975e-06,
+ "loss": 0.6673,
+ "step": 1482
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.535907777445449e-06,
+ "loss": 0.6422,
+ "step": 1483
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.523374953494422e-06,
+ "loss": 0.6448,
+ "step": 1484
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.510846272447839e-06,
+ "loss": 0.6582,
+ "step": 1485
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.498321755263704e-06,
+ "loss": 0.621,
+ "step": 1486
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.485801422893057e-06,
+ "loss": 0.6669,
+ "step": 1487
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.473285296279942e-06,
+ "loss": 0.6662,
+ "step": 1488
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.460773396361371e-06,
+ "loss": 0.7176,
+ "step": 1489
+ },
+ {
+ "epoch": 0.59,
+ "learning_rate": 7.448265744067275e-06,
+ "loss": 0.7234,
+ "step": 1490
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.435762360320491e-06,
+ "loss": 0.6581,
+ "step": 1491
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.423263266036705e-06,
+ "loss": 0.746,
+ "step": 1492
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.410768482124431e-06,
+ "loss": 0.6634,
+ "step": 1493
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.3982780294849795e-06,
+ "loss": 0.6683,
+ "step": 1494
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.38579192901241e-06,
+ "loss": 0.702,
+ "step": 1495
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.373310201593498e-06,
+ "loss": 0.7121,
+ "step": 1496
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.360832868107708e-06,
+ "loss": 0.7609,
+ "step": 1497
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.348359949427159e-06,
+ "loss": 0.6694,
+ "step": 1498
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.335891466416574e-06,
+ "loss": 0.5551,
+ "step": 1499
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.323427439933269e-06,
+ "loss": 0.6488,
+ "step": 1500
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.31096789082709e-06,
+ "loss": 0.7251,
+ "step": 1501
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.2985128399404016e-06,
+ "loss": 0.7029,
+ "step": 1502
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.2860623081080506e-06,
+ "loss": 0.5932,
+ "step": 1503
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.273616316157312e-06,
+ "loss": 0.6922,
+ "step": 1504
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.261174884907874e-06,
+ "loss": 0.6838,
+ "step": 1505
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.248738035171787e-06,
+ "loss": 0.696,
+ "step": 1506
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.236305787753454e-06,
+ "loss": 0.6322,
+ "step": 1507
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.223878163449563e-06,
+ "loss": 0.6285,
+ "step": 1508
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.211455183049072e-06,
+ "loss": 0.7774,
+ "step": 1509
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.199036867333179e-06,
+ "loss": 0.7305,
+ "step": 1510
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.186623237075265e-06,
+ "loss": 0.6675,
+ "step": 1511
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.174214313040888e-06,
+ "loss": 0.7344,
+ "step": 1512
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.161810115987727e-06,
+ "loss": 0.7051,
+ "step": 1513
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.14941066666555e-06,
+ "loss": 0.6708,
+ "step": 1514
+ },
+ {
+ "epoch": 0.6,
+ "learning_rate": 7.137015985816183e-06,
+ "loss": 0.692,
+ "step": 1515
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.124626094173484e-06,
+ "loss": 0.6377,
+ "step": 1516
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.1122410124632935e-06,
+ "loss": 0.551,
+ "step": 1517
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.0998607614034035e-06,
+ "loss": 0.6542,
+ "step": 1518
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.087485361703532e-06,
+ "loss": 0.6535,
+ "step": 1519
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.075114834065272e-06,
+ "loss": 0.7097,
+ "step": 1520
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.06274919918208e-06,
+ "loss": 0.6212,
+ "step": 1521
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.0503884777392166e-06,
+ "loss": 0.7079,
+ "step": 1522
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.0380326904137265e-06,
+ "loss": 0.649,
+ "step": 1523
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.025681857874398e-06,
+ "loss": 0.6673,
+ "step": 1524
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.013336000781739e-06,
+ "loss": 0.6792,
+ "step": 1525
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 7.000995139787929e-06,
+ "loss": 0.6692,
+ "step": 1526
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.988659295536792e-06,
+ "loss": 0.6512,
+ "step": 1527
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.9763284886637516e-06,
+ "loss": 0.7137,
+ "step": 1528
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.964002739795814e-06,
+ "loss": 0.6376,
+ "step": 1529
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.9516820695515285e-06,
+ "loss": 0.6358,
+ "step": 1530
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.939366498540936e-06,
+ "loss": 0.735,
+ "step": 1531
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.927056047365558e-06,
+ "loss": 0.7057,
+ "step": 1532
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.914750736618341e-06,
+ "loss": 0.6713,
+ "step": 1533
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.9024505868836465e-06,
+ "loss": 0.6894,
+ "step": 1534
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.890155618737189e-06,
+ "loss": 0.6584,
+ "step": 1535
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.877865852746026e-06,
+ "loss": 0.6548,
+ "step": 1536
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.865581309468506e-06,
+ "loss": 0.6417,
+ "step": 1537
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.853302009454239e-06,
+ "loss": 0.7216,
+ "step": 1538
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.841027973244077e-06,
+ "loss": 0.6633,
+ "step": 1539
+ },
+ {
+ "epoch": 0.61,
+ "learning_rate": 6.828759221370049e-06,
+ "loss": 0.6586,
+ "step": 1540
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.816495774355362e-06,
+ "loss": 0.6521,
+ "step": 1541
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.804237652714334e-06,
+ "loss": 0.6767,
+ "step": 1542
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.7919848769523786e-06,
+ "loss": 0.6852,
+ "step": 1543
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.779737467565974e-06,
+ "loss": 0.7157,
+ "step": 1544
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.767495445042618e-06,
+ "loss": 0.6304,
+ "step": 1545
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.755258829860791e-06,
+ "loss": 0.6375,
+ "step": 1546
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.743027642489935e-06,
+ "loss": 0.6752,
+ "step": 1547
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.730801903390412e-06,
+ "loss": 0.6685,
+ "step": 1548
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.718581633013465e-06,
+ "loss": 0.6624,
+ "step": 1549
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.706366851801194e-06,
+ "loss": 0.6439,
+ "step": 1550
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.694157580186516e-06,
+ "loss": 0.6333,
+ "step": 1551
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.681953838593125e-06,
+ "loss": 0.6606,
+ "step": 1552
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.669755647435474e-06,
+ "loss": 0.6225,
+ "step": 1553
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.657563027118732e-06,
+ "loss": 0.6062,
+ "step": 1554
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.645375998038737e-06,
+ "loss": 0.7292,
+ "step": 1555
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.633194580581978e-06,
+ "loss": 0.6752,
+ "step": 1556
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.621018795125569e-06,
+ "loss": 0.6172,
+ "step": 1557
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.6088486620371905e-06,
+ "loss": 0.7737,
+ "step": 1558
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.596684201675069e-06,
+ "loss": 0.7042,
+ "step": 1559
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.584525434387945e-06,
+ "loss": 0.6707,
+ "step": 1560
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.572372380515031e-06,
+ "loss": 0.7354,
+ "step": 1561
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.560225060385992e-06,
+ "loss": 0.6802,
+ "step": 1562
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.54808349432089e-06,
+ "loss": 0.6681,
+ "step": 1563
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.535947702630168e-06,
+ "loss": 0.7121,
+ "step": 1564
+ },
+ {
+ "epoch": 0.62,
+ "learning_rate": 6.523817705614602e-06,
+ "loss": 0.559,
+ "step": 1565
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.51169352356529e-06,
+ "loss": 0.6672,
+ "step": 1566
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.499575176763591e-06,
+ "loss": 0.6024,
+ "step": 1567
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.487462685481103e-06,
+ "loss": 0.6304,
+ "step": 1568
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.4753560699796305e-06,
+ "loss": 0.6796,
+ "step": 1569
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.4632553505111484e-06,
+ "loss": 0.6726,
+ "step": 1570
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.451160547317777e-06,
+ "loss": 0.7079,
+ "step": 1571
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.439071680631725e-06,
+ "loss": 0.686,
+ "step": 1572
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.4269887706752845e-06,
+ "loss": 0.7217,
+ "step": 1573
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.414911837660768e-06,
+ "loss": 0.6898,
+ "step": 1574
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.402840901790508e-06,
+ "loss": 0.6744,
+ "step": 1575
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.39077598325679e-06,
+ "loss": 0.6645,
+ "step": 1576
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.378717102241842e-06,
+ "loss": 0.6703,
+ "step": 1577
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.366664278917789e-06,
+ "loss": 0.7755,
+ "step": 1578
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.3546175334466185e-06,
+ "loss": 0.6994,
+ "step": 1579
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.342576885980165e-06,
+ "loss": 0.6928,
+ "step": 1580
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.330542356660047e-06,
+ "loss": 0.6069,
+ "step": 1581
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.318513965617657e-06,
+ "loss": 0.6549,
+ "step": 1582
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.306491732974111e-06,
+ "loss": 0.6395,
+ "step": 1583
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.2944756788402394e-06,
+ "loss": 0.6633,
+ "step": 1584
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.282465823316519e-06,
+ "loss": 0.6736,
+ "step": 1585
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.270462186493071e-06,
+ "loss": 0.6246,
+ "step": 1586
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.2584647884496034e-06,
+ "loss": 0.6677,
+ "step": 1587
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.246473649255391e-06,
+ "loss": 0.7185,
+ "step": 1588
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.234488788969251e-06,
+ "loss": 0.6503,
+ "step": 1589
+ },
+ {
+ "epoch": 0.63,
+ "learning_rate": 6.22251022763948e-06,
+ "loss": 0.7008,
+ "step": 1590
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.2105379853038436e-06,
+ "loss": 0.688,
+ "step": 1591
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.19857208198954e-06,
+ "loss": 0.617,
+ "step": 1592
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.186612537713164e-06,
+ "loss": 0.7381,
+ "step": 1593
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.1746593724806666e-06,
+ "loss": 0.6369,
+ "step": 1594
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.162712606287335e-06,
+ "loss": 0.6358,
+ "step": 1595
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.150772259117748e-06,
+ "loss": 0.6744,
+ "step": 1596
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.138838350945744e-06,
+ "loss": 0.6734,
+ "step": 1597
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.126910901734397e-06,
+ "loss": 0.646,
+ "step": 1598
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.114989931435976e-06,
+ "loss": 0.6891,
+ "step": 1599
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.103075459991905e-06,
+ "loss": 0.7032,
+ "step": 1600
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.0911675073327385e-06,
+ "loss": 0.7151,
+ "step": 1601
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.079266093378138e-06,
+ "loss": 0.6547,
+ "step": 1602
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.067371238036812e-06,
+ "loss": 0.6962,
+ "step": 1603
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.055482961206503e-06,
+ "loss": 0.7367,
+ "step": 1604
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.043601282773952e-06,
+ "loss": 0.7226,
+ "step": 1605
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.031726222614855e-06,
+ "loss": 0.6348,
+ "step": 1606
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.019857800593847e-06,
+ "loss": 0.6329,
+ "step": 1607
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 6.007996036564454e-06,
+ "loss": 0.6258,
+ "step": 1608
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.9961409503690605e-06,
+ "loss": 0.6745,
+ "step": 1609
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.984292561838881e-06,
+ "loss": 0.7422,
+ "step": 1610
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.9724508907939345e-06,
+ "loss": 0.6266,
+ "step": 1611
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.960615957042999e-06,
+ "loss": 0.6208,
+ "step": 1612
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.948787780383574e-06,
+ "loss": 0.5982,
+ "step": 1613
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.936966380601867e-06,
+ "loss": 0.6725,
+ "step": 1614
+ },
+ {
+ "epoch": 0.64,
+ "learning_rate": 5.92515177747274e-06,
+ "loss": 0.6306,
+ "step": 1615
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.913343990759695e-06,
+ "loss": 0.7382,
+ "step": 1616
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.901543040214823e-06,
+ "loss": 0.6857,
+ "step": 1617
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.889748945578788e-06,
+ "loss": 0.7182,
+ "step": 1618
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.877961726580773e-06,
+ "loss": 0.6547,
+ "step": 1619
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.866181402938469e-06,
+ "loss": 0.6588,
+ "step": 1620
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.854407994358037e-06,
+ "loss": 0.6747,
+ "step": 1621
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.842641520534055e-06,
+ "loss": 0.6977,
+ "step": 1622
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.830882001149517e-06,
+ "loss": 0.6987,
+ "step": 1623
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.819129455875764e-06,
+ "loss": 0.672,
+ "step": 1624
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.8073839043724935e-06,
+ "loss": 0.6469,
+ "step": 1625
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.7956453662876935e-06,
+ "loss": 0.6513,
+ "step": 1626
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.783913861257611e-06,
+ "loss": 0.6628,
+ "step": 1627
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.772189408906739e-06,
+ "loss": 0.6446,
+ "step": 1628
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.760472028847773e-06,
+ "loss": 0.6966,
+ "step": 1629
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.748761740681573e-06,
+ "loss": 0.6774,
+ "step": 1630
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.737058563997138e-06,
+ "loss": 0.6124,
+ "step": 1631
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.725362518371576e-06,
+ "loss": 0.6069,
+ "step": 1632
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.713673623370046e-06,
+ "loss": 0.7209,
+ "step": 1633
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.7019918985457745e-06,
+ "loss": 0.6837,
+ "step": 1634
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.6903173634399785e-06,
+ "loss": 0.6947,
+ "step": 1635
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.678650037581841e-06,
+ "loss": 0.6049,
+ "step": 1636
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.666989940488496e-06,
+ "loss": 0.6785,
+ "step": 1637
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.655337091664985e-06,
+ "loss": 0.659,
+ "step": 1638
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.643691510604221e-06,
+ "loss": 0.6189,
+ "step": 1639
+ },
+ {
+ "epoch": 0.65,
+ "learning_rate": 5.632053216786959e-06,
+ "loss": 0.6472,
+ "step": 1640
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.620422229681771e-06,
+ "loss": 0.6588,
+ "step": 1641
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.608798568744989e-06,
+ "loss": 0.6453,
+ "step": 1642
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.597182253420712e-06,
+ "loss": 0.6095,
+ "step": 1643
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.585573303140741e-06,
+ "loss": 0.6439,
+ "step": 1644
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.573971737324551e-06,
+ "loss": 0.6744,
+ "step": 1645
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.56237757537927e-06,
+ "loss": 0.6247,
+ "step": 1646
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.550790836699642e-06,
+ "loss": 0.5964,
+ "step": 1647
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.539211540667992e-06,
+ "loss": 0.6411,
+ "step": 1648
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.527639706654195e-06,
+ "loss": 0.7006,
+ "step": 1649
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.516075354015646e-06,
+ "loss": 0.7065,
+ "step": 1650
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.504518502097212e-06,
+ "loss": 0.7052,
+ "step": 1651
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.4929691702312395e-06,
+ "loss": 0.6403,
+ "step": 1652
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.481427377737465e-06,
+ "loss": 0.6265,
+ "step": 1653
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.4698931439230314e-06,
+ "loss": 0.6767,
+ "step": 1654
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.458366488082436e-06,
+ "loss": 0.6895,
+ "step": 1655
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.446847429497494e-06,
+ "loss": 0.6659,
+ "step": 1656
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.435335987437316e-06,
+ "loss": 0.7339,
+ "step": 1657
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.423832181158274e-06,
+ "loss": 0.666,
+ "step": 1658
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.412336029903956e-06,
+ "loss": 0.7169,
+ "step": 1659
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.400847552905152e-06,
+ "loss": 0.6974,
+ "step": 1660
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.3893667693798245e-06,
+ "loss": 0.7114,
+ "step": 1661
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.377893698533048e-06,
+ "loss": 0.6894,
+ "step": 1662
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.366428359557003e-06,
+ "loss": 0.6596,
+ "step": 1663
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.354970771630945e-06,
+ "loss": 0.6556,
+ "step": 1664
+ },
+ {
+ "epoch": 0.66,
+ "learning_rate": 5.34352095392114e-06,
+ "loss": 0.606,
+ "step": 1665
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.332078925580883e-06,
+ "loss": 0.6318,
+ "step": 1666
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.320644705750431e-06,
+ "loss": 0.6919,
+ "step": 1667
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.309218313556961e-06,
+ "loss": 0.6547,
+ "step": 1668
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.297799768114575e-06,
+ "loss": 0.6585,
+ "step": 1669
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.286389088524253e-06,
+ "loss": 0.6035,
+ "step": 1670
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.274986293873796e-06,
+ "loss": 0.6726,
+ "step": 1671
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.263591403237831e-06,
+ "loss": 0.5983,
+ "step": 1672
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.252204435677763e-06,
+ "loss": 0.6333,
+ "step": 1673
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.2408254102417275e-06,
+ "loss": 0.69,
+ "step": 1674
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.2294543459646e-06,
+ "loss": 0.6227,
+ "step": 1675
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.2180912618679195e-06,
+ "loss": 0.5856,
+ "step": 1676
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.206736176959877e-06,
+ "loss": 0.6419,
+ "step": 1677
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.19538911023529e-06,
+ "loss": 0.6937,
+ "step": 1678
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.184050080675558e-06,
+ "loss": 0.7041,
+ "step": 1679
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.1727191072486385e-06,
+ "loss": 0.6228,
+ "step": 1680
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.161396208909012e-06,
+ "loss": 0.6375,
+ "step": 1681
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.150081404597654e-06,
+ "loss": 0.6395,
+ "step": 1682
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.138774713241983e-06,
+ "loss": 0.6672,
+ "step": 1683
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.127476153755873e-06,
+ "loss": 0.6147,
+ "step": 1684
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.116185745039585e-06,
+ "loss": 0.6865,
+ "step": 1685
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.10490350597973e-06,
+ "loss": 0.6083,
+ "step": 1686
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.093629455449273e-06,
+ "loss": 0.6479,
+ "step": 1687
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.082363612307471e-06,
+ "loss": 0.7257,
+ "step": 1688
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.071105995399852e-06,
+ "loss": 0.5806,
+ "step": 1689
+ },
+ {
+ "epoch": 0.67,
+ "learning_rate": 5.059856623558189e-06,
+ "loss": 0.6849,
+ "step": 1690
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.0486155156004615e-06,
+ "loss": 0.6263,
+ "step": 1691
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.0373826903308095e-06,
+ "loss": 0.6844,
+ "step": 1692
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.026158166539548e-06,
+ "loss": 0.6637,
+ "step": 1693
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.014941963003075e-06,
+ "loss": 0.6457,
+ "step": 1694
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 5.003734098483887e-06,
+ "loss": 0.6766,
+ "step": 1695
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.992534591730527e-06,
+ "loss": 0.6596,
+ "step": 1696
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.981343461477559e-06,
+ "loss": 0.7206,
+ "step": 1697
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.970160726445531e-06,
+ "loss": 0.6716,
+ "step": 1698
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.9589864053409545e-06,
+ "loss": 0.6584,
+ "step": 1699
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.9478205168562535e-06,
+ "loss": 0.743,
+ "step": 1700
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.936663079669753e-06,
+ "loss": 0.6391,
+ "step": 1701
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.925514112445653e-06,
+ "loss": 0.7294,
+ "step": 1702
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.914373633833962e-06,
+ "loss": 0.6649,
+ "step": 1703
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.9032416624705036e-06,
+ "loss": 0.7002,
+ "step": 1704
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.892118216976871e-06,
+ "loss": 0.63,
+ "step": 1705
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.881003315960378e-06,
+ "loss": 0.689,
+ "step": 1706
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.869896978014071e-06,
+ "loss": 0.6366,
+ "step": 1707
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.858799221716659e-06,
+ "loss": 0.674,
+ "step": 1708
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.847710065632489e-06,
+ "loss": 0.6259,
+ "step": 1709
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.836629528311527e-06,
+ "loss": 0.6863,
+ "step": 1710
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.8255576282893355e-06,
+ "loss": 0.6666,
+ "step": 1711
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.814494384087004e-06,
+ "loss": 0.67,
+ "step": 1712
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.803439814211158e-06,
+ "loss": 0.6607,
+ "step": 1713
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.792393937153914e-06,
+ "loss": 0.6694,
+ "step": 1714
+ },
+ {
+ "epoch": 0.68,
+ "learning_rate": 4.78135677139283e-06,
+ "loss": 0.7049,
+ "step": 1715
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.770328335390915e-06,
+ "loss": 0.7359,
+ "step": 1716
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.759308647596564e-06,
+ "loss": 0.6764,
+ "step": 1717
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.7482977264435305e-06,
+ "loss": 0.7386,
+ "step": 1718
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.737295590350913e-06,
+ "loss": 0.6338,
+ "step": 1719
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.726302257723113e-06,
+ "loss": 0.7226,
+ "step": 1720
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.7153177469498045e-06,
+ "loss": 0.7281,
+ "step": 1721
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.704342076405902e-06,
+ "loss": 0.7164,
+ "step": 1722
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.6933752644515404e-06,
+ "loss": 0.7297,
+ "step": 1723
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.682417329432016e-06,
+ "loss": 0.6265,
+ "step": 1724
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.671468289677802e-06,
+ "loss": 0.678,
+ "step": 1725
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.660528163504479e-06,
+ "loss": 0.7319,
+ "step": 1726
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.6495969692127084e-06,
+ "loss": 0.6426,
+ "step": 1727
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.6386747250882226e-06,
+ "loss": 0.6625,
+ "step": 1728
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.6277614494017784e-06,
+ "loss": 0.6555,
+ "step": 1729
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.616857160409131e-06,
+ "loss": 0.6658,
+ "step": 1730
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.605961876351001e-06,
+ "loss": 0.6409,
+ "step": 1731
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.595075615453049e-06,
+ "loss": 0.7149,
+ "step": 1732
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.584198395925829e-06,
+ "loss": 0.7193,
+ "step": 1733
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.5733302359647955e-06,
+ "loss": 0.7119,
+ "step": 1734
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.562471153750221e-06,
+ "loss": 0.6695,
+ "step": 1735
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.55162116744721e-06,
+ "loss": 0.7238,
+ "step": 1736
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.5407802952056435e-06,
+ "loss": 0.7054,
+ "step": 1737
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.529948555160162e-06,
+ "loss": 0.7071,
+ "step": 1738
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.519125965430125e-06,
+ "loss": 0.6507,
+ "step": 1739
+ },
+ {
+ "epoch": 0.69,
+ "learning_rate": 4.508312544119592e-06,
+ "loss": 0.7183,
+ "step": 1740
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.497508309317271e-06,
+ "loss": 0.69,
+ "step": 1741
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.486713279096515e-06,
+ "loss": 0.6881,
+ "step": 1742
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.475927471515287e-06,
+ "loss": 0.6515,
+ "step": 1743
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.465150904616101e-06,
+ "loss": 0.6066,
+ "step": 1744
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.454383596426028e-06,
+ "loss": 0.6323,
+ "step": 1745
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.443625564956648e-06,
+ "loss": 0.687,
+ "step": 1746
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.432876828204022e-06,
+ "loss": 0.703,
+ "step": 1747
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.4221374041486654e-06,
+ "loss": 0.6845,
+ "step": 1748
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.411407310755513e-06,
+ "loss": 0.7389,
+ "step": 1749
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.400686565973886e-06,
+ "loss": 0.6049,
+ "step": 1750
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.389975187737472e-06,
+ "loss": 0.676,
+ "step": 1751
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.3792731939643015e-06,
+ "loss": 0.658,
+ "step": 1752
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.368580602556687e-06,
+ "loss": 0.6514,
+ "step": 1753
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.3578974314012225e-06,
+ "loss": 0.6694,
+ "step": 1754
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.347223698368748e-06,
+ "loss": 0.7112,
+ "step": 1755
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.336559421314298e-06,
+ "loss": 0.6177,
+ "step": 1756
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.325904618077114e-06,
+ "loss": 0.6703,
+ "step": 1757
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.315259306480575e-06,
+ "loss": 0.7118,
+ "step": 1758
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.304623504332178e-06,
+ "loss": 0.7268,
+ "step": 1759
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.293997229423523e-06,
+ "loss": 0.6511,
+ "step": 1760
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.283380499530269e-06,
+ "loss": 0.7128,
+ "step": 1761
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.2727733324121055e-06,
+ "loss": 0.6264,
+ "step": 1762
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.262175745812729e-06,
+ "loss": 0.6617,
+ "step": 1763
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.251587757459813e-06,
+ "loss": 0.6619,
+ "step": 1764
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.241009385064958e-06,
+ "loss": 0.734,
+ "step": 1765
+ },
+ {
+ "epoch": 0.7,
+ "learning_rate": 4.2304406463237026e-06,
+ "loss": 0.658,
+ "step": 1766
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.219881558915458e-06,
+ "loss": 0.6959,
+ "step": 1767
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.209332140503486e-06,
+ "loss": 0.6788,
+ "step": 1768
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.198792408734882e-06,
+ "loss": 0.6359,
+ "step": 1769
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.1882623812405345e-06,
+ "loss": 0.6249,
+ "step": 1770
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.1777420756351e-06,
+ "loss": 0.6387,
+ "step": 1771
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.1672315095169725e-06,
+ "loss": 0.651,
+ "step": 1772
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.1567307004682546e-06,
+ "loss": 0.5963,
+ "step": 1773
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.146239666054716e-06,
+ "loss": 0.6718,
+ "step": 1774
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.135758423825798e-06,
+ "loss": 0.7079,
+ "step": 1775
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.125286991314538e-06,
+ "loss": 0.6462,
+ "step": 1776
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.1148253860375764e-06,
+ "loss": 0.7071,
+ "step": 1777
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.104373625495114e-06,
+ "loss": 0.6268,
+ "step": 1778
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.09393172717088e-06,
+ "loss": 0.6209,
+ "step": 1779
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.083499708532108e-06,
+ "loss": 0.6724,
+ "step": 1780
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.073077587029509e-06,
+ "loss": 0.6303,
+ "step": 1781
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.0626653800972236e-06,
+ "loss": 0.6955,
+ "step": 1782
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.0522631051528205e-06,
+ "loss": 0.6646,
+ "step": 1783
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.0418707795972575e-06,
+ "loss": 0.6455,
+ "step": 1784
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.031488420814834e-06,
+ "loss": 0.7123,
+ "step": 1785
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.021116046173189e-06,
+ "loss": 0.6547,
+ "step": 1786
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.010753673023254e-06,
+ "loss": 0.5691,
+ "step": 1787
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 4.000401318699236e-06,
+ "loss": 0.6893,
+ "step": 1788
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 3.9900590005185735e-06,
+ "loss": 0.657,
+ "step": 1789
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 3.9797267357819294e-06,
+ "loss": 0.7086,
+ "step": 1790
+ },
+ {
+ "epoch": 0.71,
+ "learning_rate": 3.969404541773132e-06,
+ "loss": 0.6675,
+ "step": 1791
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.959092435759174e-06,
+ "loss": 0.6793,
+ "step": 1792
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.9487904349901815e-06,
+ "loss": 0.6719,
+ "step": 1793
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.938498556699357e-06,
+ "loss": 0.6414,
+ "step": 1794
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.928216818102982e-06,
+ "loss": 0.7056,
+ "step": 1795
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.917945236400374e-06,
+ "loss": 0.6142,
+ "step": 1796
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.907683828773861e-06,
+ "loss": 0.6755,
+ "step": 1797
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.897432612388752e-06,
+ "loss": 0.6896,
+ "step": 1798
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.8871916043933065e-06,
+ "loss": 0.6048,
+ "step": 1799
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.876960821918704e-06,
+ "loss": 0.6429,
+ "step": 1800
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.866740282079023e-06,
+ "loss": 0.6709,
+ "step": 1801
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.8565300019712115e-06,
+ "loss": 0.6774,
+ "step": 1802
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.846329998675048e-06,
+ "loss": 0.6958,
+ "step": 1803
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.836140289253124e-06,
+ "loss": 0.5788,
+ "step": 1804
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.8259608907508105e-06,
+ "loss": 0.6077,
+ "step": 1805
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.81579182019623e-06,
+ "loss": 0.7346,
+ "step": 1806
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.8056330946002283e-06,
+ "loss": 0.7175,
+ "step": 1807
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7954847309563504e-06,
+ "loss": 0.6932,
+ "step": 1808
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7853467462407966e-06,
+ "loss": 0.623,
+ "step": 1809
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7752191574124176e-06,
+ "loss": 0.6646,
+ "step": 1810
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7651019814126656e-06,
+ "loss": 0.6691,
+ "step": 1811
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7549952351655795e-06,
+ "loss": 0.6734,
+ "step": 1812
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7448989355777497e-06,
+ "loss": 0.6692,
+ "step": 1813
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.734813099538289e-06,
+ "loss": 0.6776,
+ "step": 1814
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.724737743918809e-06,
+ "loss": 0.6591,
+ "step": 1815
+ },
+ {
+ "epoch": 0.72,
+ "learning_rate": 3.7146728855733947e-06,
+ "loss": 0.6908,
+ "step": 1816
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.7046185413385573e-06,
+ "loss": 0.6624,
+ "step": 1817
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.694574728033231e-06,
+ "loss": 0.7122,
+ "step": 1818
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6845414624587326e-06,
+ "loss": 0.6585,
+ "step": 1819
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.674518761398733e-06,
+ "loss": 0.585,
+ "step": 1820
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.664506641619232e-06,
+ "loss": 0.6576,
+ "step": 1821
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.65450511986853e-06,
+ "loss": 0.6425,
+ "step": 1822
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.644514212877187e-06,
+ "loss": 0.6633,
+ "step": 1823
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6345339373580267e-06,
+ "loss": 0.6617,
+ "step": 1824
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.62456431000608e-06,
+ "loss": 0.6907,
+ "step": 1825
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6146053474985565e-06,
+ "loss": 0.707,
+ "step": 1826
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.6046570664948357e-06,
+ "loss": 0.7095,
+ "step": 1827
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5947194836364264e-06,
+ "loss": 0.6971,
+ "step": 1828
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5847926155469427e-06,
+ "loss": 0.6956,
+ "step": 1829
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.574876478832072e-06,
+ "loss": 0.5781,
+ "step": 1830
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5649710900795564e-06,
+ "loss": 0.6727,
+ "step": 1831
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5550764658591486e-06,
+ "loss": 0.6978,
+ "step": 1832
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5451926227225997e-06,
+ "loss": 0.6234,
+ "step": 1833
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5353195772036352e-06,
+ "loss": 0.6846,
+ "step": 1834
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.525457345817902e-06,
+ "loss": 0.6034,
+ "step": 1835
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5156059450629677e-06,
+ "loss": 0.6853,
+ "step": 1836
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.5057653914182787e-06,
+ "loss": 0.6497,
+ "step": 1837
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.4959357013451368e-06,
+ "loss": 0.6764,
+ "step": 1838
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.486116891286674e-06,
+ "loss": 0.6953,
+ "step": 1839
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.4763089776678206e-06,
+ "loss": 0.6196,
+ "step": 1840
+ },
+ {
+ "epoch": 0.73,
+ "learning_rate": 3.4665119768952736e-06,
+ "loss": 0.6806,
+ "step": 1841
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.456725905357482e-06,
+ "loss": 0.6375,
+ "step": 1842
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.4469507794246105e-06,
+ "loss": 0.6256,
+ "step": 1843
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.4371866154485147e-06,
+ "loss": 0.6619,
+ "step": 1844
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.42743342976271e-06,
+ "loss": 0.6888,
+ "step": 1845
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.417691238682351e-06,
+ "loss": 0.6084,
+ "step": 1846
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.4079600585041993e-06,
+ "loss": 0.6814,
+ "step": 1847
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3982399055065953e-06,
+ "loss": 0.6683,
+ "step": 1848
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3885307959494374e-06,
+ "loss": 0.6402,
+ "step": 1849
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.378832746074143e-06,
+ "loss": 0.6663,
+ "step": 1850
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.369145772103637e-06,
+ "loss": 0.6955,
+ "step": 1851
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.359469890242313e-06,
+ "loss": 0.6315,
+ "step": 1852
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.349805116676008e-06,
+ "loss": 0.6522,
+ "step": 1853
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3401514675719815e-06,
+ "loss": 0.5938,
+ "step": 1854
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3305089590788796e-06,
+ "loss": 0.716,
+ "step": 1855
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3208776073267146e-06,
+ "loss": 0.656,
+ "step": 1856
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.3112574284268395e-06,
+ "loss": 0.6287,
+ "step": 1857
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.301648438471905e-06,
+ "loss": 0.639,
+ "step": 1858
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2920506535358566e-06,
+ "loss": 0.6446,
+ "step": 1859
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2824640896738935e-06,
+ "loss": 0.668,
+ "step": 1860
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2728887629224415e-06,
+ "loss": 0.6964,
+ "step": 1861
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.263324689299132e-06,
+ "loss": 0.6583,
+ "step": 1862
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2537718848027744e-06,
+ "loss": 0.6304,
+ "step": 1863
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.2442303654133124e-06,
+ "loss": 0.6056,
+ "step": 1864
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.234700147091834e-06,
+ "loss": 0.5975,
+ "step": 1865
+ },
+ {
+ "epoch": 0.74,
+ "learning_rate": 3.22518124578051e-06,
+ "loss": 0.7077,
+ "step": 1866
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.2156736774025766e-06,
+ "loss": 0.698,
+ "step": 1867
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.206177457862321e-06,
+ "loss": 0.6859,
+ "step": 1868
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1966926030450408e-06,
+ "loss": 0.7183,
+ "step": 1869
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.187219128817025e-06,
+ "loss": 0.6573,
+ "step": 1870
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1777570510255262e-06,
+ "loss": 0.7102,
+ "step": 1871
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.168306385498733e-06,
+ "loss": 0.6429,
+ "step": 1872
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1588671480457324e-06,
+ "loss": 0.7265,
+ "step": 1873
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.149439354456514e-06,
+ "loss": 0.5894,
+ "step": 1874
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1400230205019124e-06,
+ "loss": 0.667,
+ "step": 1875
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1306181619335894e-06,
+ "loss": 0.695,
+ "step": 1876
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.121224794484019e-06,
+ "loss": 0.6435,
+ "step": 1877
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.1118429338664467e-06,
+ "loss": 0.5966,
+ "step": 1878
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.102472595774875e-06,
+ "loss": 0.5781,
+ "step": 1879
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.093113795884026e-06,
+ "loss": 0.6029,
+ "step": 1880
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0837665498493273e-06,
+ "loss": 0.6813,
+ "step": 1881
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.074430873306865e-06,
+ "loss": 0.6044,
+ "step": 1882
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0651067818733948e-06,
+ "loss": 0.7047,
+ "step": 1883
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.05579429114627e-06,
+ "loss": 0.6871,
+ "step": 1884
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.046493416703452e-06,
+ "loss": 0.6599,
+ "step": 1885
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0372041741034642e-06,
+ "loss": 0.5886,
+ "step": 1886
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0279265788853763e-06,
+ "loss": 0.6521,
+ "step": 1887
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0186606465687705e-06,
+ "loss": 0.6332,
+ "step": 1888
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.0094063926537233e-06,
+ "loss": 0.6826,
+ "step": 1889
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 3.000163832620775e-06,
+ "loss": 0.6078,
+ "step": 1890
+ },
+ {
+ "epoch": 0.75,
+ "learning_rate": 2.990932981930893e-06,
+ "loss": 0.5741,
+ "step": 1891
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.98171385602548e-06,
+ "loss": 0.6129,
+ "step": 1892
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.972506470326303e-06,
+ "loss": 0.5855,
+ "step": 1893
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9633108402355027e-06,
+ "loss": 0.6727,
+ "step": 1894
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.95412698113555e-06,
+ "loss": 0.713,
+ "step": 1895
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.944954908389229e-06,
+ "loss": 0.7165,
+ "step": 1896
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.935794637339605e-06,
+ "loss": 0.6809,
+ "step": 1897
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9266461833100047e-06,
+ "loss": 0.7085,
+ "step": 1898
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.917509561603977e-06,
+ "loss": 0.7188,
+ "step": 1899
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.9083847875052894e-06,
+ "loss": 0.6625,
+ "step": 1900
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.899271876277887e-06,
+ "loss": 0.6488,
+ "step": 1901
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.890170843165868e-06,
+ "loss": 0.63,
+ "step": 1902
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.881081703393466e-06,
+ "loss": 0.6852,
+ "step": 1903
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8720044721650155e-06,
+ "loss": 0.6787,
+ "step": 1904
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.862939164664924e-06,
+ "loss": 0.6825,
+ "step": 1905
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8538857960576694e-06,
+ "loss": 0.5892,
+ "step": 1906
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.844844381487748e-06,
+ "loss": 0.6511,
+ "step": 1907
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.835814936079655e-06,
+ "loss": 0.712,
+ "step": 1908
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8267974749378714e-06,
+ "loss": 0.5824,
+ "step": 1909
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.8177920131468273e-06,
+ "loss": 0.633,
+ "step": 1910
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.808798565770883e-06,
+ "loss": 0.6757,
+ "step": 1911
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.799817147854299e-06,
+ "loss": 0.6372,
+ "step": 1912
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.790847774421215e-06,
+ "loss": 0.6062,
+ "step": 1913
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.7818904604756125e-06,
+ "loss": 0.6865,
+ "step": 1914
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.7729452210013173e-06,
+ "loss": 0.6596,
+ "step": 1915
+ },
+ {
+ "epoch": 0.76,
+ "learning_rate": 2.764012070961948e-06,
+ "loss": 0.6622,
+ "step": 1916
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7550910253008933e-06,
+ "loss": 0.6725,
+ "step": 1917
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7461820989413036e-06,
+ "loss": 0.6574,
+ "step": 1918
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.737285306786053e-06,
+ "loss": 0.7364,
+ "step": 1919
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7284006637177164e-06,
+ "loss": 0.6862,
+ "step": 1920
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7195281845985467e-06,
+ "loss": 0.6382,
+ "step": 1921
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.7106678842704514e-06,
+ "loss": 0.6689,
+ "step": 1922
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.701819777554956e-06,
+ "loss": 0.5926,
+ "step": 1923
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6929838792532035e-06,
+ "loss": 0.6533,
+ "step": 1924
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.684160204145899e-06,
+ "loss": 0.6106,
+ "step": 1925
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.675348766993311e-06,
+ "loss": 0.6606,
+ "step": 1926
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.666549582535233e-06,
+ "loss": 0.6714,
+ "step": 1927
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6577626654909617e-06,
+ "loss": 0.6555,
+ "step": 1928
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.648988030559274e-06,
+ "loss": 0.6615,
+ "step": 1929
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.640225692418401e-06,
+ "loss": 0.6976,
+ "step": 1930
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6314756657260053e-06,
+ "loss": 0.6781,
+ "step": 1931
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.622737965119145e-06,
+ "loss": 0.6564,
+ "step": 1932
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6140126052142788e-06,
+ "loss": 0.7354,
+ "step": 1933
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.6052996006072008e-06,
+ "loss": 0.5138,
+ "step": 1934
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5965989658730485e-06,
+ "loss": 0.6462,
+ "step": 1935
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5879107155662676e-06,
+ "loss": 0.6377,
+ "step": 1936
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5792348642205823e-06,
+ "loss": 0.6021,
+ "step": 1937
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.570571426348978e-06,
+ "loss": 0.6445,
+ "step": 1938
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.5619204164436796e-06,
+ "loss": 0.6269,
+ "step": 1939
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.553281848976109e-06,
+ "loss": 0.7203,
+ "step": 1940
+ },
+ {
+ "epoch": 0.77,
+ "learning_rate": 2.544655738396885e-06,
+ "loss": 0.6196,
+ "step": 1941
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.536042099135795e-06,
+ "loss": 0.6628,
+ "step": 1942
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.527440945601747e-06,
+ "loss": 0.6729,
+ "step": 1943
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.518852292182774e-06,
+ "loss": 0.5689,
+ "step": 1944
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.5102761532460005e-06,
+ "loss": 0.6368,
+ "step": 1945
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.501712543137602e-06,
+ "loss": 0.6867,
+ "step": 1946
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4931614761828182e-06,
+ "loss": 0.7013,
+ "step": 1947
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.484622966685892e-06,
+ "loss": 0.63,
+ "step": 1948
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.476097028930058e-06,
+ "loss": 0.5955,
+ "step": 1949
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4675836771775265e-06,
+ "loss": 0.6112,
+ "step": 1950
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.4590829256694615e-06,
+ "loss": 0.6546,
+ "step": 1951
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.450594788625932e-06,
+ "loss": 0.6858,
+ "step": 1952
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.442119280245917e-06,
+ "loss": 0.6326,
+ "step": 1953
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.433656414707272e-06,
+ "loss": 0.6046,
+ "step": 1954
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.425206206166688e-06,
+ "loss": 0.7061,
+ "step": 1955
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.416768668759705e-06,
+ "loss": 0.6233,
+ "step": 1956
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.408343816600657e-06,
+ "loss": 0.6413,
+ "step": 1957
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3999316637826486e-06,
+ "loss": 0.7136,
+ "step": 1958
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3915322243775564e-06,
+ "loss": 0.6856,
+ "step": 1959
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3831455124359793e-06,
+ "loss": 0.6222,
+ "step": 1960
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.374771541987232e-06,
+ "loss": 0.7145,
+ "step": 1961
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3664103270393123e-06,
+ "loss": 0.6803,
+ "step": 1962
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.358061881578885e-06,
+ "loss": 0.6292,
+ "step": 1963
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.349726219571239e-06,
+ "loss": 0.6089,
+ "step": 1964
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3414033549603022e-06,
+ "loss": 0.6258,
+ "step": 1965
+ },
+ {
+ "epoch": 0.78,
+ "learning_rate": 2.3330933016685753e-06,
+ "loss": 0.628,
+ "step": 1966
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.3247960735971385e-06,
+ "loss": 0.6065,
+ "step": 1967
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.3165116846256154e-06,
+ "loss": 0.6498,
+ "step": 1968
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.308240148612152e-06,
+ "loss": 0.6115,
+ "step": 1969
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.299981479393393e-06,
+ "loss": 0.6953,
+ "step": 1970
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.291735690784461e-06,
+ "loss": 0.6478,
+ "step": 1971
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2835027965789345e-06,
+ "loss": 0.6134,
+ "step": 1972
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.275282810548811e-06,
+ "loss": 0.6625,
+ "step": 1973
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2670757464445146e-06,
+ "loss": 0.5628,
+ "step": 1974
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2588816179948324e-06,
+ "loss": 0.6174,
+ "step": 1975
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2507004389069267e-06,
+ "loss": 0.6719,
+ "step": 1976
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2425322228662914e-06,
+ "loss": 0.6053,
+ "step": 1977
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2343769835367425e-06,
+ "loss": 0.6557,
+ "step": 1978
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.226234734560381e-06,
+ "loss": 0.7008,
+ "step": 1979
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2181054895575847e-06,
+ "loss": 0.6959,
+ "step": 1980
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.2099892621269705e-06,
+ "loss": 0.667,
+ "step": 1981
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.20188606584538e-06,
+ "loss": 0.5813,
+ "step": 1982
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.193795914267871e-06,
+ "loss": 0.6267,
+ "step": 1983
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1857188209276605e-06,
+ "loss": 0.6944,
+ "step": 1984
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1776547993361306e-06,
+ "loss": 0.6951,
+ "step": 1985
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.169603862982801e-06,
+ "loss": 0.6034,
+ "step": 1986
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1615660253352888e-06,
+ "loss": 0.6883,
+ "step": 1987
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.153541299839318e-06,
+ "loss": 0.7422,
+ "step": 1988
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.145529699918666e-06,
+ "loss": 0.6088,
+ "step": 1989
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.1375312389751546e-06,
+ "loss": 0.63,
+ "step": 1990
+ },
+ {
+ "epoch": 0.79,
+ "learning_rate": 2.129545930388627e-06,
+ "loss": 0.6111,
+ "step": 1991
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.121573787516936e-06,
+ "loss": 0.6418,
+ "step": 1992
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.113614823695892e-06,
+ "loss": 0.577,
+ "step": 1993
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.105669052239274e-06,
+ "loss": 0.6697,
+ "step": 1994
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0977364864387896e-06,
+ "loss": 0.7175,
+ "step": 1995
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0898171395640463e-06,
+ "loss": 0.711,
+ "step": 1996
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.081911024862554e-06,
+ "loss": 0.5908,
+ "step": 1997
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.074018155559683e-06,
+ "loss": 0.5889,
+ "step": 1998
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.066138544858637e-06,
+ "loss": 0.6386,
+ "step": 1999
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0582722059404515e-06,
+ "loss": 0.6673,
+ "step": 2000
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.050419151963957e-06,
+ "loss": 0.7005,
+ "step": 2001
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0425793960657623e-06,
+ "loss": 0.6582,
+ "step": 2002
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0347529513602305e-06,
+ "loss": 0.6053,
+ "step": 2003
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0269398309394585e-06,
+ "loss": 0.6509,
+ "step": 2004
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.019140047873246e-06,
+ "loss": 0.6558,
+ "step": 2005
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0113536152091006e-06,
+ "loss": 0.6328,
+ "step": 2006
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 2.0035805459721768e-06,
+ "loss": 0.5952,
+ "step": 2007
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9958208531652876e-06,
+ "loss": 0.6317,
+ "step": 2008
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9880745497688658e-06,
+ "loss": 0.7199,
+ "step": 2009
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9803416487409465e-06,
+ "loss": 0.6881,
+ "step": 2010
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.972622163017145e-06,
+ "loss": 0.6268,
+ "step": 2011
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.964916105510637e-06,
+ "loss": 0.6075,
+ "step": 2012
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9572234891121354e-06,
+ "loss": 0.6415,
+ "step": 2013
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9495443266898615e-06,
+ "loss": 0.6967,
+ "step": 2014
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.9418786310895467e-06,
+ "loss": 0.6688,
+ "step": 2015
+ },
+ {
+ "epoch": 0.8,
+ "learning_rate": 1.934226415134377e-06,
+ "loss": 0.646,
+ "step": 2016
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.9265876916250026e-06,
+ "loss": 0.703,
+ "step": 2017
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.918962473339496e-06,
+ "loss": 0.6056,
+ "step": 2018
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.9113507730333435e-06,
+ "loss": 0.6404,
+ "step": 2019
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.9037526034394149e-06,
+ "loss": 0.6477,
+ "step": 2020
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8961679772679486e-06,
+ "loss": 0.7063,
+ "step": 2021
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8885969072065224e-06,
+ "loss": 0.6518,
+ "step": 2022
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.88103940592004e-06,
+ "loss": 0.6921,
+ "step": 2023
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.873495486050716e-06,
+ "loss": 0.5789,
+ "step": 2024
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8659651602180295e-06,
+ "loss": 0.6581,
+ "step": 2025
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.858448441018732e-06,
+ "loss": 0.73,
+ "step": 2026
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8509453410268086e-06,
+ "loss": 0.6567,
+ "step": 2027
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8434558727934636e-06,
+ "loss": 0.7118,
+ "step": 2028
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.835980048847098e-06,
+ "loss": 0.6349,
+ "step": 2029
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8285178816932913e-06,
+ "loss": 0.6762,
+ "step": 2030
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8210693838147686e-06,
+ "loss": 0.64,
+ "step": 2031
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8136345676713962e-06,
+ "loss": 0.5908,
+ "step": 2032
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.8062134457001612e-06,
+ "loss": 0.6827,
+ "step": 2033
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7988060303151256e-06,
+ "loss": 0.6266,
+ "step": 2034
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7914123339074351e-06,
+ "loss": 0.6324,
+ "step": 2035
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7840323688452833e-06,
+ "loss": 0.7147,
+ "step": 2036
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7766661474738933e-06,
+ "loss": 0.6591,
+ "step": 2037
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7693136821154989e-06,
+ "loss": 0.6744,
+ "step": 2038
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7619749850693235e-06,
+ "loss": 0.6222,
+ "step": 2039
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7546500686115542e-06,
+ "loss": 0.7077,
+ "step": 2040
+ },
+ {
+ "epoch": 0.81,
+ "learning_rate": 1.7473389449953304e-06,
+ "loss": 0.6864,
+ "step": 2041
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7400416264507192e-06,
+ "loss": 0.6863,
+ "step": 2042
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7327581251846902e-06,
+ "loss": 0.6617,
+ "step": 2043
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7254884533811034e-06,
+ "loss": 0.6427,
+ "step": 2044
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7182326232006875e-06,
+ "loss": 0.6873,
+ "step": 2045
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7109906467810033e-06,
+ "loss": 0.6879,
+ "step": 2046
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.7037625362364585e-06,
+ "loss": 0.6374,
+ "step": 2047
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6965483036582454e-06,
+ "loss": 0.6547,
+ "step": 2048
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6893479611143527e-06,
+ "loss": 0.6814,
+ "step": 2049
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6821615206495313e-06,
+ "loss": 0.7201,
+ "step": 2050
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.674988994285276e-06,
+ "loss": 0.654,
+ "step": 2051
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.667830394019807e-06,
+ "loss": 0.6365,
+ "step": 2052
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6606857318280478e-06,
+ "loss": 0.6938,
+ "step": 2053
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6535550196616102e-06,
+ "loss": 0.645,
+ "step": 2054
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6464382694487579e-06,
+ "loss": 0.5691,
+ "step": 2055
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6393354930944194e-06,
+ "loss": 0.7273,
+ "step": 2056
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6322467024801282e-06,
+ "loss": 0.6804,
+ "step": 2057
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6251719094640328e-06,
+ "loss": 0.6093,
+ "step": 2058
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6181111258808658e-06,
+ "loss": 0.6292,
+ "step": 2059
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.611064363541922e-06,
+ "loss": 0.6445,
+ "step": 2060
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.6040316342350425e-06,
+ "loss": 0.6312,
+ "step": 2061
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.597012949724598e-06,
+ "loss": 0.5113,
+ "step": 2062
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.590008321751454e-06,
+ "loss": 0.7,
+ "step": 2063
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.5830177620329712e-06,
+ "loss": 0.6579,
+ "step": 2064
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.5760412822629822e-06,
+ "loss": 0.6681,
+ "step": 2065
+ },
+ {
+ "epoch": 0.82,
+ "learning_rate": 1.5690788941117508e-06,
+ "loss": 0.6373,
+ "step": 2066
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5621306092259804e-06,
+ "loss": 0.665,
+ "step": 2067
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5551964392287788e-06,
+ "loss": 0.6302,
+ "step": 2068
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5482763957196423e-06,
+ "loss": 0.615,
+ "step": 2069
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5413704902744364e-06,
+ "loss": 0.6441,
+ "step": 2070
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5344787344453803e-06,
+ "loss": 0.6983,
+ "step": 2071
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5276011397610136e-06,
+ "loss": 0.6721,
+ "step": 2072
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.520737717726195e-06,
+ "loss": 0.5777,
+ "step": 2073
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.5138884798220798e-06,
+ "loss": 0.6403,
+ "step": 2074
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.507053437506083e-06,
+ "loss": 0.6971,
+ "step": 2075
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.500232602211883e-06,
+ "loss": 0.6684,
+ "step": 2076
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4934259853493883e-06,
+ "loss": 0.7197,
+ "step": 2077
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4866335983047264e-06,
+ "loss": 0.685,
+ "step": 2078
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4798554524402176e-06,
+ "loss": 0.5971,
+ "step": 2079
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4730915590943628e-06,
+ "loss": 0.6841,
+ "step": 2080
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.466341929581816e-06,
+ "loss": 0.6073,
+ "step": 2081
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4596065751933763e-06,
+ "loss": 0.6636,
+ "step": 2082
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.452885507195959e-06,
+ "loss": 0.6739,
+ "step": 2083
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4461787368325863e-06,
+ "loss": 0.6785,
+ "step": 2084
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.439486275322357e-06,
+ "loss": 0.6788,
+ "step": 2085
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4328081338604383e-06,
+ "loss": 0.7205,
+ "step": 2086
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4261443236180418e-06,
+ "loss": 0.6954,
+ "step": 2087
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4194948557424083e-06,
+ "loss": 0.668,
+ "step": 2088
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.412859741356778e-06,
+ "loss": 0.6654,
+ "step": 2089
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.4062389915603903e-06,
+ "loss": 0.6034,
+ "step": 2090
+ },
+ {
+ "epoch": 0.83,
+ "learning_rate": 1.3996326174284502e-06,
+ "loss": 0.6057,
+ "step": 2091
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.393040630012118e-06,
+ "loss": 0.6779,
+ "step": 2092
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.386463040338485e-06,
+ "loss": 0.654,
+ "step": 2093
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3798998594105606e-06,
+ "loss": 0.7054,
+ "step": 2094
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3733510982072485e-06,
+ "loss": 0.6796,
+ "step": 2095
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3668167676833332e-06,
+ "loss": 0.6241,
+ "step": 2096
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3602968787694615e-06,
+ "loss": 0.6681,
+ "step": 2097
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3537914423721144e-06,
+ "loss": 0.6793,
+ "step": 2098
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3473004693736037e-06,
+ "loss": 0.5879,
+ "step": 2099
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3408239706320459e-06,
+ "loss": 0.7005,
+ "step": 2100
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3343619569813437e-06,
+ "loss": 0.6827,
+ "step": 2101
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3279144392311693e-06,
+ "loss": 0.5948,
+ "step": 2102
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.3214814281669486e-06,
+ "loss": 0.564,
+ "step": 2103
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.31506293454983e-06,
+ "loss": 0.6511,
+ "step": 2104
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.308658969116694e-06,
+ "loss": 0.6267,
+ "step": 2105
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.302269542580109e-06,
+ "loss": 0.6673,
+ "step": 2106
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2958946656283188e-06,
+ "loss": 0.6731,
+ "step": 2107
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2895343489252355e-06,
+ "loss": 0.5958,
+ "step": 2108
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2831886031104123e-06,
+ "loss": 0.6421,
+ "step": 2109
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2768574387990284e-06,
+ "loss": 0.6706,
+ "step": 2110
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2705408665818707e-06,
+ "loss": 0.6288,
+ "step": 2111
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2642388970253194e-06,
+ "loss": 0.6233,
+ "step": 2112
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2579515406713194e-06,
+ "loss": 0.6499,
+ "step": 2113
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2516788080373766e-06,
+ "loss": 0.615,
+ "step": 2114
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.2454207096165405e-06,
+ "loss": 0.5939,
+ "step": 2115
+ },
+ {
+ "epoch": 0.84,
+ "learning_rate": 1.239177255877365e-06,
+ "loss": 0.6924,
+ "step": 2116
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2329484572639183e-06,
+ "loss": 0.6504,
+ "step": 2117
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2267343241957508e-06,
+ "loss": 0.7062,
+ "step": 2118
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2205348670678774e-06,
+ "loss": 0.641,
+ "step": 2119
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.214350096250767e-06,
+ "loss": 0.6691,
+ "step": 2120
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2081800220903207e-06,
+ "loss": 0.6898,
+ "step": 2121
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.2020246549078497e-06,
+ "loss": 0.7024,
+ "step": 2122
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1958840050000686e-06,
+ "loss": 0.6155,
+ "step": 2123
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.189758082639072e-06,
+ "loss": 0.6448,
+ "step": 2124
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.183646898072318e-06,
+ "loss": 0.6962,
+ "step": 2125
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1775504615226118e-06,
+ "loss": 0.6622,
+ "step": 2126
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1714687831880867e-06,
+ "loss": 0.6913,
+ "step": 2127
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.165401873242188e-06,
+ "loss": 0.6434,
+ "step": 2128
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1593497418336619e-06,
+ "loss": 0.6478,
+ "step": 2129
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1533123990865225e-06,
+ "loss": 0.6226,
+ "step": 2130
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1472898551000544e-06,
+ "loss": 0.6201,
+ "step": 2131
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1412821199487845e-06,
+ "loss": 0.6511,
+ "step": 2132
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.135289203682466e-06,
+ "loss": 0.6732,
+ "step": 2133
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1293111163260639e-06,
+ "loss": 0.593,
+ "step": 2134
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.123347867879737e-06,
+ "loss": 0.6982,
+ "step": 2135
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.117399468318824e-06,
+ "loss": 0.664,
+ "step": 2136
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.1114659275938188e-06,
+ "loss": 0.6698,
+ "step": 2137
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.105547255630368e-06,
+ "loss": 0.6471,
+ "step": 2138
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.099643462329235e-06,
+ "loss": 0.6819,
+ "step": 2139
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.0937545575663023e-06,
+ "loss": 0.7174,
+ "step": 2140
+ },
+ {
+ "epoch": 0.85,
+ "learning_rate": 1.0878805511925438e-06,
+ "loss": 0.5619,
+ "step": 2141
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0820214530340124e-06,
+ "loss": 0.6689,
+ "step": 2142
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0761772728918208e-06,
+ "loss": 0.593,
+ "step": 2143
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0703480205421302e-06,
+ "loss": 0.6272,
+ "step": 2144
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0645337057361215e-06,
+ "loss": 0.6157,
+ "step": 2145
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0587343382000027e-06,
+ "loss": 0.6637,
+ "step": 2146
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0529499276349686e-06,
+ "loss": 0.5931,
+ "step": 2147
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0471804837171916e-06,
+ "loss": 0.5576,
+ "step": 2148
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0414260160978141e-06,
+ "loss": 0.6517,
+ "step": 2149
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0356865344029231e-06,
+ "loss": 0.4765,
+ "step": 2150
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.029962048233537e-06,
+ "loss": 0.6353,
+ "step": 2151
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0242525671655912e-06,
+ "loss": 0.6019,
+ "step": 2152
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0185581007499212e-06,
+ "loss": 0.674,
+ "step": 2153
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0128786585122385e-06,
+ "loss": 0.6541,
+ "step": 2154
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0072142499531346e-06,
+ "loss": 0.6578,
+ "step": 2155
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 1.0015648845480453e-06,
+ "loss": 0.5856,
+ "step": 2156
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.95930571747239e-07,
+ "loss": 0.624,
+ "step": 2157
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.903113209758098e-07,
+ "loss": 0.6848,
+ "step": 2158
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.84707141633654e-07,
+ "loss": 0.6183,
+ "step": 2159
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.791180430954562e-07,
+ "loss": 0.6684,
+ "step": 2160
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.735440347106762e-07,
+ "loss": 0.6439,
+ "step": 2161
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.679851258035277e-07,
+ "loss": 0.6669,
+ "step": 2162
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.624413256729636e-07,
+ "loss": 0.5661,
+ "step": 2163
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.569126435926735e-07,
+ "loss": 0.7035,
+ "step": 2164
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.513990888110447e-07,
+ "loss": 0.6304,
+ "step": 2165
+ },
+ {
+ "epoch": 0.86,
+ "learning_rate": 9.459006705511664e-07,
+ "loss": 0.6113,
+ "step": 2166
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.404173980108067e-07,
+ "loss": 0.59,
+ "step": 2167
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.349492803623972e-07,
+ "loss": 0.7032,
+ "step": 2168
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.294963267530177e-07,
+ "loss": 0.7435,
+ "step": 2169
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.240585463043849e-07,
+ "loss": 0.6586,
+ "step": 2170
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.186359481128282e-07,
+ "loss": 0.6089,
+ "step": 2171
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.132285412492825e-07,
+ "loss": 0.6525,
+ "step": 2172
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.078363347592767e-07,
+ "loss": 0.6407,
+ "step": 2173
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 9.024593376629009e-07,
+ "loss": 0.6155,
+ "step": 2174
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.970975589548114e-07,
+ "loss": 0.697,
+ "step": 2175
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.917510076042058e-07,
+ "loss": 0.6343,
+ "step": 2176
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.864196925548063e-07,
+ "loss": 0.552,
+ "step": 2177
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.811036227248515e-07,
+ "loss": 0.6689,
+ "step": 2178
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.758028070070768e-07,
+ "loss": 0.6996,
+ "step": 2179
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.705172542686968e-07,
+ "loss": 0.618,
+ "step": 2180
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.652469733513958e-07,
+ "loss": 0.6943,
+ "step": 2181
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.599919730713191e-07,
+ "loss": 0.6814,
+ "step": 2182
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.547522622190385e-07,
+ "loss": 0.5437,
+ "step": 2183
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.495278495595572e-07,
+ "loss": 0.6067,
+ "step": 2184
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.443187438322864e-07,
+ "loss": 0.636,
+ "step": 2185
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.391249537510271e-07,
+ "loss": 0.6255,
+ "step": 2186
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.339464880039671e-07,
+ "loss": 0.6582,
+ "step": 2187
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.287833552536584e-07,
+ "loss": 0.6685,
+ "step": 2188
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.236355641369975e-07,
+ "loss": 0.7063,
+ "step": 2189
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.185031232652252e-07,
+ "loss": 0.5958,
+ "step": 2190
+ },
+ {
+ "epoch": 0.87,
+ "learning_rate": 8.133860412238992e-07,
+ "loss": 0.5707,
+ "step": 2191
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 8.082843265728879e-07,
+ "loss": 0.6474,
+ "step": 2192
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 8.031979878463525e-07,
+ "loss": 0.736,
+ "step": 2193
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.981270335527347e-07,
+ "loss": 0.6467,
+ "step": 2194
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.930714721747323e-07,
+ "loss": 0.5904,
+ "step": 2195
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.880313121693073e-07,
+ "loss": 0.6409,
+ "step": 2196
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.830065619676519e-07,
+ "loss": 0.5896,
+ "step": 2197
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.77997229975177e-07,
+ "loss": 0.5863,
+ "step": 2198
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.730033245715063e-07,
+ "loss": 0.6559,
+ "step": 2199
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.680248541104574e-07,
+ "loss": 0.7047,
+ "step": 2200
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.630618269200285e-07,
+ "loss": 0.6859,
+ "step": 2201
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.58114251302382e-07,
+ "loss": 0.6237,
+ "step": 2202
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.531821355338386e-07,
+ "loss": 0.5995,
+ "step": 2203
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.482654878648465e-07,
+ "loss": 0.6618,
+ "step": 2204
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.433643165199933e-07,
+ "loss": 0.6191,
+ "step": 2205
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.384786296979662e-07,
+ "loss": 0.6014,
+ "step": 2206
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.336084355715555e-07,
+ "loss": 0.6178,
+ "step": 2207
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.287537422876323e-07,
+ "loss": 0.5889,
+ "step": 2208
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.239145579671414e-07,
+ "loss": 0.6885,
+ "step": 2209
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.190908907050808e-07,
+ "loss": 0.6745,
+ "step": 2210
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.142827485704951e-07,
+ "loss": 0.6295,
+ "step": 2211
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.094901396064535e-07,
+ "loss": 0.6731,
+ "step": 2212
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 7.047130718300433e-07,
+ "loss": 0.6816,
+ "step": 2213
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 6.999515532323598e-07,
+ "loss": 0.5576,
+ "step": 2214
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 6.952055917784783e-07,
+ "loss": 0.6609,
+ "step": 2215
+ },
+ {
+ "epoch": 0.88,
+ "learning_rate": 6.904751954074574e-07,
+ "loss": 0.7437,
+ "step": 2216
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.857603720323136e-07,
+ "loss": 0.6699,
+ "step": 2217
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.810611295400171e-07,
+ "loss": 0.701,
+ "step": 2218
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.76377475791471e-07,
+ "loss": 0.6558,
+ "step": 2219
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.717094186215056e-07,
+ "loss": 0.6553,
+ "step": 2220
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.670569658388537e-07,
+ "loss": 0.7503,
+ "step": 2221
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.624201252261497e-07,
+ "loss": 0.6251,
+ "step": 2222
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.57798904539918e-07,
+ "loss": 0.6262,
+ "step": 2223
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.531933115105427e-07,
+ "loss": 0.6704,
+ "step": 2224
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.48603353842272e-07,
+ "loss": 0.5853,
+ "step": 2225
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.440290392131998e-07,
+ "loss": 0.6876,
+ "step": 2226
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.394703752752474e-07,
+ "loss": 0.6672,
+ "step": 2227
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.349273696541625e-07,
+ "loss": 0.6164,
+ "step": 2228
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.304000299494961e-07,
+ "loss": 0.6094,
+ "step": 2229
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.258883637345914e-07,
+ "loss": 0.6371,
+ "step": 2230
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.213923785565723e-07,
+ "loss": 0.6667,
+ "step": 2231
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.169120819363406e-07,
+ "loss": 0.6714,
+ "step": 2232
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.124474813685422e-07,
+ "loss": 0.6127,
+ "step": 2233
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.079985843215719e-07,
+ "loss": 0.676,
+ "step": 2234
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 6.0356539823756e-07,
+ "loss": 0.6677,
+ "step": 2235
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.991479305323433e-07,
+ "loss": 0.6894,
+ "step": 2236
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.947461885954798e-07,
+ "loss": 0.6284,
+ "step": 2237
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.903601797902126e-07,
+ "loss": 0.7177,
+ "step": 2238
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.859899114534662e-07,
+ "loss": 0.6462,
+ "step": 2239
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.816353908958361e-07,
+ "loss": 0.6722,
+ "step": 2240
+ },
+ {
+ "epoch": 0.89,
+ "learning_rate": 5.772966254015755e-07,
+ "loss": 0.6363,
+ "step": 2241
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.729736222285832e-07,
+ "loss": 0.6908,
+ "step": 2242
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.686663886083877e-07,
+ "loss": 0.6389,
+ "step": 2243
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.643749317461422e-07,
+ "loss": 0.6905,
+ "step": 2244
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.600992588206011e-07,
+ "loss": 0.6464,
+ "step": 2245
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.558393769841286e-07,
+ "loss": 0.6127,
+ "step": 2246
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.51595293362659e-07,
+ "loss": 0.7343,
+ "step": 2247
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.473670150557076e-07,
+ "loss": 0.6288,
+ "step": 2248
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.431545491363488e-07,
+ "loss": 0.6785,
+ "step": 2249
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.389579026512059e-07,
+ "loss": 0.6499,
+ "step": 2250
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.347770826204401e-07,
+ "loss": 0.7327,
+ "step": 2251
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.306120960377381e-07,
+ "loss": 0.656,
+ "step": 2252
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.264629498702966e-07,
+ "loss": 0.6279,
+ "step": 2253
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.223296510588182e-07,
+ "loss": 0.5952,
+ "step": 2254
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.182122065174988e-07,
+ "loss": 0.6165,
+ "step": 2255
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.141106231340065e-07,
+ "loss": 0.6083,
+ "step": 2256
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.100249077694797e-07,
+ "loss": 0.6501,
+ "step": 2257
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.059550672585145e-07,
+ "loss": 0.6167,
+ "step": 2258
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 5.019011084091496e-07,
+ "loss": 0.658,
+ "step": 2259
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.978630380028582e-07,
+ "loss": 0.6504,
+ "step": 2260
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.938408627945335e-07,
+ "loss": 0.618,
+ "step": 2261
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.898345895124801e-07,
+ "loss": 0.6959,
+ "step": 2262
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.858442248583995e-07,
+ "loss": 0.6264,
+ "step": 2263
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.818697755073876e-07,
+ "loss": 0.6425,
+ "step": 2264
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.779112481079107e-07,
+ "loss": 0.5697,
+ "step": 2265
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.7396864928180296e-07,
+ "loss": 0.612,
+ "step": 2266
+ },
+ {
+ "epoch": 0.9,
+ "learning_rate": 4.700419856242555e-07,
+ "loss": 0.6604,
+ "step": 2267
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.6613126370379627e-07,
+ "loss": 0.7145,
+ "step": 2268
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.6223649006229553e-07,
+ "loss": 0.7086,
+ "step": 2269
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.583576712149385e-07,
+ "loss": 0.5782,
+ "step": 2270
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.5449481365022143e-07,
+ "loss": 0.599,
+ "step": 2271
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.5064792382994216e-07,
+ "loss": 0.6503,
+ "step": 2272
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.468170081891898e-07,
+ "loss": 0.6779,
+ "step": 2273
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.4300207313632713e-07,
+ "loss": 0.63,
+ "step": 2274
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.392031250529871e-07,
+ "loss": 0.5934,
+ "step": 2275
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.3542017029406083e-07,
+ "loss": 0.5983,
+ "step": 2276
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.316532151876807e-07,
+ "loss": 0.6947,
+ "step": 2277
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.279022660352228e-07,
+ "loss": 0.6996,
+ "step": 2278
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.2416732911128247e-07,
+ "loss": 0.6989,
+ "step": 2279
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.2044841066367084e-07,
+ "loss": 0.6674,
+ "step": 2280
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.167455169134027e-07,
+ "loss": 0.6798,
+ "step": 2281
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.130586540546899e-07,
+ "loss": 0.6969,
+ "step": 2282
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.093878282549246e-07,
+ "loss": 0.6513,
+ "step": 2283
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.057330456546737e-07,
+ "loss": 0.7171,
+ "step": 2284
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 4.0209431236766793e-07,
+ "loss": 0.628,
+ "step": 2285
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.9847163448078373e-07,
+ "loss": 0.6772,
+ "step": 2286
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.9486501805405253e-07,
+ "loss": 0.6523,
+ "step": 2287
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.9127446912062606e-07,
+ "loss": 0.5986,
+ "step": 2288
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.876999936867865e-07,
+ "loss": 0.6105,
+ "step": 2289
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.8414159773192294e-07,
+ "loss": 0.6245,
+ "step": 2290
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.8059928720852957e-07,
+ "loss": 0.6183,
+ "step": 2291
+ },
+ {
+ "epoch": 0.91,
+ "learning_rate": 3.770730680421919e-07,
+ "loss": 0.6357,
+ "step": 2292
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.735629461315804e-07,
+ "loss": 0.5883,
+ "step": 2293
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.7006892734843145e-07,
+ "loss": 0.6541,
+ "step": 2294
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.6659101753754975e-07,
+ "loss": 0.6448,
+ "step": 2295
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.6312922251679373e-07,
+ "loss": 0.6369,
+ "step": 2296
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.5968354807705994e-07,
+ "loss": 0.6756,
+ "step": 2297
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.5625399998228116e-07,
+ "loss": 0.6197,
+ "step": 2298
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.528405839694149e-07,
+ "loss": 0.6209,
+ "step": 2299
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.4944330574843144e-07,
+ "loss": 0.661,
+ "step": 2300
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.46062171002306e-07,
+ "loss": 0.6924,
+ "step": 2301
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.426971853870109e-07,
+ "loss": 0.5734,
+ "step": 2302
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.3934835453149797e-07,
+ "loss": 0.6674,
+ "step": 2303
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.360156840377027e-07,
+ "loss": 0.6422,
+ "step": 2304
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.3269917948052454e-07,
+ "loss": 0.6915,
+ "step": 2305
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.293988464078202e-07,
+ "loss": 0.6663,
+ "step": 2306
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.2611469034039334e-07,
+ "loss": 0.6566,
+ "step": 2307
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.2284671677199065e-07,
+ "loss": 0.6534,
+ "step": 2308
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.1959493116928473e-07,
+ "loss": 0.5859,
+ "step": 2309
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.163593389718711e-07,
+ "loss": 0.6704,
+ "step": 2310
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.131399455922568e-07,
+ "loss": 0.6283,
+ "step": 2311
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.099367564158484e-07,
+ "loss": 0.5559,
+ "step": 2312
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.0674977680095086e-07,
+ "loss": 0.6583,
+ "step": 2313
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.03579012078753e-07,
+ "loss": 0.6516,
+ "step": 2314
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 3.004244675533141e-07,
+ "loss": 0.6294,
+ "step": 2315
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 2.972861485015666e-07,
+ "loss": 0.6758,
+ "step": 2316
+ },
+ {
+ "epoch": 0.92,
+ "learning_rate": 2.941640601732976e-07,
+ "loss": 0.5914,
+ "step": 2317
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.910582077911428e-07,
+ "loss": 0.5644,
+ "step": 2318
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.8796859655058184e-07,
+ "loss": 0.7371,
+ "step": 2319
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.8489523161992385e-07,
+ "loss": 0.6257,
+ "step": 2320
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.818381181402985e-07,
+ "loss": 0.6229,
+ "step": 2321
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.787972612256551e-07,
+ "loss": 0.6735,
+ "step": 2322
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.7577266596274577e-07,
+ "loss": 0.6351,
+ "step": 2323
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.727643374111222e-07,
+ "loss": 0.6203,
+ "step": 2324
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.697722806031211e-07,
+ "loss": 0.5875,
+ "step": 2325
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.667965005438644e-07,
+ "loss": 0.6535,
+ "step": 2326
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.6383700221124355e-07,
+ "loss": 0.6172,
+ "step": 2327
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.6089379055591613e-07,
+ "loss": 0.6203,
+ "step": 2328
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.579668705012917e-07,
+ "loss": 0.6511,
+ "step": 2329
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.5505624694353027e-07,
+ "loss": 0.6305,
+ "step": 2330
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.5216192475152945e-07,
+ "loss": 0.6641,
+ "step": 2331
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.492839087669197e-07,
+ "loss": 0.6764,
+ "step": 2332
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.464222038040509e-07,
+ "loss": 0.7149,
+ "step": 2333
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.435768146499939e-07,
+ "loss": 0.6038,
+ "step": 2334
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.407477460645191e-07,
+ "loss": 0.6738,
+ "step": 2335
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.3793500278009974e-07,
+ "loss": 0.6688,
+ "step": 2336
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.3513858950190206e-07,
+ "loss": 0.6531,
+ "step": 2337
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.3235851090777084e-07,
+ "loss": 0.6629,
+ "step": 2338
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.2959477164822942e-07,
+ "loss": 0.5791,
+ "step": 2339
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.2684737634646737e-07,
+ "loss": 0.6922,
+ "step": 2340
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.2411632959833285e-07,
+ "loss": 0.6456,
+ "step": 2341
+ },
+ {
+ "epoch": 0.93,
+ "learning_rate": 2.2140163597233033e-07,
+ "loss": 0.6787,
+ "step": 2342
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.1870330000960504e-07,
+ "loss": 0.6969,
+ "step": 2343
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.1602132622393745e-07,
+ "loss": 0.6884,
+ "step": 2344
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.1335571910174214e-07,
+ "loss": 0.6462,
+ "step": 2345
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.1070648310205443e-07,
+ "loss": 0.6971,
+ "step": 2346
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.0807362265651943e-07,
+ "loss": 0.6738,
+ "step": 2347
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.0545714216939515e-07,
+ "loss": 0.6568,
+ "step": 2348
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.0285704601753608e-07,
+ "loss": 0.6191,
+ "step": 2349
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 2.0027333855038967e-07,
+ "loss": 0.6717,
+ "step": 2350
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.9770602408998642e-07,
+ "loss": 0.6127,
+ "step": 2351
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.9515510693093875e-07,
+ "loss": 0.7501,
+ "step": 2352
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.9262059134042443e-07,
+ "loss": 0.7352,
+ "step": 2353
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.9010248155818755e-07,
+ "loss": 0.6478,
+ "step": 2354
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.8760078179653196e-07,
+ "loss": 0.5576,
+ "step": 2355
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.8511549624030346e-07,
+ "loss": 0.6564,
+ "step": 2356
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.8264662904689533e-07,
+ "loss": 0.6123,
+ "step": 2357
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.8019418434623405e-07,
+ "loss": 0.6476,
+ "step": 2358
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.7775816624077458e-07,
+ "loss": 0.6793,
+ "step": 2359
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.753385788054962e-07,
+ "loss": 0.6139,
+ "step": 2360
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.7293542608788905e-07,
+ "loss": 0.636,
+ "step": 2361
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.7054871210795188e-07,
+ "loss": 0.683,
+ "step": 2362
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.6817844085818768e-07,
+ "loss": 0.5975,
+ "step": 2363
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.6582461630359149e-07,
+ "loss": 0.6527,
+ "step": 2364
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.6348724238164583e-07,
+ "loss": 0.6235,
+ "step": 2365
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.6116632300231638e-07,
+ "loss": 0.6691,
+ "step": 2366
+ },
+ {
+ "epoch": 0.94,
+ "learning_rate": 1.58861862048042e-07,
+ "loss": 0.6824,
+ "step": 2367
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.5657386337373237e-07,
+ "loss": 0.6828,
+ "step": 2368
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.5430233080675593e-07,
+ "loss": 0.7232,
+ "step": 2369
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.520472681469376e-07,
+ "loss": 0.647,
+ "step": 2370
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.4980867916655205e-07,
+ "loss": 0.5638,
+ "step": 2371
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.475865676103161e-07,
+ "loss": 0.6725,
+ "step": 2372
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.4538093719538404e-07,
+ "loss": 0.6748,
+ "step": 2373
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.431917916113401e-07,
+ "loss": 0.6824,
+ "step": 2374
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.4101913452019277e-07,
+ "loss": 0.5894,
+ "step": 2375
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.3886296955636481e-07,
+ "loss": 0.6944,
+ "step": 2376
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.3672330032669767e-07,
+ "loss": 0.6719,
+ "step": 2377
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.3460013041043606e-07,
+ "loss": 0.5259,
+ "step": 2378
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.324934633592201e-07,
+ "loss": 0.6447,
+ "step": 2379
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.304033026970908e-07,
+ "loss": 0.6601,
+ "step": 2380
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.283296519204713e-07,
+ "loss": 0.6522,
+ "step": 2381
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.2627251449817247e-07,
+ "loss": 0.6527,
+ "step": 2382
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.2423189387137713e-07,
+ "loss": 0.6067,
+ "step": 2383
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.2220779345364143e-07,
+ "loss": 0.6662,
+ "step": 2384
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.2020021663088244e-07,
+ "loss": 0.7076,
+ "step": 2385
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1820916676138384e-07,
+ "loss": 0.6016,
+ "step": 2386
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1623464717577804e-07,
+ "loss": 0.6738,
+ "step": 2387
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1427666117704406e-07,
+ "loss": 0.7113,
+ "step": 2388
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1233521204050634e-07,
+ "loss": 0.7239,
+ "step": 2389
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.1041030301382705e-07,
+ "loss": 0.6534,
+ "step": 2390
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.085019373169971e-07,
+ "loss": 0.6481,
+ "step": 2391
+ },
+ {
+ "epoch": 0.95,
+ "learning_rate": 1.0661011814233624e-07,
+ "loss": 0.6562,
+ "step": 2392
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 1.0473484865448524e-07,
+ "loss": 0.6565,
+ "step": 2393
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 1.0287613199039592e-07,
+ "loss": 0.6853,
+ "step": 2394
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 1.0103397125933778e-07,
+ "loss": 0.6637,
+ "step": 2395
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.920836954288137e-08,
+ "loss": 0.6167,
+ "step": 2396
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.739932989489498e-08,
+ "loss": 0.5238,
+ "step": 2397
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.56068553415479e-08,
+ "loss": 0.6711,
+ "step": 2398
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.383094888129274e-08,
+ "loss": 0.6423,
+ "step": 2399
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.207161348487315e-08,
+ "loss": 0.6372,
+ "step": 2400
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 9.032885209530717e-08,
+ "loss": 0.5929,
+ "step": 2401
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.860266762789283e-08,
+ "loss": 0.5545,
+ "step": 2402
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.689306297019362e-08,
+ "loss": 0.6709,
+ "step": 2403
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.520004098204193e-08,
+ "loss": 0.5512,
+ "step": 2404
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.352360449552787e-08,
+ "loss": 0.6216,
+ "step": 2405
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.186375631499821e-08,
+ "loss": 0.6481,
+ "step": 2406
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 8.0220499217053e-08,
+ "loss": 0.6893,
+ "step": 2407
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.859383595053449e-08,
+ "loss": 0.6251,
+ "step": 2408
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.69837692365305e-08,
+ "loss": 0.6201,
+ "step": 2409
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.539030176836549e-08,
+ "loss": 0.6243,
+ "step": 2410
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.381343621159275e-08,
+ "loss": 0.6331,
+ "step": 2411
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.2253175203999e-08,
+ "loss": 0.634,
+ "step": 2412
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 7.070952135559195e-08,
+ "loss": 0.5522,
+ "step": 2413
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 6.918247724859939e-08,
+ "loss": 0.6122,
+ "step": 2414
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 6.767204543746463e-08,
+ "loss": 0.6385,
+ "step": 2415
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 6.617822844884325e-08,
+ "loss": 0.6236,
+ "step": 2416
+ },
+ {
+ "epoch": 0.96,
+ "learning_rate": 6.470102878159301e-08,
+ "loss": 0.6495,
+ "step": 2417
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 6.324044890677839e-08,
+ "loss": 0.6407,
+ "step": 2418
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 6.179649126766274e-08,
+ "loss": 0.5926,
+ "step": 2419
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 6.036915827969947e-08,
+ "loss": 0.6534,
+ "step": 2420
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.895845233053643e-08,
+ "loss": 0.6524,
+ "step": 2421
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.7564375780004843e-08,
+ "loss": 0.6495,
+ "step": 2422
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.618693096011929e-08,
+ "loss": 0.6649,
+ "step": 2423
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.482612017507327e-08,
+ "loss": 0.5616,
+ "step": 2424
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.348194570123588e-08,
+ "loss": 0.7055,
+ "step": 2425
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.2154409787141815e-08,
+ "loss": 0.5911,
+ "step": 2426
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 5.084351465350024e-08,
+ "loss": 0.6666,
+ "step": 2427
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.954926249317815e-08,
+ "loss": 0.6756,
+ "step": 2428
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.8271655471202605e-08,
+ "loss": 0.7016,
+ "step": 2429
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.701069572475958e-08,
+ "loss": 0.6367,
+ "step": 2430
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.5766385363184006e-08,
+ "loss": 0.6855,
+ "step": 2431
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.453872646796309e-08,
+ "loss": 0.6256,
+ "step": 2432
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.3327721092726314e-08,
+ "loss": 0.7018,
+ "step": 2433
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.213337126324768e-08,
+ "loss": 0.615,
+ "step": 2434
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 4.0955678977436796e-08,
+ "loss": 0.5576,
+ "step": 2435
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.979464620534113e-08,
+ "loss": 0.6092,
+ "step": 2436
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.8650274889139306e-08,
+ "loss": 0.6615,
+ "step": 2437
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.752256694313783e-08,
+ "loss": 0.5906,
+ "step": 2438
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.641152425376992e-08,
+ "loss": 0.6324,
+ "step": 2439
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.531714867959113e-08,
+ "loss": 0.6368,
+ "step": 2440
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.423944205127372e-08,
+ "loss": 0.5854,
+ "step": 2441
+ },
+ {
+ "epoch": 0.97,
+ "learning_rate": 3.3178406171608946e-08,
+ "loss": 0.5899,
+ "step": 2442
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 3.213404281550148e-08,
+ "loss": 0.6701,
+ "step": 2443
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 3.1106353729962734e-08,
+ "loss": 0.6724,
+ "step": 2444
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 3.009534063411534e-08,
+ "loss": 0.6687,
+ "step": 2445
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.910100521918313e-08,
+ "loss": 0.6591,
+ "step": 2446
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.812334914849335e-08,
+ "loss": 0.6554,
+ "step": 2447
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.716237405747113e-08,
+ "loss": 0.6019,
+ "step": 2448
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.6218081553638363e-08,
+ "loss": 0.6253,
+ "step": 2449
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.5290473216610378e-08,
+ "loss": 0.62,
+ "step": 2450
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.4379550598092604e-08,
+ "loss": 0.6332,
+ "step": 2451
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.3485315221877246e-08,
+ "loss": 0.6656,
+ "step": 2452
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.2607768583843282e-08,
+ "loss": 0.6035,
+ "step": 2453
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.1746912151955346e-08,
+ "loss": 0.7093,
+ "step": 2454
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.090274736625486e-08,
+ "loss": 0.6739,
+ "step": 2455
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 2.0075275638862247e-08,
+ "loss": 0.6386,
+ "step": 2456
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.9264498353974704e-08,
+ "loss": 0.6331,
+ "step": 2457
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.8470416867861775e-08,
+ "loss": 0.6513,
+ "step": 2458
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.769303250886534e-08,
+ "loss": 0.6153,
+ "step": 2459
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.6932346577397396e-08,
+ "loss": 0.6646,
+ "step": 2460
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.6188360345932297e-08,
+ "loss": 0.6028,
+ "step": 2461
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.546107505901451e-08,
+ "loss": 0.6615,
+ "step": 2462
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.4750491933247513e-08,
+ "loss": 0.7005,
+ "step": 2463
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.4056612157297145e-08,
+ "loss": 0.6629,
+ "step": 2464
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.3379436891886034e-08,
+ "loss": 0.5983,
+ "step": 2465
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.2718967269795823e-08,
+ "loss": 0.581,
+ "step": 2466
+ },
+ {
+ "epoch": 0.98,
+ "learning_rate": 1.2075204395860518e-08,
+ "loss": 0.7026,
+ "step": 2467
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.1448149346969806e-08,
+ "loss": 0.6869,
+ "step": 2468
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.083780317206129e-08,
+ "loss": 0.6595,
+ "step": 2469
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.0244166892124929e-08,
+ "loss": 0.6626,
+ "step": 2470
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 9.667241500196378e-09,
+ "loss": 0.6575,
+ "step": 2471
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 9.107027961356985e-09,
+ "loss": 0.6564,
+ "step": 2472
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 8.563527212734902e-09,
+ "loss": 0.6686,
+ "step": 2473
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 8.036740163498425e-09,
+ "loss": 0.6661,
+ "step": 2474
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 7.526667694858214e-09,
+ "loss": 0.6533,
+ "step": 2475
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 7.033310660065074e-09,
+ "loss": 0.6499,
+ "step": 2476
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 6.556669884408839e-09,
+ "loss": 0.5755,
+ "step": 2477
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 6.09674616521283e-09,
+ "loss": 0.6725,
+ "step": 2478
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 5.653540271841618e-09,
+ "loss": 0.6481,
+ "step": 2479
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 5.2270529456888155e-09,
+ "loss": 0.7183,
+ "step": 2480
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 4.817284900183738e-09,
+ "loss": 0.6772,
+ "step": 2481
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 4.424236820789185e-09,
+ "loss": 0.6838,
+ "step": 2482
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 4.047909364994773e-09,
+ "loss": 0.7155,
+ "step": 2483
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 3.688303162322493e-09,
+ "loss": 0.7231,
+ "step": 2484
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 3.3454188143233758e-09,
+ "loss": 0.6317,
+ "step": 2485
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 3.019256894575273e-09,
+ "loss": 0.6341,
+ "step": 2486
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 2.7098179486806376e-09,
+ "loss": 0.6622,
+ "step": 2487
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 2.4171024942720723e-09,
+ "loss": 0.6359,
+ "step": 2488
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 2.14111102100345e-09,
+ "loss": 0.6076,
+ "step": 2489
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.881843990554355e-09,
+ "loss": 0.7048,
+ "step": 2490
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.6393018366278601e-09,
+ "loss": 0.6146,
+ "step": 2491
+ },
+ {
+ "epoch": 0.99,
+ "learning_rate": 1.4134849649471982e-09,
+ "loss": 0.6865,
+ "step": 2492
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 1.2043937532613126e-09,
+ "loss": 0.6612,
+ "step": 2493
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 1.0120285513381957e-09,
+ "loss": 0.6172,
+ "step": 2494
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 8.363896809659989e-10,
+ "loss": 0.7244,
+ "step": 2495
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 6.774774359541436e-10,
+ "loss": 0.6271,
+ "step": 2496
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 5.352920821311002e-10,
+ "loss": 0.6493,
+ "step": 2497
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 4.0983385734660875e-10,
+ "loss": 0.6375,
+ "step": 2498
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 3.011029714650171e-10,
+ "loss": 0.6227,
+ "step": 2499
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 2.090996063741635e-10,
+ "loss": 0.6493,
+ "step": 2500
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 1.3382391597538403e-10,
+ "loss": 0.5727,
+ "step": 2501
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 7.527602619017415e-11,
+ "loss": 0.6701,
+ "step": 2502
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 3.3456034959078456e-11,
+ "loss": 0.5792,
+ "step": 2503
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 8.364012237249698e-12,
+ "loss": 0.5875,
+ "step": 2504
+ },
+ {
+ "epoch": 1.0,
+ "learning_rate": 0.0,
+ "loss": 0.6329,
+ "step": 2505
+ },
+ {
+ "epoch": 1.0,
+ "step": 2505,
+ "total_flos": 363864595365888.0,
+ "train_loss": 0.6966493598477331,
+ "train_runtime": 6078.687,
+ "train_samples_per_second": 52.736,
+ "train_steps_per_second": 0.412
+ }
+ ],
+ "logging_steps": 1.0,
+ "max_steps": 2505,
+ "num_input_tokens_seen": 0,
+ "num_train_epochs": 1,
+ "save_steps": 50000,
+ "total_flos": 363864595365888.0,
+ "train_batch_size": 16,
+ "trial_name": null,
+ "trial_params": null
+}
diff --git a/ICM-LLaVA-v1.6-7B/training_args.bin b/ICM-LLaVA-v1.6-7B/training_args.bin
new file mode 100644
index 0000000000000000000000000000000000000000..3b7ffcfea72bc891a30cfd3102f7aa3a1614f520
--- /dev/null
+++ b/ICM-LLaVA-v1.6-7B/training_args.bin
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fa15bf6bd426c445074768ad2dbeee998a2e2d0a27035e3cca9d1cc693b21d57
+size 6776
diff --git a/push.sh b/push.sh
new file mode 100644
index 0000000000000000000000000000000000000000..b96d53fe55edb204912168d67309092a11fd4fd3
--- /dev/null
+++ b/push.sh
@@ -0,0 +1,3 @@
+git add .
+git commit -m "commit models"
+git push