{
  "_commit_hash": null,
  "architectures": [
    "InternVLChatModel"
  ],
  "auto_map": {
    "AutoConfig": "configuration_internvl_chat.InternVLChatConfig",
    "AutoModel": "modeling_internvl_chat.InternVLChatModel",
    "AutoModelForCausalLM": "modeling_internvl_chat.InternVLChatModel"
  },
  "downsample_ratio": 0.5,
  "dynamic_image_size": true,
  "force_image_size": 448,
  "llm_config": {
    "_name_or_path": "microsoft/Phi-3-mini-128k-instruct",
    "add_cross_attention": false,
    "architectures": [
      "Phi3ForCausalLM"
    ],
    "attn_implementation": "flash_attention_2",
    "attention_dropout": 0.0,
    "auto_map": {
      "AutoConfig": "configuration_phi3.Phi3Config",
      "AutoModelForCausalLM": "modeling_phi3.Phi3ForCausalLM"
    },
    "bad_words_ids": null,
    "begin_suppress_tokens": null,
    "bos_token_id": 1,
    "chunk_size_feed_forward": 0,
    "cross_attention_hidden_size": null,
    "decoder_start_token_id": null,
    "diversity_penalty": 0.0,
    "do_sample": false,
    "early_stopping": false,
    "embd_pdrop": 0.0,
    "encoder_no_repeat_ngram_size": 0,
    "eos_token_id": 32000,
    "exponential_decay_length_penalty": null,
    "finetuning_task": null,
    "forced_bos_token_id": null,
    "forced_eos_token_id": null,
    "hidden_act": "silu",
    "hidden_size": 3072,
    "id2label": {
      "0": "LABEL_0",
      "1": "LABEL_1"
    },
    "initializer_range": 0.02,
    "intermediate_size": 8192,
    "is_decoder": false,
    "is_encoder_decoder": false,
    "label2id": {
      "LABEL_0": 0,
      "LABEL_1": 1
    },
    "length_penalty": 1.0,
    "max_length": 20,
    "max_position_embeddings": 131072,
    "min_length": 0,
    "model_type": "phi3",
    "no_repeat_ngram_size": 0,
    "num_attention_heads": 32,
    "num_beam_groups": 1,
    "num_beams": 1,
    "num_hidden_layers": 32,
    "num_key_value_heads": 32,
    "num_return_sequences": 1,
    "original_max_position_embeddings": 4096,
    "output_attentions": false,
    "output_hidden_states": false,
    "output_scores": false,
    "pad_token_id": 32000,
    "prefix": null,
    "problem_type": null,
    "pruned_heads": {},
    "remove_invalid_values": false,
    "repetition_penalty": 1.0,
    "resid_pdrop": 0.0,
    "return_dict": true,
    "return_dict_in_generate": false,
    "rms_norm_eps": 1e-05,
    "rope_scaling": {
      "long_factor": [
        1.0299999713897705,
        1.0499999523162842,
        1.0499999523162842,
        1.0799999237060547,
        1.2299998998641968,
        1.2299998998641968,
        1.2999999523162842,
        1.4499999284744263,
        1.5999999046325684,
        1.6499998569488525,
        1.8999998569488525,
        2.859999895095825,
        3.68999981880188,
        5.419999599456787,
        5.489999771118164,
        5.489999771118164,
        9.09000015258789,
        11.579999923706055,
        15.65999984741211,
        15.769999504089355,
        15.789999961853027,
        18.360000610351562,
        21.989999771118164,
        23.079999923706055,
        30.009998321533203,
        32.35000228881836,
        32.590003967285156,
        35.56000518798828,
        39.95000457763672,
        53.840003967285156,
        56.20000457763672,
        57.95000457763672,
        59.29000473022461,
        59.77000427246094,
        59.920005798339844,
        61.190006256103516,
        61.96000671386719,
        62.50000762939453,
        63.3700065612793,
        63.48000717163086,
        63.48000717163086,
        63.66000747680664,
        63.850006103515625,
        64.08000946044922,
        64.760009765625,
        64.80001068115234,
        64.81001281738281,
        64.81001281738281
      ],
      "short_factor": [
        1.05,
        1.05,
        1.05,
        1.1,
        1.1,
        1.1500000000000001,
        1.2000000000000002,
        1.2500000000000002,
        1.3000000000000003,
        1.3500000000000003,
        1.5000000000000004,
        2.000000000000001,
        2.000000000000001,
        2.000000000000001,
        2.000000000000001,
        2.000000000000001,
        2.000000000000001,
        2.000000000000001,
        2.000000000000001,
        2.000000000000001,
        2.000000000000001,
        2.000000000000001,
        2.000000000000001,
        2.000000000000001,
        2.000000000000001,
        2.000000000000001,
        2.000000000000001,
        2.000000000000001,
        2.000000000000001,
        2.000000000000001,
        2.000000000000001,
        2.000000000000001,
        2.0500000000000007,
        2.0500000000000007,
        2.0500000000000007,
        2.1000000000000005,
        2.1000000000000005,
        2.1000000000000005,
        2.1500000000000004,
        2.1500000000000004,
        2.3499999999999996,
        2.549999999999999,
        2.5999999999999988,
        2.5999999999999988,
        2.7499999999999982,
        2.849999999999998,
        2.849999999999998,
        2.9499999999999975
      ],
      "type": "su"
    },
    "rope_theta": 10000.0,
    "sep_token_id": null,
    "sliding_window": 262144,
    "suppress_tokens": null,
    "task_specific_params": null,
    "temperature": 1.0,
    "tf_legacy_loss": false,
    "tie_encoder_decoder": false,
    "tie_word_embeddings": false,
    "tokenizer_class": null,
    "top_k": 50,
    "top_p": 1.0,
    "torch_dtype": "bfloat16",
    "torchscript": false,
    "transformers_version": "4.37.2",
    "typical_p": 1.0,
    "use_bfloat16": true,
    "use_cache": true,
    "vocab_size": 32020
  },
  "max_dynamic_patch": 12,
  "min_dynamic_patch": 1,
  "model_type": "internvl_chat",
  "ps_version": "v2",
  "select_layer": -1,
  "template": "phi3-chat",
  "torch_dtype": "bfloat16",
  "use_backbone_lora": 0,
  "use_llm_lora": 0,
  "use_thumbnail": true,
  "vision_config": {
    "architectures": [
      "InternVisionModel"
    ],
    "attention_dropout": 0.0,
    "drop_path_rate": 0.0,
    "dropout": 0.0,
    "hidden_act": "gelu",
    "hidden_size": 1024,
    "image_size": 448,
    "initializer_factor": 1.0,
    "initializer_range": 0.02,
    "intermediate_size": 4096,
    "layer_norm_eps": 1e-06,
    "model_type": "intern_vit_6b",
    "norm_type": "layer_norm",
    "num_attention_heads": 16,
    "num_channels": 3,
    "num_hidden_layers": 24,
    "output_attentions": false,
    "output_hidden_states": false,
    "patch_size": 14,
    "qk_normalization": false,
    "qkv_bias": true,
    "return_dict": true,
    "torch_dtype": "bfloat16",
    "transformers_version": "4.37.2",
    "use_bfloat16": true,
    "use_flash_attn": true
  }
}