lym0302 committed
Commit 0c387fb · verified · 1 Parent(s): 5d465e5

Update config.json

Files changed (1)
  1. config.json +3 -3
config.json CHANGED
@@ -1,5 +1,5 @@
 {
-  "_name_or_path": "pretrained_models/VideoLLaMA2.1-7B-AV",
+  "_name_or_path": "VideoLLaMA2.1-7B-AV-QA",
   "architectures": [
     "Videollama2Qwen2ForCausalLM"
   ],
@@ -15,7 +15,7 @@
   "intermediate_size": 18944,
   "max_position_embeddings": 32768,
   "max_window_layers": 28,
-  "mm_audio_tower": "pretrained_models/VideoLLaMA2.1-7B-AV/audio_tower.bin",
+  "mm_audio_tower": "audio_tower.bin",
   "mm_hidden_size": 1152,
   "mm_hidden_size_a": 768,
   "mm_projector_a_type": "mlp2x_gelu",
@@ -23,7 +23,7 @@
   "mm_projector_type": "stc_connector_v35",
   "mm_vision_select_feature": "patch",
   "mm_vision_select_layer": -2,
-  "mm_vision_tower": "/ailab-train/speech/liangyunming/20250212/VideoLLaMA2/siglip-so400m-patch14-384",
+  "mm_vision_tower": "google/siglip-so400m-patch14-384",
   "model_type": "videollama2_qwen2",
   "num_attention_heads": 28,
   "num_frames": 8,