Upload folder using huggingface_hub
- .gitattributes +2 -0
- config.json +1 -2
- image_encoder.bin +2 -2
- image_encoder.xml +0 -0
- llava_with_past.bin +2 -2
- llava_with_past.xml +0 -0
- model_cache_static/2539958976520683992.blob +3 -0
- model_cache_static/9329712324554992465.blob +3 -0
- token_embed.xml +1 -1
- tokenizer.json +0 -0
- tokenizer_config.json +0 -1
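The commit title refers to the huggingface_hub client. As a hedged illustration only (the repo id, local folder path, and authentication are placeholders, not values recorded in this commit), an upload like this one is typically produced with upload_folder:

    # Minimal sketch, assuming huggingface_hub is installed and the user is
    # already authenticated (e.g. via `huggingface-cli login`).
    from huggingface_hub import upload_folder

    upload_folder(
        repo_id="your-username/your-repo",   # placeholder target repository
        folder_path="path/to/local/folder",  # placeholder local folder
        commit_message="Upload folder using huggingface_hub",
    )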
.gitattributes
CHANGED
@@ -33,3 +33,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+model_cache_static/2539958976520683992.blob filter=lfs diff=lfs merge=lfs -text
+model_cache_static/9329712324554992465.blob filter=lfs diff=lfs merge=lfs -text
config.json
CHANGED
@@ -8,7 +8,6 @@
   "eos_token_id": 2,
   "feature_outs": "encoder+decoder",
   "freeze_mm_mlp_adapter": false,
-  "head_dim": 128,
   "hidden_act": "silu",
   "hidden_size": 4096,
   "image_aspect_ratio": "pad",
@@ -38,7 +37,7 @@
   "tokenizer_model_max_length": 2048,
   "tokenizer_padding_side": "right",
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.
+  "transformers_version": "4.36.2",
   "tune_mm_mlp_adapter": false,
   "tune_vision_tokenizer": "none",
   "use_cache": true,
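For reference, the two config.json keys touched above can be checked from a local checkout with nothing more than the standard json module (the relative file path is an assumption about the working directory):

    import json

    # Read the updated config.json shipped in this commit.
    with open("config.json") as f:
        cfg = json.load(f)

    # "head_dim" was dropped and "transformers_version" is now pinned to 4.36.2.
    print("head_dim present:", "head_dim" in cfg)
    print("transformers_version:", cfg.get("transformers_version"))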
image_encoder.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:a57490fd5b2234f50a815a8a22715ff85161d1202d6d5a0e8a4496b79b67a443
+size 623777986
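The .bin entries in this commit are Git LFS pointer files: a version line, the SHA-256 oid of the real payload, and its size in bytes. A minimal sketch (assuming the full image_encoder.bin has already been fetched in place of its pointer) that recomputes the hash and compares it to the oid above:

    import hashlib

    def sha256_of(path, chunk_size=1 << 20):
        # Stream the file so multi-gigabyte weights never sit fully in memory.
        digest = hashlib.sha256()
        with open(path, "rb") as f:
            for chunk in iter(lambda: f.read(chunk_size), b""):
                digest.update(chunk)
        return digest.hexdigest()

    # oid recorded for image_encoder.bin in this commit.
    expected = "a57490fd5b2234f50a815a8a22715ff85161d1202d6d5a0e8a4496b79b67a443"
    print(sha256_of("image_encoder.bin") == expected)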
image_encoder.xml
CHANGED
The diff for this file is too large to render.
llava_with_past.bin
CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:f7997143e42b35d33c9c3697577fdf70fcef53f97fd3275057807db8e52e7f3c
+size 14238097684
llava_with_past.xml
CHANGED
The diff for this file is too large to render.
model_cache_static/2539958976520683992.blob
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:14564ef56de33f042c65ac266969f162cb37bf16841ecdcdc8e4d2da4e5ab5f2
+size 628170704
model_cache_static/9329712324554992465.blob
ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:95ce10c27058096288a7826e02dfd0a56f66bbe73f0671c305e7a9740f1973a1
+size 14258043692
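The files added under model_cache_static/ appear to be OpenVINO compiled-model cache blobs; that reading is an assumption, since the commit itself only records them as LFS objects. If it holds, a sketch like the following lets the OpenVINO runtime pick a cached blob up instead of recompiling (the "CPU" device and the IR filename are assumptions as well):

    import openvino as ov

    core = ov.Core()
    # Point the runtime at the cache directory shipped in this repo; when the
    # model/device/config match a stored blob, compile_model reuses it.
    core.set_property({"CACHE_DIR": "model_cache_static"})

    compiled = core.compile_model("llava_with_past.xml", "CPU")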
token_embed.xml
CHANGED
@@ -79,7 +79,7 @@
 			</port>
 		</output>
 	</layer>
-	<layer id="6" name="
+	<layer id="6" name="Result_48817" type="Result" version="opset1">
 		<input>
 			<port id="0" precision="FP32">
 				<dim>-1</dim>
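The changed layer is the Result (output) node of an OpenVINO IR. Assuming token_embed.xml is readable with a local OpenVINO install, the rewritten output can be inspected after loading the model (a sketch, not part of this repository):

    import openvino as ov

    core = ov.Core()
    model = core.read_model("token_embed.xml")

    # Print every model output; the diff above rewrites the Result layer
    # backing one of these entries.
    for out in model.outputs:
        print(out)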
tokenizer.json
CHANGED
The diff for this file is too large to render.
tokenizer_config.json
CHANGED
@@ -1,7 +1,6 @@
 {
   "add_bos_token": true,
   "add_eos_token": false,
-  "add_prefix_space": true,
   "added_tokens_decoder": {
     "0": {
       "content": "<unk>",
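Since tokenizer.json and tokenizer_config.json are both updated here, a quick way to confirm the tokenizer still loads is the standard transformers entry point (assuming a local checkout of this repository as the working directory):

    from transformers import AutoTokenizer

    # Load the tokenizer from the files touched in this commit.
    tok = AutoTokenizer.from_pretrained(".")
    print(tok("hello world").input_ids)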