lihongjie commited on
Commit
4ad77a2
·
1 Parent(s): 08e93b8

添加chunk prefill模型和axcl算力卡脚本

Browse files
Files changed (48) hide show
  1. .gitattributes +2 -0
  2. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/Qwen2.5-VL-3B-Instruct_vision_nchw448.axmodel +3 -0
  3. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/Qwen2.5-VL-3B-Instruct_vision_nhwc.axmodel +3 -0
  4. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/model.embed_tokens.weight.bfloat16.bin +3 -0
  5. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l0_together.axmodel +3 -0
  6. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l10_together.axmodel +3 -0
  7. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l11_together.axmodel +3 -0
  8. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l12_together.axmodel +3 -0
  9. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l13_together.axmodel +3 -0
  10. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l14_together.axmodel +3 -0
  11. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l15_together.axmodel +3 -0
  12. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l16_together.axmodel +3 -0
  13. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l17_together.axmodel +3 -0
  14. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l18_together.axmodel +3 -0
  15. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l19_together.axmodel +3 -0
  16. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l1_together.axmodel +3 -0
  17. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l20_together.axmodel +3 -0
  18. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l21_together.axmodel +3 -0
  19. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l22_together.axmodel +3 -0
  20. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l23_together.axmodel +3 -0
  21. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l24_together.axmodel +3 -0
  22. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l25_together.axmodel +3 -0
  23. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l26_together.axmodel +3 -0
  24. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l27_together.axmodel +3 -0
  25. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l28_together.axmodel +3 -0
  26. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l29_together.axmodel +3 -0
  27. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l2_together.axmodel +3 -0
  28. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l30_together.axmodel +3 -0
  29. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l31_together.axmodel +3 -0
  30. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l32_together.axmodel +3 -0
  31. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l33_together.axmodel +3 -0
  32. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l34_together.axmodel +3 -0
  33. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l35_together.axmodel +3 -0
  34. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l3_together.axmodel +3 -0
  35. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l4_together.axmodel +3 -0
  36. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l5_together.axmodel +3 -0
  37. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l6_together.axmodel +3 -0
  38. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l7_together.axmodel +3 -0
  39. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l8_together.axmodel +3 -0
  40. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l9_together.axmodel +3 -0
  41. Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_post.axmodel +3 -0
  42. README.md +122 -1
  43. qwen2_tokenizer_image_448.py +19 -4
  44. qwen2_tokenizer_video_308.py +83 -3
  45. run_qwen2_5_vl_image_axcl_aarch64.sh +18 -0
  46. run_qwen2_5_vl_image_axcl_x86.sh +18 -0
  47. run_qwen2_5_vl_video_axcl_aarch64.sh +18 -0
  48. run_qwen2_5_vl_video_axcl_x86.sh +18 -0
.gitattributes CHANGED
@@ -36,3 +36,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
36
  *.axmodel filter=lfs diff=lfs merge=lfs -text
37
  main filter=lfs diff=lfs merge=lfs -text
38
  image/ssd_horse.jpg filter=lfs diff=lfs merge=lfs -text
 
 
 
36
  *.axmodel filter=lfs diff=lfs merge=lfs -text
37
  main filter=lfs diff=lfs merge=lfs -text
38
  image/ssd_horse.jpg filter=lfs diff=lfs merge=lfs -text
39
+ main_axcl_aarch64 filter=lfs diff=lfs merge=lfs -text
40
+ main_axcl_x86 filter=lfs diff=lfs merge=lfs -text
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/Qwen2.5-VL-3B-Instruct_vision_nchw448.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:47c6a5c75e3941c49123018f352785dbcbd028dd7d1e741a16c6453f9c9209cf
3
+ size 921254437
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/Qwen2.5-VL-3B-Instruct_vision_nhwc.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:035e9118aa3f106f35f6cf4caa6829a8c97695693b42aea009e43ae39e9b1a59
3
+ size 777801587
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/model.embed_tokens.weight.bfloat16.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b84907567aa829c6f24cadbdeb68c3c44d25fc0a8be8e917fd603cb64f72810d
3
+ size 622329856
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l0_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:51a79521d270e312062df608512706eaa3e259d97ac79514cba3461150a92496
3
+ size 90748908
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l10_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cae58a2b7923870f93540409bd4a8f1c63398ebdaeb0d51905d1f59f2b5aa51f
3
+ size 90755116
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l11_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:60cd4540069957200d622642c91015ff93744b2bc17a9e2494f8e1a3fb78a34d
3
+ size 90756172
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l12_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8c4302238aeeb83ab4ef8f4317e2a4d39917532ba07c46d844a1540b0abac2fb
3
+ size 90756268
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l13_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:34f1efba5e7bc27ddb8902e3c4347b881fdd3616e6ad7709f6234cdd81d0a931
3
+ size 90756844
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l14_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6041968d265ecbe039719c0bb31b068d3c124fbda5501a231c0bc73b04806355
3
+ size 90755628
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l15_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cc59dd081c621b5f78a80665e5bb50708f665ce8df834b81bb959eaf68203aa6
3
+ size 90754924
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l16_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3403d19c196f2a5a0f86c862b42e79034986edae8dce8cda526a9c58791bd0d0
3
+ size 90756204
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l17_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:42638c3a6d26560aa078110bdd7b34221eaaf874d380d4afffcc5bf064137d9d
3
+ size 90753068
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l18_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:b3c87b2b8b35fa4a1c744a96f574c7ccd6672edbbcf28640bab8b015721b7c8c
3
+ size 90755308
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l19_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9505a70f508794afc7b5c1a6524d227e8424b04cbaaec08d5676a637990da1f3
3
+ size 90752876
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l1_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c2741d3af4b13c85f35e596b4c4af194aa489803007a1c4a92f93499a5fdec63
3
+ size 90736556
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l20_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:847399a80afded39d556b153f53483b74b4e17308cb420df1b37b5e64a3c891a
3
+ size 90751788
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l21_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c5d3e45a63ba393186a06184006aafcdea62129b414f5df8e86da40aa764cdf9
3
+ size 90754380
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l22_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8a8b09fc38f16380a3565fa430ae1b97a09c74e663cda85c40b1a49b737f4d1f
3
+ size 90756364
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l23_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f736729cd081a16ccbd81d87fc420adf7612baa0a6b35e2f50557b8ccc44f966
3
+ size 90752428
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l24_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4d4c4571d43628a914526fcee5af0aa9ba37f8fb6bd1813b3257c49eb1243a2d
3
+ size 90756204
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l25_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:6eb156c97aab89cae01fe4e1e605c5fef025f9aa5e0f8cfd7c6e04c2ed06bbd3
3
+ size 90754572
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l26_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:a5755b87ecd176c77436079c32a19c8623ed015db200e7061c83327ce47db3d0
3
+ size 90755116
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l27_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:35cc4ee732852c91b2fe9d63d158adbed81e780522459bc70ef3075a5b898de2
3
+ size 90752364
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l28_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:e7a337e42d0b12ee92bbeecbe9b3e5d7e4580a6e5e9d56ee05147606e2dd4840
3
+ size 90754476
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l29_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0357c4ab39ee73fc410ee703df3fb14f53a75dae04555c4747879af343be7d16
3
+ size 90756588
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l2_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:637456ba1f3a34cfecb227b2c6f1497fcf2e06819ef17d21968fb90972862486
3
+ size 90740012
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l30_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4a9126537341353c715539cab61dc5104cae6fccf3c2e1f63879e1f5b534b3e7
3
+ size 90755180
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l31_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:edbb9a2f10db51dc636ba1690455e7cd36c38cfee0acdea6906705db675a21d9
3
+ size 90753260
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l32_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:823d29dfe1d3ab990d00cd34979faa5b7b02232576f679785bfe9e63e5db3d8c
3
+ size 90754508
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l33_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:dc35da5cf2958dfab6ceab4f9125055a8f2f3f4bc06f4eb19f4c682f3a1ead49
3
+ size 90756492
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l34_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:29a39b7f7595a0ab9366a8ea7c7f02fd23689cca03906e86ade07de7a71648a3
3
+ size 90757612
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l35_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:f578c09e0e5570af1807c95076f2f4da430ce094c01a90409a1801c43e4c6c97
3
+ size 90756812
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l3_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c19d25d84398b51fcb06b6450e36b8272bd71a02e7cf98368fcec9386608c3cd
3
+ size 90748940
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l4_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cc453a3db86348b7ff30029dadb00f7b050aa19b7c705b836d616521806375d9
3
+ size 90747148
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l5_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:3fed937c6a11c7a561a5b53f357d2e21c8693b04b5799f10e78d7ce65ab036db
3
+ size 90753676
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l6_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:901709229028a2537434fb6b9649466b0a667118e8536b72bd9751496ef1d486
3
+ size 90754828
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l7_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:d01a13ede4f8af89726c63087c598f9e225139a2ddba5e016347385ddf1ce2ee
3
+ size 90756524
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l8_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:90d9eb43359555d0fe4813e29aa8a17380b1cbba99f686ce5b7c5fbdc6635933
3
+ size 90757132
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_p128_l9_together.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:67f8801cef3bd233b806acdb373598efc493cdf5f1e364944dce9b35fcd46a13
3
+ size 90756908
Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512/qwen2_5_vl_post.axmodel ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:87c2abcce5f1701f751c57848fa50cdce35e06a84d6df12816291e1216fcd1af
3
+ size 339965928
README.md CHANGED
@@ -62,6 +62,8 @@ root@ax650:/mnt/qtang/llm-test/qwen2.5-vl-3b# tree -L 2
62
  ├── image
63
  │   └── ssd_car.jpg
64
  ├── main
 
 
65
  ├── python
66
  │   ├── cv_resize.py
67
  │   ├── infer_image.py
@@ -97,6 +99,10 @@ root@ax650:/mnt/qtang/llm-test/qwen2.5-vl-3b# tree -L 2
97
  ├── qwen2_tokenizer_video_308.py
98
  ├── run_qwen2_5_vl_image.sh
99
  ├── run_qwen2_5_vl_video.sh
 
 
 
 
100
  └── video
101
  ├── frame_0075.jpg
102
  ......
@@ -210,4 +216,119 @@ video/frame_0056.jpg
210
  #### Inference with M.2 Accelerator card
211
  What is M.2 Accelerator card?, Show this DEMO based on Raspberry PI 5.
212
 
213
- TODO
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
62
  ├── image
63
  │   └── ssd_car.jpg
64
  ├── main
65
+ ├── main_axcl_x86
66
+ ├── main_axcl_aarch64
67
  ├── python
68
  │   ├── cv_resize.py
69
  │   ├── infer_image.py
 
99
  ├── qwen2_tokenizer_video_308.py
100
  ├── run_qwen2_5_vl_image.sh
101
  ├── run_qwen2_5_vl_video.sh
102
+ ├── run_qwen2_5_vl_image_axcl_x86.sh
103
+ ├── run_qwen2_5_vl_image_axcl_aarch64.sh
104
+ ├── run_qwen2_5_vl_video_axcl_x86.sh
105
+ ├── run_qwen2_5_vl_video_axcl_aarch64.sh
106
  └── video
107
  ├── frame_0075.jpg
108
  ......
 
216
  #### Inference with M.2 Accelerator card
217
  What is M.2 Accelerator card?, Show this DEMO based on Raspberry PI 5.
218
 
219
+ #### Image understand demo
220
+
221
+ ##### start tokenizer server for image understand demo
222
+
223
+ ```
224
+ python3 qwen2_tokenizer_image_448.py --port 12345
225
+ ```
226
+
227
+ ##### run image understand demo
228
+
229
+ - input text
230
+
231
+ ```
232
+ 描述这张图片
233
+ ```
234
+
235
+ - input image
236
+
237
+ ![](./image/ssd_car.jpg)
238
+
239
+ ```
240
+ (base) axera@raspberrypi:~/lhj/Qwen2.5-VL-3B-Instruct $ bash run_qwen2_5_vl_image_axcl_aarch64.sh
241
+ [I][ Init][ 162]: LLM init start
242
+ [I][ Init][ 34]: connect http://127.0.0.1:12345 ok
243
+ [I][ Init][ 267]: IMAGE_CONTEXT_TOKEN: 151655, IMAGE_START_TOKEN: 151652
244
+ [I][ Init][ 328]: image encoder output float32
245
+
246
+ [I][ Init][ 340]: max_token_len : 1023
247
+ [I][ Init][ 343]: kv_cache_size : 256, kv_cache_num: 1023
248
+ [I][ Init][ 351]: prefill_token_num : 128
249
+ [I][ Init][ 355]: grp: 1, prefill_max_token_num : 1
250
+ [I][ Init][ 355]: grp: 2, prefill_max_token_num : 128
251
+ [I][ Init][ 355]: grp: 3, prefill_max_token_num : 256
252
+ [I][ Init][ 355]: grp: 4, prefill_max_token_num : 384
253
+ [I][ Init][ 355]: grp: 5, prefill_max_token_num : 512
254
+ [I][ Init][ 359]: prefill_max_token_num : 512
255
+ ________________________
256
+ | ID| remain cmm(MB)|
257
+ ========================
258
+ | 0| 2286|
259
+ ¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯
260
+ [E][ load_config][ 278]: config file(post_config.json) open failed
261
+ [W][ Init][ 452]: load postprocess config(post_config.json) failed
262
+ [I][ Init][ 456]: LLM init ok
263
+ Type "q" to exit, Ctrl+c to stop current running
264
+ prompt >> 描述这张图片
265
+ image >> image/ssd_car.jpg
266
+ [I][ Encode][ 539]: image encode time : 772.851990 ms, size : 524288
267
+ [I][ Run][ 625]: input token num : 280, prefill_split_num : 3
268
+ [I][ Run][ 659]: input_num_token:128
269
+ [I][ Run][ 659]: input_num_token:128
270
+ [I][ Run][ 659]: input_num_token:24
271
+ [I][ Run][ 796]: ttft: 2067.18 ms
272
+ 这张图片展示了一条繁忙的城市街道。前景中,一名女子站在人行道上,穿着黑色外套,面带微笑。她旁边是一辆红色的双层巴士,巴士上有一个广告,上面写着“THINGS GET MORE EXITING WHEN YOU SAY ‘YES’ VirginMoney.co.uk”。巴士的车牌号是“L15”。巴士旁边停着一辆黑色的面包车。背景中可以看到一些商店和行人,街道两旁有路灯和商店的招牌。整体环境显得非常繁忙和现代。
273
+
274
+ [N][ Run][ 949]: hit eos,avg 4.12 token/s
275
+ ```
276
+
277
+ #### Video understand demo
278
+
279
+ Please pre-process the image of the video file into a 308x308 size picture
280
+
281
+ ##### start tokenizer server for image understand demo
282
+
283
+ ```
284
+ python qwen2_tokenizer_video_308.py --port 12345
285
+ ```
286
+
287
+ ##### run image understand demo
288
+
289
+ ```
290
+ (base) axera@raspberrypi:~/lhj/Qwen2.5-VL-3B-Instruct $ bash run_qwen2_5_vl_video_axcl_aarch64.sh
291
+ [I][ Init][ 162]: LLM init start
292
+ [I][ Init][ 34]: connect http://127.0.0.1:12345 ok
293
+ [I][ Init][ 267]: IMAGE_CONTEXT_TOKEN: 151656, IMAGE_START_TOKEN: 151652
294
+ [I][ Init][ 328]: image encoder output float32
295
+
296
+ [I][ Init][ 340]: max_token_len : 1023
297
+ [I][ Init][ 343]: kv_cache_size : 256, kv_cache_num: 1023
298
+ [I][ Init][ 351]: prefill_token_num : 128
299
+ [I][ Init][ 355]: grp: 1, prefill_max_token_num : 1
300
+ [I][ Init][ 355]: grp: 2, prefill_max_token_num : 128
301
+ [I][ Init][ 355]: grp: 3, prefill_max_token_num : 256
302
+ [I][ Init][ 355]: grp: 4, prefill_max_token_num : 384
303
+ [I][ Init][ 355]: grp: 5, prefill_max_token_num : 512
304
+ [I][ Init][ 359]: prefill_max_token_num : 512
305
+ ________________________
306
+ | ID| remain cmm(MB)|
307
+ ========================
308
+ | 0| 2464|
309
+ ¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯
310
+ [E][ load_config][ 278]: config file(post_config.json) open failed
311
+ [W][ Init][ 452]: load postprocess config(post_config.json) failed
312
+ [I][ Init][ 456]: LLM init ok
313
+ Type "q" to exit, Ctrl+c to stop current running
314
+ prompt >> 描述这个视频的内容
315
+ image >> video
316
+ video/frame_0000.jpg
317
+ video/frame_0008.jpg
318
+ video/frame_0016.jpg
319
+ video/frame_0024.jpg
320
+ video/frame_0032.jpg
321
+ video/frame_0040.jpg
322
+ video/frame_0048.jpg
323
+ video/frame_0056.jpg
324
+ [I][ Encode][ 539]: image encode time : 1481.107056 ms, size : 991232
325
+ [I][ Run][ 625]: input token num : 509, prefill_split_num : 4
326
+ [I][ Run][ 659]: input_num_token:128
327
+ [I][ Run][ 659]: input_num_token:128
328
+ [I][ Run][ 659]: input_num_token:128
329
+ [I][ Run][ 659]: input_num_token:125
330
+ [I][ Run][ 796]: ttft: 3049.59 ms
331
+ 视频展示了两只松鼠在户外的场景。背景是模糊的山脉和蓝天,前景中有松鼠在互动。松鼠的毛色是棕色和灰色的混合,它们的爪子是橙色的。松鼠似乎在互相玩耍或争抢,它们的爪子和嘴巴都伸向对方。整个场景显得非常自然和生动。
332
+
333
+ [N][ Run][ 949]: hit eos,avg 4.15 token/s
334
+ ```
qwen2_tokenizer_image_448.py CHANGED
@@ -87,10 +87,6 @@ class Tokenizer_Http():
87
 
88
  # official implementation
89
  text = f'<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n<|im_start|>user\n<|vision_start|>' + '<|image_pad|>' * 256 + f'<|vision_end|>{content}<|im_end|>\n<|im_start|>assistant\n'
90
-
91
- # better for quantation model
92
- # text = f'<|im_start|>user\n{content}<|im_end|>\n<|im_start|>user\n<|vision_start|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|i
mage_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|image_pad|><|vision_end|><|im_end|>\n<|im_start|>assistant\n'
93
-
94
 
95
  output_kwargs = {'text_kwargs': {'padding': True, 'return_tensors': 'pt'}, 'images_kwargs': {'return_tensors': 'pt'}, 'audio_kwargs': {'padding': True, 'return_tensors': 'pt'}, 'videos_kwargs': {'fps': 2.0, 'return_tensors': 'pt'}, 'common_kwargs': {'return_tensors': 'pt'}}
96
 
@@ -117,6 +113,13 @@ class Tokenizer_Http():
117
  def eos_token(self):
118
  return self.tokenizer.eos_token
119
 
 
 
 
 
 
 
 
120
 
121
  tokenizer = Tokenizer_Http()
122
 
@@ -164,6 +167,18 @@ class Request(BaseHTTPRequestHandler):
164
  msg = json.dumps({'eos_id': -1})
165
  else:
166
  msg = json.dumps({'eos_id': eos_id})
 
 
 
 
 
 
 
 
 
 
 
 
167
  else:
168
  msg = 'error'
169
 
 
87
 
88
  # official implementation
89
  text = f'<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n<|im_start|>user\n<|vision_start|>' + '<|image_pad|>' * 256 + f'<|vision_end|>{content}<|im_end|>\n<|im_start|>assistant\n'
 
 
 
 
90
 
91
  output_kwargs = {'text_kwargs': {'padding': True, 'return_tensors': 'pt'}, 'images_kwargs': {'return_tensors': 'pt'}, 'audio_kwargs': {'padding': True, 'return_tensors': 'pt'}, 'videos_kwargs': {'fps': 2.0, 'return_tensors': 'pt'}, 'common_kwargs': {'return_tensors': 'pt'}}
92
 
 
113
  def eos_token(self):
114
  return self.tokenizer.eos_token
115
 
116
+ @property
117
+ def img_start_token(self):
118
+ return self.tokenizer.encode("<|vision_start|>")[0]
119
+
120
+ @property
121
+ def img_context_token(self):
122
+ return self.tokenizer.encode("<|image_pad|>")[0]
123
 
124
  tokenizer = Tokenizer_Http()
125
 
 
167
  msg = json.dumps({'eos_id': -1})
168
  else:
169
  msg = json.dumps({'eos_id': eos_id})
170
+ elif self.path == '/img_start_token':
171
+ img_start_token = tokenizer.img_start_token
172
+ if img_start_token is None:
173
+ msg = json.dumps({'img_start_token': -1})
174
+ else:
175
+ msg = json.dumps({'img_start_token': img_start_token})
176
+ elif self.path == '/img_context_token':
177
+ img_context_token = tokenizer.img_context_token
178
+ if img_context_token is None:
179
+ msg = json.dumps({'img_context_token': -1})
180
+ else:
181
+ msg = json.dumps({'img_context_token': img_context_token})
182
  else:
183
  msg = 'error'
184
 
qwen2_tokenizer_video_308.py CHANGED
@@ -4,6 +4,69 @@ from http.server import HTTPServer, BaseHTTPRequestHandler
4
  import json
5
  import argparse
6
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
7
 
8
  class Tokenizer_Http():
9
 
@@ -24,9 +87,7 @@ class Tokenizer_Http():
24
  # official implementation
25
  text = f"<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n<|im_start|>user\n<|vision_start|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pa
d|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_
pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|vide
o_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|vision_end|>{content}<|im_end|>\n<|im_start|>assistant\n"
26
 
27
- # better for quantation model
28
- # text = f"<|im_start|>user\n{content}<|im_end|>\n<|im_start|>user\n<|vision_start|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|v
ideo_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><
|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|
><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|vision_end|><|im_end|>\n<|im_start|>assistant\n"
29
-
30
  output_kwargs = {'text_kwargs': {'padding': True, 'return_tensors': 'pt'}, 'images_kwargs': {'return_tensors': 'pt'}, 'audio_kwargs': {'padding': True, 'return_tensors': 'pt'}, 'videos_kwargs': {'return_tensors': 'pt'}, 'common_kwargs': {'return_tensors': 'pt'}}
31
 
32
  text_inputs = self.tokenizer(text, **output_kwargs["text_kwargs"])
@@ -52,6 +113,13 @@ class Tokenizer_Http():
52
  def eos_token(self):
53
  return self.tokenizer.eos_token
54
 
 
 
 
 
 
 
 
55
 
56
  tokenizer = Tokenizer_Http()
57
 
@@ -99,6 +167,18 @@ class Request(BaseHTTPRequestHandler):
99
  msg = json.dumps({'eos_id': -1})
100
  else:
101
  msg = json.dumps({'eos_id': eos_id})
 
 
 
 
 
 
 
 
 
 
 
 
102
  else:
103
  msg = 'error'
104
 
 
4
  import json
5
  import argparse
6
 
7
+ def _prompt_split_image(
8
+ image_seq_len,
9
+ image_rows,
10
+ image_cols,
11
+ fake_token_around_image,
12
+ image_token,
13
+ global_img_token,
14
+ ):
15
+ """Prompt with expanded image tokens for when the image is split into patches."""
16
+ text_split_images = ""
17
+ for n_h in range(image_rows):
18
+ for n_w in range(image_cols):
19
+ text_split_images += (
20
+ f"{fake_token_around_image}"
21
+ + f"<row_{n_h + 1}_col_{n_w + 1}>"
22
+ + f"{image_token}" * image_seq_len
23
+ )
24
+ text_split_images += "\n"
25
+
26
+ text_split_images += (
27
+ f"\n{fake_token_around_image}"
28
+ + f"{global_img_token}"
29
+ + f"{image_token}" * image_seq_len
30
+ + f"{fake_token_around_image}"
31
+ )
32
+ return text_split_images
33
+
34
+
35
+ def _prompt_single_image(
36
+ image_seq_len, fake_token_around_image, image_token, global_img_token
37
+ ):
38
+ """Prompt with expanded image tokens for a single image."""
39
+ return (
40
+ f"{fake_token_around_image}"
41
+ + f"{global_img_token}"
42
+ + f"{image_token}" * image_seq_len
43
+ + f"{fake_token_around_image}"
44
+ )
45
+
46
+
47
def get_image_prompt_string(
    image_rows,
    image_cols,
    image_seq_len,
    fake_token_around_image,
    image_token,
    global_img_token,
):
    """Dispatch to the single-image or split-image prompt builder.

    A (0, 0) rows/cols grid signals that the image was not split into
    patches; anything else is treated as a patch grid.
    """
    unsplit = image_rows == 0 and image_cols == 0
    if unsplit:
        return _prompt_single_image(
            image_seq_len,
            fake_token_around_image=fake_token_around_image,
            image_token=image_token,
            global_img_token=global_img_token,
        )
    return _prompt_split_image(
        image_seq_len,
        image_rows,
        image_cols,
        fake_token_around_image,
        image_token,
        global_img_token,
    )
70
 
71
  class Tokenizer_Http():
72
 
 
87
  # official implementation
88
  text = f"<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n<|im_start|>user\n<|vision_start|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pa
d|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_
pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|vide
o_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|video_pad|><|vision_end|>{content}<|im_end|>\n<|im_start|>assistant\n"
89
 
90
+
 
 
91
  output_kwargs = {'text_kwargs': {'padding': True, 'return_tensors': 'pt'}, 'images_kwargs': {'return_tensors': 'pt'}, 'audio_kwargs': {'padding': True, 'return_tensors': 'pt'}, 'videos_kwargs': {'return_tensors': 'pt'}, 'common_kwargs': {'return_tensors': 'pt'}}
92
 
93
  text_inputs = self.tokenizer(text, **output_kwargs["text_kwargs"])
 
113
  def eos_token(self):
114
  return self.tokenizer.eos_token
115
 
116
+ @property
117
+ def img_start_token(self):
118
+ return self.tokenizer.encode("<|vision_start|>")[0]
119
+
120
+ @property
121
+ def img_context_token(self):
122
+ return self.tokenizer.encode("<|video_pad|>")[0]
123
 
124
  tokenizer = Tokenizer_Http()
125
 
 
167
  msg = json.dumps({'eos_id': -1})
168
  else:
169
  msg = json.dumps({'eos_id': eos_id})
170
+ elif self.path == '/img_start_token':
171
+ img_start_token = tokenizer.img_start_token
172
+ if img_start_token is None:
173
+ msg = json.dumps({'img_start_token': -1})
174
+ else:
175
+ msg = json.dumps({'img_start_token': img_start_token})
176
+ elif self.path == '/img_context_token':
177
+ img_context_token = tokenizer.img_context_token
178
+ if img_context_token is None:
179
+ msg = json.dumps({'img_context_token': -1})
180
+ else:
181
+ msg = json.dumps({'img_context_token': img_context_token})
182
  else:
183
  msg = 'error'
184
 
run_qwen2_5_vl_image_axcl_aarch64.sh ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/usr/bin/env bash
# Run the Qwen2.5-VL image demo on an AXCL accelerator card (aarch64 host).
# Expects the tokenizer HTTP server to be listening on 127.0.0.1:12345 and
# post_config.json to be present in the current directory.
set -euo pipefail

readonly AXMODEL_DIR=./Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512

# NOTE(review): "--filename_image_encoder_axmodedl" looks misspelled, but the
# flag must match exactly what main_axcl_aarch64 parses — confirm before renaming.
./main_axcl_aarch64 \
  --template_filename_axmodel "${AXMODEL_DIR}/qwen2_5_vl_p128_l%d_together.axmodel" \
  --axmodel_num 36 \
  --filename_image_encoder_axmodedl "${AXMODEL_DIR}/Qwen2.5-VL-3B-Instruct_vision_nchw448.axmodel" \
  --use_mmap_load_embed 1 \
  --filename_tokenizer_model "http://127.0.0.1:12345" \
  --filename_post_axmodel "${AXMODEL_DIR}/qwen2_5_vl_post.axmodel" \
  --filename_tokens_embed "${AXMODEL_DIR}/model.embed_tokens.weight.bfloat16.bin" \
  --tokens_embed_num 151936 \
  --tokens_embed_size 2048 \
  --live_print 1 \
  --img_width 448 \
  --img_height 448 \
  --vision_start_token_id 151652 \
  --post_config_path post_config.json \
  --devices 0,
run_qwen2_5_vl_image_axcl_x86.sh ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/usr/bin/env bash
# Run the Qwen2.5-VL image demo on an AXCL accelerator card (x86 host).
# Expects the tokenizer HTTP server to be listening on 127.0.0.1:12345 and
# post_config.json to be present in the current directory.
set -euo pipefail

readonly AXMODEL_DIR=./Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512

# NOTE(review): "--filename_image_encoder_axmodedl" looks misspelled, but the
# flag must match exactly what main_axcl_x86 parses — confirm before renaming.
./main_axcl_x86 \
  --template_filename_axmodel "${AXMODEL_DIR}/qwen2_5_vl_p128_l%d_together.axmodel" \
  --axmodel_num 36 \
  --filename_image_encoder_axmodedl "${AXMODEL_DIR}/Qwen2.5-VL-3B-Instruct_vision_nchw448.axmodel" \
  --use_mmap_load_embed 1 \
  --filename_tokenizer_model "http://127.0.0.1:12345" \
  --filename_post_axmodel "${AXMODEL_DIR}/qwen2_5_vl_post.axmodel" \
  --filename_tokens_embed "${AXMODEL_DIR}/model.embed_tokens.weight.bfloat16.bin" \
  --tokens_embed_num 151936 \
  --tokens_embed_size 2048 \
  --live_print 1 \
  --img_width 448 \
  --img_height 448 \
  --vision_start_token_id 151652 \
  --post_config_path post_config.json \
  --devices 0,
run_qwen2_5_vl_video_axcl_aarch64.sh ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/usr/bin/env bash
# Run the Qwen2.5-VL video demo on an AXCL accelerator card (aarch64 host).
# Uses the NHWC vision encoder and 308x308 frames; expects the tokenizer HTTP
# server on 127.0.0.1:12345 and post_config.json in the current directory.
set -euo pipefail

readonly AXMODEL_DIR=./Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512

# NOTE(review): "--filename_image_encoder_axmodedl" looks misspelled, but the
# flag must match exactly what main_axcl_aarch64 parses — confirm before renaming.
./main_axcl_aarch64 \
  --template_filename_axmodel "${AXMODEL_DIR}/qwen2_5_vl_p128_l%d_together.axmodel" \
  --axmodel_num 36 \
  --filename_image_encoder_axmodedl "${AXMODEL_DIR}/Qwen2.5-VL-3B-Instruct_vision_nhwc.axmodel" \
  --use_mmap_load_embed 1 \
  --filename_tokenizer_model "http://127.0.0.1:12345" \
  --filename_post_axmodel "${AXMODEL_DIR}/qwen2_5_vl_post.axmodel" \
  --filename_tokens_embed "${AXMODEL_DIR}/model.embed_tokens.weight.bfloat16.bin" \
  --tokens_embed_num 151936 \
  --tokens_embed_size 2048 \
  --live_print 1 \
  --img_width 308 \
  --img_height 308 \
  --vision_start_token_id 151652 \
  --post_config_path post_config.json \
  --devices 0,
run_qwen2_5_vl_video_axcl_x86.sh ADDED
@@ -0,0 +1,18 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/usr/bin/env bash
# Run the Qwen2.5-VL video demo on an AXCL accelerator card (x86 host).
# Uses the NHWC vision encoder and 308x308 frames; expects the tokenizer HTTP
# server on 127.0.0.1:12345 and post_config.json in the current directory.
set -euo pipefail

readonly AXMODEL_DIR=./Qwen2.5-VL-3B-Instruct-AX650-chunk_prefill_512

# NOTE(review): "--filename_image_encoder_axmodedl" looks misspelled, but the
# flag must match exactly what main_axcl_x86 parses — confirm before renaming.
./main_axcl_x86 \
  --template_filename_axmodel "${AXMODEL_DIR}/qwen2_5_vl_p128_l%d_together.axmodel" \
  --axmodel_num 36 \
  --filename_image_encoder_axmodedl "${AXMODEL_DIR}/Qwen2.5-VL-3B-Instruct_vision_nhwc.axmodel" \
  --use_mmap_load_embed 1 \
  --filename_tokenizer_model "http://127.0.0.1:12345" \
  --filename_post_axmodel "${AXMODEL_DIR}/qwen2_5_vl_post.axmodel" \
  --filename_tokens_embed "${AXMODEL_DIR}/model.embed_tokens.weight.bfloat16.bin" \
  --tokens_embed_num 151936 \
  --tokens_embed_size 2048 \
  --live_print 1 \
  --img_width 308 \
  --img_height 308 \
  --vision_start_token_id 151652 \
  --post_config_path post_config.json \
  --devices 0,