Upload 12 files

- .gitattributes +1 -0
- config.json +27 -0
- generation_config.json +6 -0
- openvino_config.json +28 -0
- openvino_detokenizer.bin +3 -0
- openvino_detokenizer.xml +183 -0
- openvino_model.bin +3 -0
- openvino_model.xml +0 -0
- openvino_tokenizer.bin +3 -0
- openvino_tokenizer.xml +777 -0
- special_tokens_map.json +23 -0
- tokenizer.json +3 -0
- tokenizer_config.json +0 -0
.gitattributes
CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+tokenizer.json filter=lfs diff=lfs merge=lfs -text
config.json
ADDED
@@ -0,0 +1,27 @@
{
  "_name_or_path": "/media/ecomm/c0889304-9e30-4f04-b290-c7db463872c6/Models/Pytorch/Ministral-8B-Instruct-2410-HF/",
  "architectures": [
    "MistralForCausalLM"
  ],
  "attention_dropout": 0.0,
  "bos_token_id": 1,
  "eos_token_id": 2,
  "head_dim": 128,
  "hidden_act": "silu",
  "hidden_size": 4096,
  "initializer_range": 0.02,
  "intermediate_size": 12288,
  "max_position_embeddings": 32768,
  "model_type": "mistral",
  "num_attention_heads": 32,
  "num_hidden_layers": 36,
  "num_key_value_heads": 8,
  "rms_norm_eps": 1e-05,
  "rope_theta": 100000000.0,
  "sliding_window": 32768,
  "tie_word_embeddings": false,
  "torch_dtype": "bfloat16",
  "transformers_version": "4.46.3",
  "use_cache": true,
  "vocab_size": 131072
}
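The fields above are internally consistent: 32 attention heads with head_dim 128 give the hidden_size of 4096, and 8 key/value heads mean 4-way grouped-query attention over a 131,072-entry vocabulary. A minimal sketch for reading these fields back with transformers; the local directory name is an assumption, not part of this upload:

```python
from transformers import AutoConfig

# Hypothetical local checkout of this repository.
model_dir = "./Ministral-8B-Instruct-2410-int4-ov"

cfg = AutoConfig.from_pretrained(model_dir)
assert cfg.model_type == "mistral"
assert cfg.num_attention_heads * cfg.head_dim == cfg.hidden_size    # 32 * 128 == 4096
assert cfg.num_attention_heads // cfg.num_key_value_heads == 4      # grouped-query attention
print(cfg.num_hidden_layers, cfg.vocab_size)                        # 36 131072
```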
generation_config.json
ADDED
@@ -0,0 +1,6 @@
{
  "_from_model_config": true,
  "bos_token_id": 1,
  "eos_token_id": 2,
  "transformers_version": "4.46.3"
}
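These are the defaults that generate() falls back on when no explicit generation arguments are passed. A quick sketch, using the same assumed local directory, to confirm the stop token:

```python
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("./Ministral-8B-Instruct-2410-int4-ov")
print(gen_cfg.bos_token_id, gen_cfg.eos_token_id)  # 1 2, i.e. <s> and </s>
```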
openvino_config.json
ADDED
@@ -0,0 +1,28 @@
{
  "compression": null,
  "dtype": "int4",
  "input_info": null,
  "optimum_version": "1.24.0.dev0",
  "quantization_config": {
    "all_layers": null,
    "backup_precision": null,
    "bits": 4,
    "dataset": "wikitext2",
    "gptq": null,
    "group_size": 128,
    "ignored_scope": null,
    "lora_correction": null,
    "num_samples": null,
    "processor": null,
    "quant_method": "awq",
    "ratio": 1.0,
    "scale_estimation": null,
    "sensitivity_metric": null,
    "sym": false,
    "tokenizer": null,
    "trust_remote_code": false,
    "weight_format": "int4"
  },
  "save_onnx_model": false,
  "transformers_version": "4.46.3"
}
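This file records how the weights were compressed: weight-only, asymmetric INT4 in groups of 128 channels, applied to all eligible layers (ratio 1.0), with AWQ using wikitext2 as calibration data. A sketch of how an equivalent export could be reproduced with optimum-intel; the source checkpoint id is an assumption and keyword names can shift between optimum-intel releases:

```python
from optimum.intel import OVModelForCausalLM, OVWeightQuantizationConfig

# Mirrors the "quantization_config" block above.
q_config = OVWeightQuantizationConfig(
    bits=4,               # INT4 weight format
    sym=False,            # asymmetric quantization ("sym": false)
    group_size=128,
    ratio=1.0,            # quantize all eligible weight layers
    dataset="wikitext2",  # calibration data for the data-aware AWQ pass
    quant_method="awq",
)

model = OVModelForCausalLM.from_pretrained(
    "mistralai/Ministral-8B-Instruct-2410",  # assumed source checkpoint
    export=True,
    quantization_config=q_config,
)
model.save_pretrained("./Ministral-8B-Instruct-2410-int4-ov")
```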
openvino_detokenizer.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:85b66e1463fe90698f45b5d9bcde7f22cb67f8ca732c25b5cc7afcd55fe5f339
size 1415405
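Git stores only this three-line pointer; the 1,415,405-byte payload itself lives in LFS storage and is addressed by the SHA-256 above. A small sketch, assuming the binary has already been pulled locally, for checking a download against the pointer:

```python
import hashlib
from pathlib import Path

def lfs_oid(path: Path) -> str:
    """Return the sha256 hex digest that git-lfs records as the object id."""
    return hashlib.sha256(path.read_bytes()).hexdigest()

blob = Path("openvino_detokenizer.bin")
assert blob.stat().st_size == 1415405
assert lfs_oid(blob) == "85b66e1463fe90698f45b5d9bcde7f22cb67f8ca732c25b5cc7afcd55fe5f339"
```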
openvino_detokenizer.xml
ADDED
@@ -0,0 +1,183 @@
<?xml version="1.0"?>
<net name="detokenizer" version="11">
	<layers>
		<layer id="0" name="Parameter_146872" type="Parameter" version="opset1">
			<data shape="?,?" element_type="i64" />
			<output>
				<port id="0" precision="I64" names="Parameter_146872">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="1" name="Convert_146883" type="Convert" version="opset1">
			<data destination_type="i32" />
			<input>
				<port id="0" precision="I64">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="I32">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="2" name="Constant_146839" type="Const" version="opset1">
			<data element_type="u8" shape="1415405" offset="0" size="1415405" />
			<output>
				<port id="0" precision="U8">
					<dim>1415405</dim>
				</port>
			</output>
		</layer>
		<layer id="3" name="StringTensorUnpack_146840" type="StringTensorUnpack" version="extension">
			<data mode="begins_ends" />
			<input>
				<port id="0" precision="U8">
					<dim>1415405</dim>
				</port>
			</input>
			<output>
				<port id="1" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="2" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="3" precision="U8">
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="4" name="VocabDecoder_146873" type="VocabDecoder" version="extension">
<data skip_tokens="0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, 253, 254, 255, 256, 257, 258, 259, 260, 261, 262, 263, 264, 265, 266, 267, 268, 269, 270, 271, 272, 273, 274, 275, 276, 277, 278, 279, 280, 281, 282, 283, 284, 285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298, 299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, 313, 314, 315, 316, 317, 318, 319, 320, 321, 322, 323, 324, 325, 326, 327, 328, 329, 330, 331, 332, 333, 334, 335, 336, 337, 338, 339, 340, 341, 342, 343, 344, 345, 346, 347, 348, 349, 350, 351, 352, 353, 354, 355, 356, 357, 358, 359, 360, 361, 362, 363, 364, 365, 366, 367, 368, 369, 370, 371, 372, 373, 374, 375, 376, 377, 378, 379, 380, 381, 382, 383, 384, 385, 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, 396, 397, 398, 399, 400, 401, 402, 403, 404, 405, 406, 407, 408, 409, 410, 411, 412, 413, 414, 415, 416, 417, 418, 419, 420, 421, 422, 423, 424, 425, 426, 427, 428, 429, 430, 431, 432, 433, 434, 435, 436, 437, 438, 439, 440, 441, 442, 443, 444, 445, 446, 447, 448, 449, 450, 451, 452, 453, 454, 455, 456, 457, 458, 459, 460, 461, 462, 463, 464, 465, 466, 467, 468, 469, 470, 471, 472, 473, 474, 475, 476, 477, 478, 479, 480, 481, 482, 483, 484, 485, 486, 487, 488, 489, 490, 491, 492, 493, 494, 495, 496, 497, 498, 499, 500, 501, 502, 503, 504, 505, 506, 507, 508, 509, 510, 511, 512, 513, 514, 515, 516, 517, 518, 519, 520, 521, 522, 523, 524, 525, 526, 527, 528, 529, 530, 531, 532, 533, 534, 535, 536, 537, 538, 539, 540, 541, 542, 543, 544, 545, 546, 547, 548, 549, 550, 551, 552, 553, 554, 555, 556, 557, 558, 559, 560, 561, 562, 563, 564, 565, 566, 567, 568, 569, 570, 571, 572, 573, 574, 575, 576, 577, 578, 579, 580, 581, 582, 583, 584, 585, 586, 587, 588, 589, 590, 591, 592, 593, 594, 595, 596, 597, 598, 599, 600, 601, 602, 603, 604, 605, 606, 607, 608, 609, 610, 611, 612, 613, 614, 615, 616, 617, 618, 619, 620, 621, 622, 623, 624, 625, 626, 627, 628, 629, 630, 631, 632, 633, 634, 635, 636, 637, 638, 639, 640, 641, 642, 643, 644, 645, 646, 647, 648, 649, 650, 651, 652, 653, 654, 655, 656, 657, 658, 659, 660, 661, 662, 663, 664, 665, 666, 667, 668, 669, 670, 671, 672, 673, 674, 675, 676, 677, 678, 679, 680, 681, 682, 683, 684, 685, 686, 687, 688, 689, 690, 691, 692, 693, 694, 695, 696, 697, 698, 699, 700, 701, 702, 703, 704, 705, 706, 707, 708, 709, 710, 711, 712, 713, 714, 715, 716, 717, 718, 719, 720, 721, 722, 723, 724, 725, 726, 727, 728, 
729, 730, 731, 732, 733, 734, 735, 736, 737, 738, 739, 740, 741, 742, 743, 744, 745, 746, 747, 748, 749, 750, 751, 752, 753, 754, 755, 756, 757, 758, 759, 760, 761, 762, 763, 764, 765, 766, 767, 768, 769, 770, 771, 772, 773, 774, 775, 776, 777, 778, 779, 780, 781, 782, 783, 784, 785, 786, 787, 788, 789, 790, 791, 792, 793, 794, 795, 796, 797, 798, 799, 800, 801, 802, 803, 804, 805, 806, 807, 808, 809, 810, 811, 812, 813, 814, 815, 816, 817, 818, 819, 820, 821, 822, 823, 824, 825, 826, 827, 828, 829, 830, 831, 832, 833, 834, 835, 836, 837, 838, 839, 840, 841, 842, 843, 844, 845, 846, 847, 848, 849, 850, 851, 852, 853, 854, 855, 856, 857, 858, 859, 860, 861, 862, 863, 864, 865, 866, 867, 868, 869, 870, 871, 872, 873, 874, 875, 876, 877, 878, 879, 880, 881, 882, 883, 884, 885, 886, 887, 888, 889, 890, 891, 892, 893, 894, 895, 896, 897, 898, 899, 900, 901, 902, 903, 904, 905, 906, 907, 908, 909, 910, 911, 912, 913, 914, 915, 916, 917, 918, 919, 920, 921, 922, 923, 924, 925, 926, 927, 928, 929, 930, 931, 932, 933, 934, 935, 936, 937, 938, 939, 940, 941, 942, 943, 944, 945, 946, 947, 948, 949, 950, 951, 952, 953, 954, 955, 956, 957, 958, 959, 960, 961, 962, 963, 964, 965, 966, 967, 968, 969, 970, 971, 972, 973, 974, 975, 976, 977, 978, 979, 980, 981, 982, 983, 984, 985, 986, 987, 988, 989, 990, 991, 992, 993, 994, 995, 996, 997, 998, 999" />
			<input>
				<port id="0" precision="I32">
					<dim>-1</dim>
					<dim>-1</dim>
				</port>
				<port id="1" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="2" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="3" precision="U8">
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="4" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="5" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="6" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="7" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="8" precision="U8">
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="5" name="FuzeRagged_146874" type="FuzeRagged" version="extension">
			<input>
				<port id="0" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="1" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="2" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="3" precision="I32">
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="4" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="5" precision="I32">
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="6" name="StringTensorPack_146875" type="StringTensorPack" version="extension">
			<data mode="begins_ends" />
			<input>
				<port id="0" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="1" precision="I32">
					<dim>-1</dim>
				</port>
				<port id="2" precision="U8">
					<dim>-1</dim>
				</port>
			</input>
			<output>
				<port id="3" precision="STRING" names="string_output">
					<dim>-1</dim>
				</port>
			</output>
		</layer>
		<layer id="7" name="Result_146876" type="Result" version="opset1">
			<input>
				<port id="0" precision="STRING">
					<dim>-1</dim>
				</port>
			</input>
		</layer>
	</layers>
	<edges>
		<edge from-layer="0" from-port="0" to-layer="1" to-port="0" />
		<edge from-layer="1" from-port="1" to-layer="4" to-port="0" />
		<edge from-layer="2" from-port="0" to-layer="3" to-port="0" />
		<edge from-layer="3" from-port="1" to-layer="4" to-port="1" />
		<edge from-layer="3" from-port="2" to-layer="4" to-port="2" />
		<edge from-layer="3" from-port="3" to-layer="4" to-port="3" />
		<edge from-layer="4" from-port="4" to-layer="5" to-port="0" />
		<edge from-layer="4" from-port="5" to-layer="5" to-port="1" />
		<edge from-layer="4" from-port="6" to-layer="5" to-port="2" />
		<edge from-layer="4" from-port="7" to-layer="5" to-port="3" />
		<edge from-layer="4" from-port="8" to-layer="6" to-port="2" />
		<edge from-layer="5" from-port="4" to-layer="6" to-port="0" />
		<edge from-layer="5" from-port="5" to-layer="6" to-port="1" />
		<edge from-layer="6" from-port="3" to-layer="7" to-port="0" />
	</edges>
	<rt_info>
		<add_attention_mask value="True" />
		<add_prefix_space />
		<add_special_tokens value="True" />
		<bos_token_id value="1" />
<chat_template value="{%- if messages[0]["role"] == "system" %} {%- set system_message = messages[0]["content"] %} {%- set loop_messages = messages[1:] %} {%- else %} {%- set loop_messages = messages %} {%- endif %} {%- if not tools is defined %} {%- set tools = none %} {%- endif %} {%- set user_messages = loop_messages | selectattr("role", "equalto", "user") | list %} {#- This block checks for alternating user/assistant messages, skipping tool calling messages #} {%- set ns = namespace() %} {%- set ns.index = 0 %} {%- for message in loop_messages %} {%- if not (message.role == "tool" or message.role == "tool_results" or (message.tool_calls is defined and message.tool_calls is not none)) %} {%- if (message["role"] == "user") != (ns.index % 2 == 0) %} {{- raise_exception("After the optional system message, conversation roles must alternate user/assistant/user/assistant/...") }} {%- endif %} {%- set ns.index = ns.index + 1 %} {%- endif %} {%- endfor %} {{- bos_token }} {%- for message in loop_messages %} {%- if message["role"] == "user" %} {%- if tools is not none and (message == user_messages[-1]) %} {{- "[AVAILABLE_TOOLS][" }} {%- for tool in tools %} {%- set tool = tool.function %} {{- '{"type": "function", "function": {' }} {%- for key, val in tool.items() if key != "return" %} {%- if val is string %} {{- '"' + key + '": "' + val + '"' }} {%- else %} {{- '"' + key + '": ' + val|tojson }} {%- endif %} {%- if not loop.last %} {{- ", " }} {%- endif %} {%- endfor %} {{- "}}" }} {%- if not loop.last %} {{- ", " }} {%- else %} {{- "]" }} {%- endif %} {%- endfor %} {{- "[/AVAILABLE_TOOLS]" }} {%- endif %} {%- if loop.last and system_message is defined %} {{- "[INST]" + system_message + "\n\n" + message["content"] + "[/INST]" }} {%- else %} {{- "[INST]" + message["content"] + "[/INST]" }} {%- endif %} {%- elif (message.tool_calls is defined and message.tool_calls is not none) %} {{- "[TOOL_CALLS][" }} {%- for tool_call in message.tool_calls %} {%- set out = tool_call.function|tojson %} {{- out[:-1] }} {%- if not tool_call.id is defined or tool_call.id|length != 9 %} {{- raise_exception("Tool call IDs should be alphanumeric strings with length 9!") }} {%- endif %} {{- ', "id": "' + tool_call.id + '"}' }} {%- if not loop.last %} {{- ", " }} {%- else %} {{- "]" + eos_token }} {%- endif %} {%- endfor %} {%- elif message["role"] == "assistant" %} {{- message["content"] + eos_token}} {%- elif message["role"] == "tool_results" or message["role"] == "tool" %} {%- if message.content is defined and message.content.content is defined %} {%- set content = message.content.content %} {%- else %} {%- set content = message.content %} {%- endif %} {{- '[TOOL_RESULTS]{"content": ' + content|string + ", " }} {%- if not message.tool_call_id is defined or message.tool_call_id|length != 9 %} {{- raise_exception("Tool call IDs should be alphanumeric strings with length 9!") }} {%- endif %} {{- '"call_id": "' + message.tool_call_id + '"}[/TOOL_RESULTS]' }} {%- else %} {{- raise_exception("Only user and assistant roles are supported, with the exception of an initial optional system message!") }} {%- endif %} {%- endfor %} " />
		<clean_up_tokenization_spaces />
		<detokenizer_input_type value="i64" />
		<eos_token_id value="2" />
		<handle_special_tokens_with_re />
		<number_of_inputs value="1" />
		<openvino_tokenizers_version value="2024.5.0.0" />
		<openvino_version value="2024.5.0" />
		<original_tokenizer_class value="<class 'transformers.models.llama.tokenization_llama_fast.LlamaTokenizerFast'>" />
		<sentencepiece_version value="0.2.0" />
		<skip_special_tokens value="True" />
		<streaming_detokenizer value="False" />
		<tiktoken_version value="0.7.0" />
		<tokenizer_output_type value="i64" />
		<tokenizers_version value="0.20.3" />
		<transformers_version value="4.46.3" />
		<use_max_padding value="False" />
		<use_sentencepiece_backend value="False" />
		<utf8_replace_mode />
		<with_detokenizer value="True" />
	</rt_info>
</net>
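The graph above is a short pipeline: a packed vocabulary constant is unpacked, VocabDecoder maps token ids to byte spans while skipping ids 0 through 999 (the reserved special-token range listed in skip_tokens), FuzeRagged joins the ragged pieces, and StringTensorPack emits the string_output tensor. Those ops come from the openvino-tokenizers extension, so that package must be imported before the graph is compiled. A sketch, assuming the XML and BIN files sit in the current directory:

```python
import numpy as np
import openvino as ov
import openvino_tokenizers  # noqa: F401  (registers the extension ops used by this graph)

core = ov.Core()
detokenizer = core.compile_model("openvino_detokenizer.xml", "CPU")

# One batch of token ids (i64, shape [batch, seq_len]); the ids here are made up.
token_ids = np.array([[1, 22177, 1044, 29576, 2]], dtype=np.int64)
decoded = detokenizer(token_ids)[0]
print(decoded)  # decoded text with special tokens skipped
```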
openvino_model.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:7f4400442499a079105c0e5738e2592d7d2eb6b1e8e681b01855b9ef8e630519
size 4684268108
openvino_model.xml
ADDED
The diff for this file is too large to render. See raw diff.
openvino_tokenizer.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b3853987129c0b402f05041a313a5d8bc2edcec4dc8d8b8f222225d9c50cffec
size 5467273
openvino_tokenizer.xml
ADDED
@@ -0,0 +1,777 @@
1 |
+
<?xml version="1.0"?>
|
2 |
+
<net name="tokenizer" version="11">
|
3 |
+
<layers>
|
4 |
+
<layer id="0" name="Parameter_146758" type="Parameter" version="opset1">
|
5 |
+
<data shape="?" element_type="string" />
|
6 |
+
<output>
|
7 |
+
<port id="0" precision="STRING" names="Parameter_146758">
|
8 |
+
<dim>-1</dim>
|
9 |
+
</port>
|
10 |
+
</output>
|
11 |
+
</layer>
|
12 |
+
<layer id="1" name="Constant_146856" type="Const" version="opset1">
|
13 |
+
<data element_type="i32" shape="" offset="0" size="4" />
|
14 |
+
<output>
|
15 |
+
<port id="0" precision="I32" />
|
16 |
+
</output>
|
17 |
+
</layer>
|
18 |
+
<layer id="2" name="Constant_146857" type="Const" version="opset1">
|
19 |
+
<data element_type="i32" shape="" offset="4" size="4" />
|
20 |
+
<output>
|
21 |
+
<port id="0" precision="I32" />
|
22 |
+
</output>
|
23 |
+
</layer>
|
24 |
+
<layer id="3" name="Constant_146858" type="Const" version="opset1">
|
25 |
+
<data element_type="i32" shape="1" offset="4" size="4" />
|
26 |
+
<output>
|
27 |
+
<port id="0" precision="I32">
|
28 |
+
<dim>1</dim>
|
29 |
+
</port>
|
30 |
+
</output>
|
31 |
+
</layer>
|
32 |
+
<layer id="4" name="Constant_146764" type="Const" version="opset1">
|
33 |
+
<data element_type="i64" shape="" offset="8" size="8" />
|
34 |
+
<output>
|
35 |
+
<port id="0" precision="I64" />
|
36 |
+
</output>
|
37 |
+
</layer>
|
38 |
+
<layer id="5" name="StringTensorUnpack_146759" type="StringTensorUnpack" version="extension">
|
39 |
+
<data mode="begins_ends" />
|
40 |
+
<input>
|
41 |
+
<port id="0" precision="STRING">
|
42 |
+
<dim>-1</dim>
|
43 |
+
</port>
|
44 |
+
</input>
|
45 |
+
<output>
|
46 |
+
<port id="1" precision="I32">
|
47 |
+
<dim>-1</dim>
|
48 |
+
</port>
|
49 |
+
<port id="2" precision="I32">
|
50 |
+
<dim>-1</dim>
|
51 |
+
</port>
|
52 |
+
<port id="3" precision="U8">
|
53 |
+
<dim>-1</dim>
|
54 |
+
</port>
|
55 |
+
</output>
|
56 |
+
</layer>
|
57 |
+
<layer id="6" name="ShapeOf_146760" type="ShapeOf" version="opset3">
|
58 |
+
<data output_type="i64" />
|
59 |
+
<input>
|
60 |
+
<port id="0" precision="I32">
|
61 |
+
<dim>-1</dim>
|
62 |
+
</port>
|
63 |
+
</input>
|
64 |
+
<output>
|
65 |
+
<port id="1" precision="I64">
|
66 |
+
<dim>1</dim>
|
67 |
+
</port>
|
68 |
+
</output>
|
69 |
+
</layer>
|
70 |
+
<layer id="7" name="Constant_146761" type="Const" version="opset1">
|
71 |
+
<data element_type="i64" shape="" offset="8" size="8" />
|
72 |
+
<output>
|
73 |
+
<port id="0" precision="I64" />
|
74 |
+
</output>
|
75 |
+
</layer>
|
76 |
+
<layer id="8" name="Constant_146762" type="Const" version="opset1">
|
77 |
+
<data element_type="i64" shape="" offset="8" size="8" />
|
78 |
+
<output>
|
79 |
+
<port id="0" precision="I64" />
|
80 |
+
</output>
|
81 |
+
</layer>
|
82 |
+
<layer id="9" name="Gather_146763" type="Gather" version="opset8">
|
83 |
+
<data batch_dims="0" />
|
84 |
+
<input>
|
85 |
+
<port id="0" precision="I64">
|
86 |
+
<dim>1</dim>
|
87 |
+
</port>
|
88 |
+
<port id="1" precision="I64" />
|
89 |
+
<port id="2" precision="I64" />
|
90 |
+
</input>
|
91 |
+
<output>
|
92 |
+
<port id="3" precision="I64" />
|
93 |
+
</output>
|
94 |
+
</layer>
|
95 |
+
<layer id="10" name="Constant_146765" type="Const" version="opset1">
|
96 |
+
<data element_type="i64" shape="" offset="16" size="8" />
|
97 |
+
<output>
|
98 |
+
<port id="0" precision="I64" />
|
99 |
+
</output>
|
100 |
+
</layer>
|
101 |
+
<layer id="11" name="Range_146766" type="Range" version="opset4">
|
102 |
+
<data output_type="i32" />
|
103 |
+
<input>
|
104 |
+
<port id="0" precision="I64" />
|
105 |
+
<port id="1" precision="I64" />
|
106 |
+
<port id="2" precision="I64" />
|
107 |
+
</input>
|
108 |
+
<output>
|
109 |
+
<port id="3" precision="I32">
|
110 |
+
<dim>-1</dim>
|
111 |
+
</port>
|
112 |
+
</output>
|
113 |
+
</layer>
|
114 |
+
<layer id="12" name="Constant_146767" type="Const" version="opset1">
|
115 |
+
<data element_type="i64" shape="" offset="16" size="8" />
|
116 |
+
<output>
|
117 |
+
<port id="0" precision="I64" />
|
118 |
+
</output>
|
119 |
+
</layer>
|
120 |
+
<layer id="13" name="Constant_146768" type="Const" version="opset1">
|
121 |
+
<data element_type="i64" shape="" offset="16" size="8" />
|
122 |
+
<output>
|
123 |
+
<port id="0" precision="I64" />
|
124 |
+
</output>
|
125 |
+
</layer>
|
126 |
+
<layer id="14" name="Add_146769" type="Add" version="opset1">
|
127 |
+
<data auto_broadcast="numpy" />
|
128 |
+
<input>
|
129 |
+
<port id="0" precision="I64" />
|
130 |
+
<port id="1" precision="I64" />
|
131 |
+
</input>
|
132 |
+
<output>
|
133 |
+
<port id="2" precision="I64" />
|
134 |
+
</output>
|
135 |
+
</layer>
|
136 |
+
<layer id="15" name="Constant_146770" type="Const" version="opset1">
|
137 |
+
<data element_type="i64" shape="" offset="16" size="8" />
|
138 |
+
<output>
|
139 |
+
<port id="0" precision="I64" />
|
140 |
+
</output>
|
141 |
+
</layer>
|
142 |
+
<layer id="16" name="Range_146771" type="Range" version="opset4">
|
143 |
+
<data output_type="i32" />
|
144 |
+
<input>
|
145 |
+
<port id="0" precision="I64" />
|
146 |
+
<port id="1" precision="I64" />
|
147 |
+
<port id="2" precision="I64" />
|
148 |
+
</input>
|
149 |
+
<output>
|
150 |
+
<port id="3" precision="I32">
|
151 |
+
<dim>-1</dim>
|
152 |
+
</port>
|
153 |
+
</output>
|
154 |
+
</layer>
|
155 |
+
<layer id="17" name="Constant_146833" type="Const" version="opset1">
|
156 |
+
<data element_type="u8" shape="17854" offset="24" size="17854" />
|
157 |
+
<output>
|
158 |
+
<port id="0" precision="U8">
|
159 |
+
<dim>17854</dim>
|
160 |
+
</port>
|
161 |
+
</output>
|
162 |
+
</layer>
|
163 |
+
<layer id="18" name="SpecialTokensSplit_146834" type="SpecialTokensSplit" version="extension">
|
164 |
+
<input>
|
165 |
+
<port id="0" precision="I32">
|
166 |
+
<dim>-1</dim>
|
167 |
+
</port>
|
168 |
+
<port id="1" precision="I32">
|
169 |
+
<dim>-1</dim>
|
170 |
+
</port>
|
171 |
+
<port id="2" precision="I32">
|
172 |
+
<dim>-1</dim>
|
173 |
+
</port>
|
174 |
+
<port id="3" precision="I32">
|
175 |
+
<dim>-1</dim>
|
176 |
+
</port>
|
177 |
+
<port id="4" precision="U8">
|
178 |
+
<dim>-1</dim>
|
179 |
+
</port>
|
180 |
+
<port id="5" precision="U8">
|
181 |
+
<dim>17854</dim>
|
182 |
+
</port>
|
183 |
+
</input>
|
184 |
+
<output>
|
185 |
+
<port id="6" precision="I32">
|
186 |
+
<dim>-1</dim>
|
187 |
+
</port>
|
188 |
+
<port id="7" precision="I32">
|
189 |
+
<dim>-1</dim>
|
190 |
+
</port>
|
191 |
+
<port id="8" precision="I32">
|
192 |
+
<dim>-1</dim>
|
193 |
+
</port>
|
194 |
+
<port id="9" precision="I32">
|
195 |
+
<dim>-1</dim>
|
196 |
+
</port>
|
197 |
+
<port id="10" precision="U8">
|
198 |
+
<dim>-1</dim>
|
199 |
+
</port>
|
200 |
+
<port id="11" precision="BOOL">
|
201 |
+
<dim>-1</dim>
|
202 |
+
</port>
|
203 |
+
</output>
|
204 |
+
</layer>
|
205 |
+
<layer id="19" name="Constant_146836" type="Const" version="opset1">
|
206 |
+
<data element_type="u8" shape="211" offset="17878" size="211" />
|
207 |
+
<output>
|
208 |
+
<port id="0" precision="U8">
|
209 |
+
<dim>211</dim>
|
210 |
+
</port>
|
211 |
+
</output>
|
212 |
+
</layer>
|
213 |
+
<layer id="20" name="RegexSplit_146837" type="RegexSplit" version="extension">
|
214 |
+
<data behaviour="isolate" invert="false" max_splits="-1" />
|
215 |
+
<input>
|
216 |
+
<port id="0" precision="I32">
|
217 |
+
<dim>-1</dim>
|
218 |
+
</port>
|
219 |
+
<port id="1" precision="I32">
|
220 |
+
<dim>-1</dim>
|
221 |
+
</port>
|
222 |
+
<port id="2" precision="I32">
|
223 |
+
<dim>-1</dim>
|
224 |
+
</port>
|
225 |
+
<port id="3" precision="I32">
|
226 |
+
<dim>-1</dim>
|
227 |
+
</port>
|
228 |
+
<port id="4" precision="U8">
|
229 |
+
<dim>-1</dim>
|
230 |
+
</port>
|
231 |
+
<port id="5" precision="BOOL">
|
232 |
+
<dim>-1</dim>
|
233 |
+
</port>
|
234 |
+
<port id="6" precision="U8">
|
235 |
+
<dim>211</dim>
|
236 |
+
</port>
|
237 |
+
</input>
|
238 |
+
<output>
|
239 |
+
<port id="7" precision="I32">
|
240 |
+
<dim>-1</dim>
|
241 |
+
</port>
|
242 |
+
<port id="8" precision="I32">
|
243 |
+
<dim>-1</dim>
|
244 |
+
</port>
|
245 |
+
<port id="9" precision="I32">
|
246 |
+
<dim>-1</dim>
|
247 |
+
</port>
|
248 |
+
<port id="10" precision="I32">
|
249 |
+
<dim>-1</dim>
|
250 |
+
</port>
|
251 |
+
<port id="11" precision="U8">
|
252 |
+
<dim>-1</dim>
|
253 |
+
</port>
|
254 |
+
<port id="12" precision="BOOL">
|
255 |
+
<dim>-1</dim>
|
256 |
+
</port>
|
257 |
+
</output>
|
258 |
+
</layer>
|
259 |
+
<layer id="21" name="Constant_146839" type="Const" version="opset1">
|
260 |
+
<data element_type="u8" shape="1415405" offset="18089" size="1415405" />
|
261 |
+
<output>
|
262 |
+
<port id="0" precision="U8">
|
263 |
+
<dim>1415405</dim>
|
264 |
+
</port>
|
265 |
+
</output>
|
266 |
+
</layer>
|
267 |
+
<layer id="22" name="StringTensorUnpack_146840" type="StringTensorUnpack" version="extension">
|
268 |
+
<data mode="begins_ends" />
|
269 |
+
<input>
|
270 |
+
<port id="0" precision="U8">
|
271 |
+
<dim>1415405</dim>
|
272 |
+
</port>
|
273 |
+
</input>
|
274 |
+
<output>
|
275 |
+
<port id="1" precision="I32">
|
276 |
+
<dim>-1</dim>
|
277 |
+
</port>
|
278 |
+
<port id="2" precision="I32">
|
279 |
+
<dim>-1</dim>
|
280 |
+
</port>
|
281 |
+
<port id="3" precision="U8">
|
282 |
+
<dim>-1</dim>
|
283 |
+
</port>
|
284 |
+
</output>
|
285 |
+
</layer>
|
286 |
+
<layer id="23" name="Constant_146845" type="Const" version="opset1">
|
287 |
+
<data element_type="u8" shape="2067404" offset="1433494" size="2067404" />
|
288 |
+
<output>
|
289 |
+
<port id="0" precision="U8">
|
290 |
+
<dim>2067404</dim>
|
291 |
+
</port>
|
292 |
+
</output>
|
293 |
+
</layer>
|
294 |
+
<layer id="24" name="StringTensorUnpack_146846" type="StringTensorUnpack" version="extension">
|
295 |
+
<data mode="begins_ends" />
|
296 |
+
<input>
|
297 |
+
<port id="0" precision="U8">
|
298 |
+
<dim>2067404</dim>
|
299 |
+
</port>
|
300 |
+
</input>
|
301 |
+
<output>
|
302 |
+
<port id="1" precision="I32">
|
303 |
+
<dim>-1</dim>
|
304 |
+
</port>
|
305 |
+
<port id="2" precision="I32">
|
306 |
+
<dim>-1</dim>
|
307 |
+
</port>
|
308 |
+
<port id="3" precision="U8">
|
309 |
+
<dim>-1</dim>
|
310 |
+
</port>
|
311 |
+
</output>
|
312 |
+
</layer>
|
313 |
+
<layer id="25" name="Constant_146848" type="Const" version="opset1">
|
314 |
+
<data element_type="u8" shape="1945525" offset="3500898" size="1945525" />
|
315 |
+
<output>
|
316 |
+
<port id="0" precision="U8">
|
317 |
+
<dim>1945525</dim>
|
318 |
+
</port>
|
319 |
+
</output>
|
320 |
+
</layer>
|
321 |
+
<layer id="26" name="StringTensorUnpack_146849" type="StringTensorUnpack" version="extension">
|
322 |
+
<data mode="begins_ends" />
|
323 |
+
<input>
|
324 |
+
<port id="0" precision="U8">
|
325 |
+
<dim>1945525</dim>
|
326 |
+
</port>
|
327 |
+
</input>
|
328 |
+
<output>
|
329 |
+
<port id="1" precision="I32">
|
330 |
+
<dim>-1</dim>
|
331 |
+
</port>
|
332 |
+
<port id="2" precision="I32">
|
333 |
+
<dim>-1</dim>
|
334 |
+
</port>
|
335 |
+
<port id="3" precision="U8">
|
336 |
+
<dim>-1</dim>
|
337 |
+
</port>
|
338 |
+
</output>
|
339 |
+
</layer>
|
340 |
+
<layer id="27" name="Constant_146842" type="Const" version="opset1">
|
341 |
+
<data element_type="u8" shape="16850" offset="5446423" size="16850" />
|
342 |
+
<output>
|
343 |
+
<port id="0" precision="U8">
|
344 |
+
<dim>16850</dim>
|
345 |
+
</port>
|
346 |
+
</output>
|
347 |
+
</layer>
|
348 |
+
<layer id="28" name="StringTensorUnpack_146843" type="StringTensorUnpack" version="extension">
|
349 |
+
<data mode="begins_ends" />
|
350 |
+
<input>
|
351 |
+
<port id="0" precision="U8">
|
352 |
+
<dim>16850</dim>
|
353 |
+
</port>
|
354 |
+
</input>
|
355 |
+
<output>
|
356 |
+
<port id="1" precision="I32">
|
357 |
+
<dim>-1</dim>
|
358 |
+
</port>
|
359 |
+
<port id="2" precision="I32">
|
360 |
+
<dim>-1</dim>
|
361 |
+
</port>
|
362 |
+
<port id="3" precision="U8">
|
363 |
+
<dim>-1</dim>
|
364 |
+
</port>
|
365 |
+
</output>
|
366 |
+
</layer>
|
367 |
+
<layer id="29" name="Constant_146850" type="Const" version="opset1">
|
368 |
+
<data element_type="i32" shape="999" offset="5463273" size="3996" />
|
369 |
+
<output>
|
370 |
+
<port id="0" precision="I32">
|
371 |
+
<dim>999</dim>
|
372 |
+
</port>
|
373 |
+
</output>
|
374 |
+
</layer>
|
375 |
+
<layer id="30" name="BPETokenizer_146851" type="BPETokenizer" version="extension">
|
376 |
+
<data unk_token="" fuse_unk="false" suffix_indicator="" end_suffix="" byte_fallback="false" cache_capacity="26214" />
|
377 |
+
<input>
|
378 |
+
<port id="0" precision="I32">
|
379 |
+
<dim>-1</dim>
|
380 |
+
</port>
|
381 |
+
<port id="1" precision="I32">
|
382 |
+
<dim>-1</dim>
|
383 |
+
</port>
|
384 |
+
<port id="2" precision="I32">
|
385 |
+
<dim>-1</dim>
|
386 |
+
</port>
|
387 |
+
<port id="3" precision="I32">
|
388 |
+
<dim>-1</dim>
|
389 |
+
</port>
|
390 |
+
<port id="4" precision="U8">
|
391 |
+
<dim>-1</dim>
|
392 |
+
</port>
|
393 |
+
<port id="5" precision="I32">
|
394 |
+
<dim>-1</dim>
|
395 |
+
</port>
|
396 |
+
<port id="6" precision="I32">
|
397 |
+
<dim>-1</dim>
|
398 |
+
</port>
|
399 |
+
<port id="7" precision="U8">
|
400 |
+
<dim>-1</dim>
|
401 |
+
</port>
|
402 |
+
<port id="8" precision="I32">
|
403 |
+
<dim>-1</dim>
|
404 |
+
</port>
|
405 |
+
<port id="9" precision="I32">
|
406 |
+
<dim>-1</dim>
|
407 |
+
</port>
|
408 |
+
<port id="10" precision="U8">
|
409 |
+
<dim>-1</dim>
|
410 |
+
</port>
|
411 |
+
<port id="11" precision="I32">
|
412 |
+
<dim>-1</dim>
|
413 |
+
</port>
|
414 |
+
<port id="12" precision="I32">
|
415 |
+
<dim>-1</dim>
|
416 |
+
</port>
|
417 |
+
<port id="13" precision="U8">
|
418 |
+
<dim>-1</dim>
|
419 |
+
</port>
|
420 |
+
<port id="14" precision="I32">
|
421 |
+
<dim>-1</dim>
|
422 |
+
</port>
|
423 |
+
<port id="15" precision="I32">
|
424 |
+
<dim>-1</dim>
|
425 |
+
</port>
|
426 |
+
<port id="16" precision="U8">
|
427 |
+
<dim>-1</dim>
|
428 |
+
</port>
|
429 |
+
<port id="17" precision="I32">
|
430 |
+
<dim>999</dim>
|
431 |
+
</port>
|
432 |
+
</input>
|
433 |
+
<output>
|
434 |
+
<port id="18" precision="I32">
|
435 |
+
<dim>-1</dim>
|
436 |
+
</port>
|
437 |
+
<port id="19" precision="I32">
|
438 |
+
<dim>-1</dim>
|
439 |
+
</port>
|
440 |
+
<port id="20" precision="I32">
|
441 |
+
<dim>-1</dim>
|
442 |
+
</port>
|
443 |
+
</output>
|
444 |
+
</layer>
|
445 |
+
<layer id="31" name="Subtract_146852" type="Subtract" version="opset1">
|
446 |
+
<data auto_broadcast="numpy" />
|
447 |
+
<input>
|
448 |
+
<port id="0" precision="I32">
|
449 |
+
<dim>-1</dim>
|
450 |
+
</port>
|
451 |
+
<port id="1" precision="I32">
|
452 |
+
<dim>-1</dim>
|
453 |
+
</port>
|
454 |
+
</input>
|
455 |
+
<output>
|
456 |
+
<port id="2" precision="I32">
|
457 |
+
<dim>-1</dim>
|
458 |
+
</port>
|
459 |
+
</output>
|
460 |
+
</layer>
|
461 |
+
<layer id="32" name="Constant_146853" type="Const" version="opset1">
|
462 |
+
<data element_type="i32" shape="" offset="5467269" size="4" />
|
463 |
+
<output>
|
464 |
+
<port id="0" precision="I32" />
|
465 |
+
</output>
|
466 |
+
</layer>
|
467 |
+
<layer id="33" name="Minimum_146854" type="Minimum" version="opset1">
|
468 |
+
<data auto_broadcast="numpy" />
|
469 |
+
<input>
|
470 |
+
<port id="0" precision="I32">
|
471 |
+
<dim>-1</dim>
|
472 |
+
</port>
|
473 |
+
<port id="1" precision="I32" />
|
474 |
+
</input>
|
475 |
+
<output>
|
476 |
+
<port id="2" precision="I32">
|
477 |
+
<dim>-1</dim>
|
478 |
+
</port>
|
479 |
+
</output>
|
480 |
+
</layer>
|
481 |
+
<layer id="34" name="Subtract_146855" type="Subtract" version="opset1">
|
482 |
+
<data auto_broadcast="numpy" />
|
483 |
+
<input>
|
484 |
+
<port id="0" precision="I32">
|
485 |
+
<dim>-1</dim>
|
486 |
+
</port>
|
487 |
+
<port id="1" precision="I32">
|
488 |
+
<dim>-1</dim>
|
489 |
+
</port>
|
490 |
+
</input>
|
491 |
+
<output>
|
492 |
+
<port id="2" precision="I32">
|
493 |
+
<dim>-1</dim>
|
494 |
+
</port>
|
495 |
+
</output>
|
496 |
+
</layer>
|
497 |
+
<layer id="35" name="Constant_146859" type="Const" version="opset1">
|
498 |
+
<data element_type="i32" shape="2" offset="8" size="8" />
|
499 |
+
<output>
|
500 |
+
<port id="0" precision="I32">
|
501 |
+
<dim>2</dim>
|
502 |
+
</port>
|
503 |
+
</output>
|
504 |
+
</layer>
|
505 |
+
<layer id="36" name="CombineSegments_146860" type="CombineSegments" version="extension">
|
506 |
+
<input>
|
507 |
+
<port id="0" precision="I32" />
|
508 |
+
<port id="1" precision="I32" />
|
509 |
+
<port id="2" precision="I32">
|
510 |
+
<dim>1</dim>
|
511 |
+
</port>
|
512 |
+
<port id="3" precision="I32">
|
513 |
+
<dim>-1</dim>
|
514 |
+
</port>
|
515 |
+
<port id="4" precision="I32">
|
516 |
+
<dim>-1</dim>
|
517 |
+
</port>
|
518 |
+
<port id="5" precision="I32">
|
519 |
+
<dim>-1</dim>
|
520 |
+
</port>
|
521 |
+
<port id="6" precision="I32">
|
522 |
+
<dim>2</dim>
|
523 |
+
</port>
|
524 |
+
</input>
|
525 |
+
<output>
|
526 |
+
<port id="7" precision="I32">
|
527 |
+
<dim>-1</dim>
|
528 |
+
</port>
|
529 |
+
<port id="8" precision="I32">
|
530 |
+
<dim>-1</dim>
|
531 |
+
</port>
|
532 |
+
<port id="9" precision="I32">
|
533 |
+
<dim>-1</dim>
|
534 |
+
</port>
|
535 |
+
<port id="10" precision="I32">
|
536 |
+
<dim>-1</dim>
|
537 |
+
</port>
|
538 |
+
<port id="11" precision="I32">
|
539 |
+
<dim>-1</dim>
|
540 |
+
</port>
|
541 |
+
<port id="12" precision="I32">
|
542 |
+
<dim>-1</dim>
|
543 |
+
</port>
|
544 |
+
</output>
|
545 |
+
</layer>
|
546 |
+
<layer id="37" name="Subtract_146861" type="Subtract" version="opset1">
|
547 |
+
<data auto_broadcast="numpy" />
|
548 |
+
<input>
|
549 |
+
<port id="0" precision="I32">
|
550 |
+
<dim>-1</dim>
|
551 |
+
</port>
|
552 |
+
<port id="1" precision="I32">
|
553 |
+
<dim>-1</dim>
|
554 |
+
</port>
|
555 |
+
</input>
|
556 |
+
<output>
|
557 |
+
<port id="2" precision="I32">
|
558 |
+
<dim>-1</dim>
|
559 |
+
</port>
|
560 |
+
</output>
|
561 |
+
</layer>
|
562 |
+
<layer id="38" name="Constant_146862" type="Const" version="opset1">
|
563 |
+
<data element_type="i32" shape="" offset="0" size="4" />
|
564 |
+
<output>
|
565 |
+
<port id="0" precision="I32" />
|
566 |
+
</output>
|
567 |
+
</layer>
|
568 |
+
<layer id="39" name="ReduceMax_146863" type="ReduceMax" version="opset1">
|
569 |
+
<data keep_dims="false" />
|
570 |
+
<input>
|
571 |
+
<port id="0" precision="I32">
|
572 |
+
<dim>-1</dim>
|
573 |
+
</port>
|
574 |
+
<port id="1" precision="I32" />
|
575 |
+
</input>
|
576 |
+
<output>
|
577 |
+
<port id="2" precision="I32" />
|
578 |
+
</output>
|
579 |
+
</layer>
|
580 |
+
<layer id="40" name="Constant_146864" type="Const" version="opset1">
|
581 |
+
<data element_type="i32" shape="" offset="0" size="4" />
|
582 |
+
<output>
|
583 |
+
<port id="0" precision="I32" />
|
584 |
+
</output>
|
585 |
+
</layer>
|
586 |
+
<layer id="41" name="RaggedToDense_146865" type="RaggedToDense" version="extension">
|
587 |
+
<data pad_right="false" />
|
588 |
+
<input>
|
589 |
+
<port id="0" precision="I32">
|
590 |
+
<dim>-1</dim>
|
591 |
+
</port>
|
592 |
+
<port id="1" precision="I32">
|
593 |
+
<dim>-1</dim>
|
594 |
+
</port>
|
595 |
+
<port id="2" precision="I32">
|
596 |
+
<dim>-1</dim>
|
597 |
+
</port>
|
598 |
+
<port id="3" precision="I32" />
|
599 |
+
<port id="4" precision="I32" />
|
600 |
+
</input>
|
601 |
+
<output>
|
602 |
+
<port id="5" precision="I32">
|
603 |
+
<dim>-1</dim>
|
604 |
+
<dim>-1</dim>
|
605 |
+
</port>
|
606 |
+
<port id="6" precision="BOOL">
|
607 |
+
<dim>-1</dim>
|
608 |
+
<dim>-1</dim>
|
609 |
+
</port>
|
610 |
+
</output>
|
611 |
+
</layer>
|
612 |
+
<layer id="42" name="Convert_146866" type="Convert" version="opset1">
|
613 |
+
<data destination_type="i32" />
|
614 |
+
<input>
|
615 |
+
<port id="0" precision="BOOL">
|
616 |
+
<dim>-1</dim>
|
617 |
+
<dim>-1</dim>
|
618 |
+
</port>
|
619 |
+
</input>
|
620 |
+
<output>
|
621 |
+
<port id="1" precision="I32">
|
622 |
+
<dim>-1</dim>
|
623 |
+
<dim>-1</dim>
|
624 |
+
</port>
|
625 |
+
</output>
|
626 |
+
</layer>
|
627 |
+
<layer id="43" name="Convert_146866" type="Convert" version="opset1">
|
628 |
+
<data destination_type="i64" />
|
629 |
+
<input>
|
630 |
+
<port id="0" precision="I32">
|
631 |
+
<dim>-1</dim>
|
632 |
+
<dim>-1</dim>
|
633 |
+
</port>
|
634 |
+
</input>
|
635 |
+
<output>
|
636 |
+
<port id="1" precision="I64" names="attention_mask">
|
637 |
+
<dim>-1</dim>
|
638 |
+
<dim>-1</dim>
|
639 |
+
</port>
|
640 |
+
</output>
|
641 |
+
</layer>
|
642 |
+
<layer id="45" name="RaggedToDense_146865.0" type="Convert" version="opset1">
|
643 |
+
<data destination_type="i64" />
|
644 |
+
<input>
|
645 |
+
<port id="0" precision="I32">
|
646 |
+
<dim>-1</dim>
|
647 |
+
<dim>-1</dim>
|
648 |
+
</port>
|
649 |
+
</input>
|
650 |
+
<output>
|
651 |
+
<port id="1" precision="I64" names="input_ids">
|
652 |
+
<dim>-1</dim>
|
653 |
+
<dim>-1</dim>
|
654 |
+
</port>
|
655 |
+
</output>
|
656 |
+
</layer>
|
657 |
+
<layer id="46" name="Result_146869" type="Result" version="opset1">
|
658 |
+
<input>
|
659 |
+
<port id="0" precision="I64">
|
660 |
+
<dim>-1</dim>
|
661 |
+
<dim>-1</dim>
|
662 |
+
</port>
|
663 |
+
</input>
|
664 |
+
</layer>
|
665 |
+
<layer id="44" name="Result_146871" type="Result" version="opset1">
|
666 |
+
<input>
|
667 |
+
<port id="0" precision="I64">
|
668 |
+
<dim>-1</dim>
|
669 |
+
<dim>-1</dim>
|
670 |
+
</port>
|
671 |
+
</input>
|
672 |
+
</layer>
|
673 |
+
</layers>
|
674 |
+
<edges>
|
675 |
+
<edge from-layer="0" from-port="0" to-layer="5" to-port="0" />
|
676 |
+
<edge from-layer="1" from-port="0" to-layer="36" to-port="0" />
|
677 |
+
<edge from-layer="2" from-port="0" to-layer="36" to-port="1" />
|
678 |
+
<edge from-layer="3" from-port="0" to-layer="36" to-port="2" />
|
679 |
+
<edge from-layer="4" from-port="0" to-layer="11" to-port="0" />
|
680 |
+
<edge from-layer="5" from-port="1" to-layer="6" to-port="0" />
|
681 |
+
<edge from-layer="5" from-port="3" to-layer="18" to-port="4" />
|
682 |
+
<edge from-layer="5" from-port="2" to-layer="18" to-port="3" />
|
683 |
+
<edge from-layer="5" from-port="1" to-layer="18" to-port="2" />
|
684 |
+
<edge from-layer="6" from-port="1" to-layer="9" to-port="0" />
|
685 |
+
<edge from-layer="7" from-port="0" to-layer="9" to-port="1" />
|
686 |
+
<edge from-layer="8" from-port="0" to-layer="9" to-port="2" />
|
687 |
+
<edge from-layer="9" from-port="3" to-layer="14" to-port="0" />
|
688 |
+
<edge from-layer="9" from-port="3" to-layer="11" to-port="1" />
|
689 |
+
<edge from-layer="10" from-port="0" to-layer="11" to-port="2" />
|
690 |
+
<edge from-layer="11" from-port="3" to-layer="18" to-port="0" />
|
691 |
+
<edge from-layer="12" from-port="0" to-layer="16" to-port="0" />
|
692 |
+
<edge from-layer="13" from-port="0" to-layer="14" to-port="1" />
|
693 |
+
<edge from-layer="14" from-port="2" to-layer="16" to-port="1" />
|
694 |
+
<edge from-layer="15" from-port="0" to-layer="16" to-port="2" />
|
695 |
+
<edge from-layer="16" from-port="3" to-layer="18" to-port="1" />
|
696 |
+
<edge from-layer="17" from-port="0" to-layer="18" to-port="5" />
|
697 |
+
<edge from-layer="18" from-port="9" to-layer="20" to-port="3" />
|
698 |
+
<edge from-layer="18" from-port="11" to-layer="20" to-port="5" />
|
699 |
+
<edge from-layer="18" from-port="10" to-layer="20" to-port="4" />
|
700 |
+
<edge from-layer="18" from-port="8" to-layer="20" to-port="2" />
|
701 |
+
<edge from-layer="18" from-port="7" to-layer="20" to-port="1" />
|
702 |
+
<edge from-layer="18" from-port="6" to-layer="20" to-port="0" />
|
703 |
+
<edge from-layer="19" from-port="0" to-layer="20" to-port="6" />
|
704 |
+
<edge from-layer="20" from-port="7" to-layer="30" to-port="0" />
|
705 |
+
<edge from-layer="20" from-port="8" to-layer="30" to-port="1" />
|
706 |
+
<edge from-layer="20" from-port="9" to-layer="30" to-port="2" />
|
707 |
+
<edge from-layer="20" from-port="10" to-layer="30" to-port="3" />
|
708 |
+
<edge from-layer="20" from-port="11" to-layer="30" to-port="4" />
|
709 |
+
<edge from-layer="21" from-port="0" to-layer="22" to-port="0" />
|
710 |
+
<edge from-layer="22" from-port="3" to-layer="30" to-port="7" />
|
711 |
+
<edge from-layer="22" from-port="1" to-layer="30" to-port="5" />
|
712 |
+
<edge from-layer="22" from-port="2" to-layer="30" to-port="6" />
|
713 |
+
<edge from-layer="23" from-port="0" to-layer="24" to-port="0" />
|
714 |
+
<edge from-layer="24" from-port="1" to-layer="30" to-port="8" />
|
715 |
+
<edge from-layer="24" from-port="2" to-layer="30" to-port="9" />
|
716 |
+
<edge from-layer="24" from-port="3" to-layer="30" to-port="10" />
|
717 |
+
<edge from-layer="25" from-port="0" to-layer="26" to-port="0" />
|
718 |
+
<edge from-layer="26" from-port="3" to-layer="30" to-port="13" />
|
719 |
+
<edge from-layer="26" from-port="1" to-layer="30" to-port="11" />
|
720 |
+
<edge from-layer="26" from-port="2" to-layer="30" to-port="12" />
|
721 |
+
<edge from-layer="27" from-port="0" to-layer="28" to-port="0" />
|
722 |
+
<edge from-layer="28" from-port="1" to-layer="30" to-port="14" />
|
723 |
+
<edge from-layer="28" from-port="2" to-layer="30" to-port="15" />
|
724 |
+
<edge from-layer="28" from-port="3" to-layer="30" to-port="16" />
|
725 |
+
<edge from-layer="29" from-port="0" to-layer="30" to-port="17" />
|
726 |
+
<edge from-layer="30" from-port="20" to-layer="36" to-port="5" />
|
727 |
+
<edge from-layer="30" from-port="19" to-layer="36" to-port="4" />
|
728 |
+
<edge from-layer="30" from-port="19" to-layer="34" to-port="0" />
|
729 |
+
<edge from-layer="30" from-port="18" to-layer="31" to-port="1" />
|
730 |
+
<edge from-layer="30" from-port="19" to-layer="31" to-port="0" />
|
731 |
+
<edge from-layer="31" from-port="2" to-layer="33" to-port="0" />
|
732 |
+
<edge from-layer="32" from-port="0" to-layer="33" to-port="1" />
|
733 |
+
<edge from-layer="33" from-port="2" to-layer="34" to-port="1" />
|
734 |
+
<edge from-layer="34" from-port="2" to-layer="36" to-port="3" />
|
735 |
+
<edge from-layer="35" from-port="0" to-layer="36" to-port="6" />
|
736 |
+
<edge from-layer="36" from-port="8" to-layer="37" to-port="0" />
|
737 |
+
<edge from-layer="36" from-port="7" to-layer="37" to-port="1" />
|
738 |
+
<edge from-layer="36" from-port="7" to-layer="41" to-port="0" />
|
739 |
+
<edge from-layer="36" from-port="8" to-layer="41" to-port="1" />
|
740 |
+
<edge from-layer="36" from-port="9" to-layer="41" to-port="2" />
|
741 |
+
<edge from-layer="37" from-port="2" to-layer="39" to-port="0" />
|
742 |
+
<edge from-layer="38" from-port="0" to-layer="39" to-port="1" />
|
743 |
+
<edge from-layer="39" from-port="2" to-layer="41" to-port="3" />
|
744 |
+
<edge from-layer="40" from-port="0" to-layer="41" to-port="4" />
|
745 |
+
<edge from-layer="41" from-port="6" to-layer="42" to-port="0" />
|
746 |
+
<edge from-layer="41" from-port="5" to-layer="45" to-port="0" />
|
747 |
+
<edge from-layer="42" from-port="1" to-layer="43" to-port="0" />
|
748 |
+
<edge from-layer="43" from-port="1" to-layer="44" to-port="0" />
|
749 |
+
<edge from-layer="45" from-port="1" to-layer="46" to-port="0" />
|
750 |
+
</edges>
|
751 |
+
<rt_info>
|
752 |
+
<add_attention_mask value="True" />
|
753 |
+
<add_prefix_space />
|
754 |
+
<add_special_tokens value="True" />
|
755 |
+
<bos_token_id value="1" />
|
756 |
+
<chat_template value="{%- if messages[0]["role"] == "system" %} {%- set system_message = messages[0]["content"] %} {%- set loop_messages = messages[1:] %} {%- else %} {%- set loop_messages = messages %} {%- endif %} {%- if not tools is defined %} {%- set tools = none %} {%- endif %} {%- set user_messages = loop_messages | selectattr("role", "equalto", "user") | list %} {#- This block checks for alternating user/assistant messages, skipping tool calling messages #} {%- set ns = namespace() %} {%- set ns.index = 0 %} {%- for message in loop_messages %} {%- if not (message.role == "tool" or message.role == "tool_results" or (message.tool_calls is defined and message.tool_calls is not none)) %} {%- if (message["role"] == "user") != (ns.index % 2 == 0) %} {{- raise_exception("After the optional system message, conversation roles must alternate user/assistant/user/assistant/...") }} {%- endif %} {%- set ns.index = ns.index + 1 %} {%- endif %} {%- endfor %} {{- bos_token }} {%- for message in loop_messages %} {%- if message["role"] == "user" %} {%- if tools is not none and (message == user_messages[-1]) %} {{- "[AVAILABLE_TOOLS][" }} {%- for tool in tools %} {%- set tool = tool.function %} {{- '{"type": "function", "function": {' }} {%- for key, val in tool.items() if key != "return" %} {%- if val is string %} {{- '"' + key + '": "' + val + '"' }} {%- else %} {{- '"' + key + '": ' + val|tojson }} {%- endif %} {%- if not loop.last %} {{- ", " }} {%- endif %} {%- endfor %} {{- "}}" }} {%- if not loop.last %} {{- ", " }} {%- else %} {{- "]" }} {%- endif %} {%- endfor %} {{- "[/AVAILABLE_TOOLS]" }} {%- endif %} {%- if loop.last and system_message is defined %} {{- "[INST]" + system_message + "\n\n" + message["content"] + "[/INST]" }} {%- else %} {{- "[INST]" + message["content"] + "[/INST]" }} {%- endif %} {%- elif (message.tool_calls is defined and message.tool_calls is not none) %} {{- "[TOOL_CALLS][" }} {%- for tool_call in message.tool_calls %} {%- set out = tool_call.function|tojson %} {{- out[:-1] }} {%- if not tool_call.id is defined or tool_call.id|length != 9 %} {{- raise_exception("Tool call IDs should be alphanumeric strings with length 9!") }} {%- endif %} {{- ', "id": "' + tool_call.id + '"}' }} {%- if not loop.last %} {{- ", " }} {%- else %} {{- "]" + eos_token }} {%- endif %} {%- endfor %} {%- elif message["role"] == "assistant" %} {{- message["content"] + eos_token}} {%- elif message["role"] == "tool_results" or message["role"] == "tool" %} {%- if message.content is defined and message.content.content is defined %} {%- set content = message.content.content %} {%- else %} {%- set content = message.content %} {%- endif %} {{- '[TOOL_RESULTS]{"content": ' + content|string + ", " }} {%- if not message.tool_call_id is defined or message.tool_call_id|length != 9 %} {{- raise_exception("Tool call IDs should be alphanumeric strings with length 9!") }} {%- endif %} {{- '"call_id": "' + message.tool_call_id + '"}[/TOOL_RESULTS]' }} {%- else %} {{- raise_exception("Only user and assistant roles are supported, with the exception of an initial optional system message!") }} {%- endif %} {%- endfor %} " />
|
757 |
+
<clean_up_tokenization_spaces />
|
758 |
+
<detokenizer_input_type value="i64" />
|
759 |
+
<eos_token_id value="2" />
|
760 |
+
<handle_special_tokens_with_re />
|
761 |
+
<number_of_inputs value="1" />
|
762 |
+
<openvino_tokenizers_version value="2024.5.0.0" />
|
763 |
+
<openvino_version value="2024.5.0" />
|
764 |
+
<original_tokenizer_class value="<class 'transformers.models.llama.tokenization_llama_fast.LlamaTokenizerFast'>" />
|
765 |
+
<sentencepiece_version value="0.2.0" />
|
766 |
+
<skip_special_tokens value="True" />
|
767 |
+
<streaming_detokenizer value="False" />
|
768 |
+
<tiktoken_version value="0.7.0" />
|
769 |
+
<tokenizer_output_type value="i64" />
|
770 |
+
<tokenizers_version value="0.20.3" />
|
771 |
+
<transformers_version value="4.46.3" />
|
772 |
+
<use_max_padding value="False" />
|
773 |
+
<use_sentencepiece_backend value="False" />
|
774 |
+
<utf8_replace_mode />
|
775 |
+
<with_detokenizer value="True" />
|
776 |
+
</rt_info>
|
777 |
+
</net>
|
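With openvino_model.xml, openvino_tokenizer.xml and openvino_detokenizer.xml side by side, the upload matches the folder layout that OpenVINO GenAI consumes, so generation can run without transformers in the loop. A sketch, assuming the repository has been downloaded to a local folder:

```python
import openvino_genai

pipe = openvino_genai.LLMPipeline("./Ministral-8B-Instruct-2410-int4-ov", "CPU")
print(pipe.generate("[INST]Write one sentence about OpenVINO.[/INST]", max_new_tokens=64))
```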
special_tokens_map.json
ADDED
@@ -0,0 +1,23 @@
{
  "bos_token": {
    "content": "<s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "</s>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "unk_token": {
    "content": "<unk>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
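These are the same <s>, </s> and <unk> tokens that bos_token_id=1 and eos_token_id=2 point at in config.json, and that the chat template stored in the tokenizer graphs' rt_info wraps around each turn. A sketch of how the template renders a conversation, assuming the same local directory as above:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("./Ministral-8B-Instruct-2410-int4-ov")
messages = [
    {"role": "system", "content": "You are a concise assistant."},
    {"role": "user", "content": "Hello!"},
]
prompt = tok.apply_chat_template(messages, tokenize=False)
print(prompt)  # expected shape: <s>[INST]You are a concise assistant.\n\nHello![/INST]
```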
tokenizer.json
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:d7edbeaf20dd7f571b5dd1c54d9ace4f9b6299127cc7ba2afb14a6d51a4a79a4
size 17078136
tokenizer_config.json
ADDED
The diff for this file is too large to render. See raw diff.