Spaces:
Running
Running
Update app.py
Browse files
app.py
CHANGED
@@ -24,12 +24,12 @@ llm_model = None

Before (old side of the diff; the removed filename strings are truncated in this page capture and their exact old values are unrecoverable — only the `Dolphin3.0-Llama3.2-1B-…` / `Dolphin3.0-Qwen2.5-0.5B-…` prefixes are visible):

 24
 25      hf_hub_download(
 26          repo_id="bartowski/Dolphin3.0-Llama3.2-1B-GGUF",
 27 -        filename="Dolphin3.0-Llama3.2-1B-[truncated in source]",
 28          local_dir="./models",
 29      )
 30      hf_hub_download(
 31          repo_id="bartowski/Dolphin3.0-Qwen2.5-0.5B-GGUF",
 32 -        filename="Dolphin3.0-Qwen2.5-0.5B-[truncated in source]",
 33          local_dir="./models",
 34      )
 35

@@ -145,10 +145,10 @@ demo = gr.ChatInterface(

145      additional_inputs=[
146          gr.Dropdown(
147              choices=[
148 -                "Dolphin3.0-Llama3.2-1B-[truncated in source]",
149 -                "Dolphin3.0-Qwen2.5-0.5B-[truncated in source]",
150              ],
151 -            value="Dolphin3.0-Qwen2.5-0.5B-[truncated in source]",
152              label="Model",
153              info="Select the AI model to use for chat",
154          ),
|
After (new side of the diff; all added lines are fully visible — each filename now names the Q6_K GGUF quantization):

 24
 25      hf_hub_download(
 26          repo_id="bartowski/Dolphin3.0-Llama3.2-1B-GGUF",
 27 +        filename="Dolphin3.0-Llama3.2-1B-Q6_K.gguf",
 28          local_dir="./models",
 29      )
 30      hf_hub_download(
 31          repo_id="bartowski/Dolphin3.0-Qwen2.5-0.5B-GGUF",
 32 +        filename="Dolphin3.0-Qwen2.5-0.5B-Q6_K.gguf",
 33          local_dir="./models",
 34      )
 35

145      additional_inputs=[
146          gr.Dropdown(
147              choices=[
148 +                "Dolphin3.0-Llama3.2-1B-Q6_K.gguf",
149 +                "Dolphin3.0-Qwen2.5-0.5B-Q6_K.gguf",
150              ],
151 +            value="Dolphin3.0-Qwen2.5-0.5B-Q6_K.gguf",
152              label="Model",
153              info="Select the AI model to use for chat",
154          ),