Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
@@ -62,9 +62,9 @@ def run_code(code, groq_api_key):
|
|
62 |
# Extract the Llama Vision description from the API response
|
63 |
llamavision_description = llamavision_completion.choices[0].message.content
|
64 |
|
65 |
-
# Sending the plot image to Llama 3.
|
66 |
llama_completion = llama_client.chat.completions.create(
|
67 |
-
model="llama-3.
|
68 |
messages=[
|
69 |
{
|
70 |
"role": "system",
|
@@ -82,7 +82,7 @@ def run_code(code, groq_api_key):
|
|
82 |
stop=None,
|
83 |
)
|
84 |
|
85 |
-
# Extract the Llama 3.
|
86 |
llama_description = ""
|
87 |
for chunk in llama_completion:
|
88 |
llama_description += chunk.choices[0].delta.content or ""
|
@@ -116,7 +116,7 @@ with gr.Blocks(theme=gr.themes.Soft()) as demo:
|
|
116 |
|
117 |
with gr.Row():
|
118 |
output_llamavision_text = gr.Textbox(label="Description from Llama 3.2 Vision", interactive=False)
|
119 |
-
output_llama_text = gr.Textbox(label="Description from Llama 3.
|
120 |
|
121 |
submit_btn.click(
|
122 |
fn=run_code,
|
|
|
62 |
# Extract the Llama Vision description from the API response
|
63 |
llamavision_description = llamavision_completion.choices[0].message.content
|
64 |
|
65 |
+
# Sending the plot image to Llama 3.2 API to get the description
|
66 |
llama_completion = llama_client.chat.completions.create(
|
67 |
+
model="llama-3.2-90b-text-preview",
|
68 |
messages=[
|
69 |
{
|
70 |
"role": "system",
|
|
|
82 |
stop=None,
|
83 |
)
|
84 |
|
85 |
+
# Extract the Llama 3.2 description from the API response
|
86 |
llama_description = ""
|
87 |
for chunk in llama_completion:
|
88 |
llama_description += chunk.choices[0].delta.content or ""
|
|
|
116 |
|
117 |
with gr.Row():
|
118 |
output_llamavision_text = gr.Textbox(label="Description from Llama 3.2 Vision", interactive=False)
|
119 |
+
output_llama_text = gr.Textbox(label="Description from Llama 3.2 Text", interactive=False)
|
120 |
|
121 |
submit_btn.click(
|
122 |
fn=run_code,
|