Runtime error
Update app.py
app.py
CHANGED
@@ -96,7 +96,8 @@ def run_gpt(in_prompt,history,model_drop,seed):
         resp += response.token.text
     return resp
 
-def run_idefics(in_prompt,image,history,model_drop,seed):
+def run_idefics(in_prompt,history,image,model_drop,seed):
+    send_list=[]
     client = InferenceClient("HuggingFaceM4/idefics-9b-instruct")
     print(f'history :: {history}')
     prompt=format_prompt(in_prompt,history,seed)
@@ -110,9 +111,12 @@ def run_idefics(in_prompt,image,history,model_drop,seed):
         do_sample=True,
         seed=seed,
     )
-    content = agent.IDEFICS_PROMPT + prompt
+    #content = agent.IDEFICS_PROMPT + prompt
+    send_list.append(agent.IDEFICS_PROMPT)
+    send_list.append(prompt)
+    send_list.append(image)
     print(content)
-    stream = client.text_generation(
+    stream = client.text_generation(send_list, **generate_kwargs, stream=True, details=True, return_full_text=False)
     resp = ""
     for response in stream:
         resp += response.token.text
@@ -127,8 +131,8 @@ def generate(purpose,history,chat_drop,seed):
 
 def describe(purpose,history,image,chat_drop,seed):
     print (history)
-    purpose=f"{purpose},"
-    out_prompt = run_idefics(purpose,history,chat_drop,seed)
+    #purpose=f"{purpose},"
+    out_prompt = run_idefics(purpose,history,image,chat_drop,seed)
     return out_prompt
 
 def run(purpose,history,image,model_drop,chat_drop,choice,seed):
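Two issues remain visible in the committed version and likely explain the Runtime error badge: print(content) refers to a name that is now only defined in the commented-out line, and InferenceClient.text_generation expects a single string prompt, so handing it send_list (a Python list of prompt text plus an image) is unlikely to be accepted by the endpoint. Below is a minimal sketch of a run_idefics that keeps the same flow but builds one string. The stub IDEFICS_PROMPT and format_prompt, the extra generation parameters, and the markdown-style ![](url) image reference are assumptions standing in for code that lives elsewhere in the Space, not the Space's confirmed behaviour.

from huggingface_hub import InferenceClient

IDEFICS_PROMPT = ""   # stand-in for agent.IDEFICS_PROMPT defined in the Space

def format_prompt(in_prompt, history, seed):
    # stand-in for the Space's own format_prompt
    return in_prompt

def run_idefics(in_prompt, history, image, model_drop, seed):
    client = InferenceClient("HuggingFaceM4/idefics-9b-instruct")
    prompt = format_prompt(in_prompt, history, seed)
    generate_kwargs = dict(
        max_new_tokens=512,   # only do_sample and seed are visible in the diff;
        do_sample=True,       # the other values here are illustrative
        seed=seed,
    )
    # Build one string prompt; text_generation does not take a list.
    # The image is assumed to be a URL the endpoint can fetch, referenced
    # with the markdown-style syntax the IDEFICS endpoint understands.
    content = f"{IDEFICS_PROMPT}{prompt}![]({image})"
    print(content)
    stream = client.text_generation(
        content, **generate_kwargs, stream=True, details=True, return_full_text=False
    )
    resp = ""
    for response in stream:
        resp += response.token.text
    return resp

The last hunk now threads image through describe(), though in the lines shown run_idefics still ignores model_drop (the client is hard-coded to idefics-9b-instruct), so passing chat_drop in that position has no effect on the call.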