Update app.py
app.py
CHANGED
@@ -60,31 +60,13 @@ def run_gpt(in_prompt,history,model_drop):
 
 def generate(purpose,history,model_drop,chat_drop):
     print (history)
-    task=None
-    #if history:
-    # history=str(history).strip("[]")
-    #if not history:
-    # history = ""
-
-    #action_name, action_input = parse_action(line)
     out_prompt = run_gpt(purpose,history,model_drop)
-
-
-    return (history,None)
-
+    return out_prompt
+
 def describe(purpose,history,model_drop,chat_drop):
     print (history)
-
-
-    # history=str(history).strip("[]")
-    #if not history:
-    # history = ""
-
-    #action_name, action_input = parse_action(line)
-    out_prompt = run_gpt(purpose,history,model_drop)
-    #yield ([(purpose,out_prompt)],None)
-    history.append((purpose,out_prompt))
-    return (history,None)
+    out_prompt = run_gpt(purpose,history,model_drop)
+    return out_prompt
 
 def run(purpose,history,model_drop,chat_drop,choice):
     if choice == "Generate":
@@ -92,6 +74,8 @@ def run(purpose,history,model_drop,chat_drop,choice):
         out_prompt=generate(purpose,history,model_drop,chat_drop)
         model=loaded_model[int(model_drop)]
         out_img=model(out_prompt)
+        history.append((purpose,out_prompt))
+        #return (history,None)
         print(out_img)
         url=f'https://johann22-chat-diffusion-describe.hf.space/file={out_img}'
         print(url)
@@ -108,6 +92,7 @@ def run(purpose,history,model_drop,chat_drop,choice):
     if choice == "Describe":
         #out_img = infer(out_prompt)
         out_prompt=describe(purpose,history,model_drop,chat_drop)
+        history.append((purpose,out_prompt))
         model=loaded_model[int(model_drop)]
         out_img=model(out_prompt)
         print(out_img)
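
Net effect, as a sketch reconstructed only from the hunks above (run_gpt, loaded_model and the elided lines marked ... live elsewhere in app.py and are assumed unchanged): generate and describe now return the prompt text produced by run_gpt instead of a (history, None) tuple, and run() takes over appending (purpose, out_prompt) to the chat history.

def generate(purpose, history, model_drop, chat_drop):
    print(history)
    # build a text prompt from the user's purpose and the chat history
    out_prompt = run_gpt(purpose, history, model_drop)
    return out_prompt

def describe(purpose, history, model_drop, chat_drop):
    print(history)
    out_prompt = run_gpt(purpose, history, model_drop)
    return out_prompt

def run(purpose, history, model_drop, chat_drop, choice):
    if choice == "Generate":
        out_prompt = generate(purpose, history, model_drop, chat_drop)
        model = loaded_model[int(model_drop)]   # model picked via the dropdown index
        out_img = model(out_prompt)             # apparently a file path, given the /file= URL below
        history.append((purpose, out_prompt))   # history is now updated here, not inside generate()
        print(out_img)
        url = f'https://johann22-chat-diffusion-describe.hf.space/file={out_img}'
        print(url)
        ...                                     # rest of the branch not shown in this diff
    if choice == "Describe":
        out_prompt = describe(purpose, history, model_drop, chat_drop)
        history.append((purpose, out_prompt))   # same bookkeeping for the Describe branch
        model = loaded_model[int(model_drop)]
        out_img = model(out_prompt)
        print(out_img)
        ...                                     # rest of the branch not shown in this diff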