Update app.py
app.py
CHANGED
@@ -11,14 +11,14 @@ import urllib.request
 import uuid
 import requests
 import io
-import
+from chat_models import models as c_models
 
 loaded_model=[]
 chat_model=[]
 for i,model in enumerate(models):
     loaded_model.append(gr.load(f'models/{model}'))
 print (loaded_model)
-for i,model_c in enumerate(
+for i,model_c in enumerate(c_models):
     chat_model.append(model_c)
 print (chat_model)
 now = datetime.now()
@@ -58,20 +58,20 @@ def run_gpt(in_prompt,history,model_drop):
     return resp
 
 
-def generate(purpose,history,
+def generate(purpose,history,chat_drop):
     print (history)
-    out_prompt = run_gpt(purpose,history,
+    out_prompt = run_gpt(purpose,history,chat_drop)
     return out_prompt
 
-def describe(purpose,history,
+def describe(purpose,history,chat_drop):
     print (history)
-    out_prompt = run_gpt(purpose,history,
+    out_prompt = run_gpt(purpose,history,chat_drop)
     return out_prompt
 
 def run(purpose,history,model_drop,chat_drop,choice):
     if choice == "Generate":
         #out_img = infer(out_prompt)
-        out_prompt=generate(purpose,history,
+        out_prompt=generate(purpose,history,chat_drop)
         history.append((purpose,out_prompt))
         yield (history,None)
         model=loaded_model[int(model_drop)]
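The new line 14 imports `models` from a `chat_models` module and aliases it as `c_models`, which the updated loop at line 21 then iterates to fill `chat_model`. That companion module is not part of this commit, so the sketch below is only an assumption about its minimal shape: a plain `models` list of chat model ids, with placeholder entries.

# chat_models.py -- hypothetical companion module implied by the new import;
# it is not included in this diff. Entries are placeholders, not the Space's
# actual configuration.
models = [
    "example-org/chat-model-a",  # placeholder repo id
    "example-org/chat-model-b",  # placeholder repo id
]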
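After this change, the `chat_drop` value flows from `run` through `generate` and `describe` into `run_gpt`, whose body is not shown here (only its header, `def run_gpt(in_prompt,history,model_drop):`, appears in the hunk context). The snippet below is a hedged sketch of how such a dropdown value could be consumed, mirroring the `loaded_model[int(model_drop)]` lookup visible at line 77; the prompt format and the echoed response are assumptions, not the Space's actual logic.

# Hypothetical stand-in for run_gpt, assuming the dropdown value is an index
# into the chat_model list built at startup (as loaded_model is indexed in run).
def run_gpt(in_prompt, history, model_drop):
    model_name = chat_model[int(model_drop)]
    # Fold the running history into a simple prompt for the selected chat model.
    prompt = "".join(f"USER: {u}\nASSISTANT: {a}\n" for u, a in history)
    prompt += f"USER: {in_prompt}\nASSISTANT:"
    # The real Space would query the model here; this sketch only echoes
    # the assembled prompt so the example stays self-contained.
    resp = f"({model_name}) {prompt}"
    return resp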