johann22 committed on
Commit
fcc8348
·
1 Parent(s): fe357e7

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +31 -21
app.py CHANGED
@@ -73,6 +73,29 @@ def run_gpt(in_prompt,history,model_drop):
73
  for response in stream:
74
  resp += response.token.text
75
  return resp
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
76
 
77
 
78
  def generate(purpose,history,chat_drop):
@@ -80,12 +103,13 @@ def generate(purpose,history,chat_drop):
80
  out_prompt = run_gpt(purpose,history,chat_drop)
81
  return out_prompt
82
 
83
- def describe(purpose,history,chat_drop):
84
  print (history)
85
- out_prompt = run_gpt(purpose,history,chat_drop)
 
86
  return out_prompt
87
 
88
- def run(purpose,history,model_drop,chat_drop,choice):
89
  if choice == "Generate":
90
  #out_img = infer(out_prompt)
91
  out_prompt=generate(purpose,history,chat_drop)
@@ -109,24 +133,10 @@ def run(purpose,history,model_drop,chat_drop,choice):
109
  yield ([(purpose,"an Error occured")],None)
110
  if choice == "Describe":
111
  #out_img = infer(out_prompt)
112
- out_prompt=describe(purpose,history,model_drop,chat_drop)
113
  history.append((purpose,out_prompt))
114
  yield (history,None)
115
- model=loaded_model[int(model_drop)]
116
- out_img=model(out_prompt)
117
- print(out_img)
118
- url=f'https://johann22-chat-diffusion-describe.hf.space/file={out_img}'
119
- print(url)
120
- uid = uuid.uuid4()
121
- #urllib.request.urlretrieve(image, 'tmp.png')
122
- #out=Image.open('tmp.png')
123
- r = requests.get(url, stream=True)
124
- if r.status_code == 200:
125
- out = Image.open(io.BytesIO(r.content))
126
- #yield ([(purpose,out_prompt)],out)
127
- yield (history,out)
128
- else:
129
- yield ([(purpose,"an Error occured")],None)
130
  ################################################
131
  style="""
132
  .top_head{
@@ -161,7 +171,7 @@ with gr.Blocks(css=style) as iface:
161
  clear = gr.ClearButton([msg, chatbot])
162
  with gr.Column(scale=2):
163
  sumbox=gr.Image(label="Image")
164
- sub_b = submit_b.click(run, [msg,chatbot,model_drop,chat_model_drop,agent_choice],[chatbot,sumbox],concurrency_limit=20)
165
- sub_e = msg.submit(run, [msg, chatbot,model_drop,chat_model_drop,agent_choice], [chatbot,sumbox],concurrency_limit=20)
166
  stop_b.click(None,None,None, cancels=[sub_b,sub_e])
167
  iface.queue(default_concurrency_limit=None).launch()
 
73
  for response in stream:
74
  resp += response.token.text
75
  return resp
76
+
77
+ def run_idefics(in_prompt,history,model_drop):
78
+ client = InferenceClient("HuggingFaceM4/idefics-9b-instruct")
79
+ print(f'history :: {history}')
80
+ prompt=format_prompt(in_prompt,history)
81
+ seed = random.randint(1,1111111111111111)
82
+ print (seed)
83
+ generate_kwargs = dict(
84
+ temperature=1.0,
85
+ max_new_tokens=1048,
86
+ top_p=0.99,
87
+ repetition_penalty=1.0,
88
+ do_sample=True,
89
+ seed=seed,
90
+ )
91
+ content = agent.GENERATE_PROMPT + prompt
92
+ print(content)
93
+ stream = client.text_generation(content, **generate_kwargs, stream=True, details=True, return_full_text=False)
94
+ resp = ""
95
+ for response in stream:
96
+ resp += response.token.text
97
+ print (resp)
98
+ return resp
99
 
100
 
101
  def generate(purpose,history,chat_drop):
 
103
  out_prompt = run_gpt(purpose,history,chat_drop)
104
  return out_prompt
105
 
106
def describe(purpose, history, image, chat_drop, extra_drop=None):
    """Ask the idefics model to describe *image* in the context of *purpose*.

    The image is appended to the prompt as a markdown image link and the
    combined prompt is forwarded to run_idefics.

    Parameters:
        purpose: user prompt text.
        history: chat history forwarded to the model.
        image: path/URL of the image to describe.
        chat_drop: dropdown selection forwarded to run_idefics (which
            currently ignores it — the idefics model is pinned).
        extra_drop: optional fifth positional argument. run() calls
            describe(purpose, history, image, model_drop, chat_drop) with
            FIVE arguments, which made the original 4-parameter signature
            raise TypeError; when supplied, this value is treated as the
            chat dropdown selection. Existing 4-argument callers are
            unaffected.
    """
    print(history)
    if extra_drop is not None:
        # Called from run() as (..., model_drop, chat_drop): the last
        # argument is the real chat dropdown value.
        chat_drop = extra_drop
    purpose = f"{purpose},![]({image})"
    out_prompt = run_idefics(purpose, history, chat_drop)
    return out_prompt
111
 
112
+ def run(purpose,history,image,model_drop,chat_drop,choice):
113
  if choice == "Generate":
114
  #out_img = infer(out_prompt)
115
  out_prompt=generate(purpose,history,chat_drop)
 
133
  yield ([(purpose,"an Error occured")],None)
134
  if choice == "Describe":
135
  #out_img = infer(out_prompt)
136
+ out_prompt=describe(purpose,history,image,model_drop,chat_drop)
137
  history.append((purpose,out_prompt))
138
  yield (history,None)
139
+
 
 
 
 
 
 
 
 
 
 
 
 
 
 
140
  ################################################
141
  style="""
142
  .top_head{
 
171
  clear = gr.ClearButton([msg, chatbot])
172
  with gr.Column(scale=2):
173
  sumbox=gr.Image(label="Image")
174
+ sub_b = submit_b.click(run, [msg,chatbot,sumbox,model_drop,chat_model_drop,agent_choice],[chatbot,sumbox],concurrency_limit=20)
175
+ sub_e = msg.submit(run, [msg, chatbot,sumbox,model_drop,chat_model_drop,agent_choice], [chatbot,sumbox],concurrency_limit=20)
176
  stop_b.click(None,None,None, cancels=[sub_b,sub_e])
177
  iface.queue(default_concurrency_limit=None).launch()