add null template
Files changed:
- .gitignore +2 -1
- app.py +13 -3
- utils.py +1 -0
.gitignore CHANGED
@@ -1,7 +1,8 @@
 en_core_web_sm-3.6.0/*
 daily_receive.*
 weekly_train_generate.*
-app[2-
+app[2-4].py
+app_test.py
 test_gradio_client.py
 boxes.py
 *.tar.gz
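For reference, the new app[2-4].py entry is a character-range glob: it covers app2.py, app3.py, and app4.py but not app_test.py, which is why the latter gets its own line. A quick illustrative check with Python's fnmatch, which interprets this character-class syntax the same way gitignore does (the filenames below are made up for the example):

from fnmatch import fnmatch

# Hypothetical filenames, just to show what the new pattern does and does not match.
for name in ["app.py", "app2.py", "app3.py", "app4.py", "app_test.py"]:
    print(name, fnmatch(name, "app[2-4].py"))
# app.py False, app2.py True, app3.py True, app4.py True, app_test.py False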
app.py CHANGED
@@ -4,6 +4,8 @@ import gradio as gr
 import os
 import json
 import requests
+import time
+from concurrent.futures import ThreadPoolExecutor
 from utils import is_chinese, process_image_without_resize, parse_response, templates_agent_cogagent, template_grounding_cogvlm, postprocess_text
 
 DESCRIPTION = '''<h2 style='text-align: center'> <a href="https://github.com/THUDM/CogVLM"> CogVLM & CogAgent Chat Demo</a> </h2>'''
@@ -22,6 +24,10 @@ default_chatbox = [("", "Hi, What do you want to know about this image?")]
 URL = os.environ.get("URL")
 
 
+def make_request(URL, headers, data):
+    response = requests.request("POST", URL, headers=headers, data=data, timeout=(60, 100))
+    return response.json()
+
 def post(
     input_text,
     temperature,
@@ -77,7 +83,7 @@ def post(
         input_text = postprocess_text(grounding_template, input_text)
     elif cogagent:
         model_use = "agent_chat"
-        if agent_template:
+        if agent_template is not None and agent_template != "do not use template":
             input_text = postprocess_text(agent_template, input_text)
 
     prompt = input_text
@@ -99,7 +105,11 @@ def post(
         'max_new_tokens': 2048
     })
     try:
-        response = requests.request("POST", URL, headers=headers, data=data, timeout=(60, 100)).json()
+        with ThreadPoolExecutor(max_workers=1) as executor:
+            future = executor.submit(make_request, URL, headers, data)
+            # time.sleep(15)
+            response = future.result()  # Blocks until the request is complete
+            # response = requests.request("POST", URL, headers=headers, data=data, timeout=(60, 100)).json()
     except Exception as e:
         print("error message", e)
         if is_zh:
@@ -190,7 +200,7 @@ def main():
     print(gr.__version__)
 
     demo.queue(concurrency_count=10)
-    demo.launch()
+    demo.launch(server_port=7862)
 
 if __name__ == '__main__':
     main()
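The main change in app.py is that the blocking POST to the model backend now runs on a worker thread: the request moves into make_request() and post() waits on the returned future. A minimal, self-contained sketch of that pattern, assuming a generic JSON endpoint (the httpbin.org URL and the payload fields below are placeholders, not the Space's real backend):

import json
from concurrent.futures import ThreadPoolExecutor

import requests


def make_request(url, headers, data):
    # timeout=(60, 100): 60 s to connect, 100 s to read, matching the diff
    response = requests.request("POST", url, headers=headers, data=data, timeout=(60, 100))
    return response.json()


if __name__ == "__main__":
    url = "https://httpbin.org/post"  # hypothetical test endpoint
    headers = {"Content-Type": "application/json"}
    data = json.dumps({"prompt": "hello", "max_new_tokens": 2048})

    # Submit the blocking call to a single worker thread, then wait for the reply.
    with ThreadPoolExecutor(max_workers=1) as executor:
        future = executor.submit(make_request, url, headers, data)
        result = future.result()  # blocks until the request completes
    print(result)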
utils.py CHANGED
@@ -121,6 +121,7 @@ def postprocess_text(template, text):
 
 # The templates is for CogAgent_Agent Template
 templates_agent_cogagent = [
+    "do not use template",
     "Can you advise me on how to <TASK>?",
     "I'm looking for guidance on how to <TASK>.",
     "What steps do I need to take to <TASK>?",
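The new first entry acts as a sentinel: when it is selected in the agent-template dropdown, the guard added in app.py leaves the prompt untouched. A small sketch of that interaction, using a hypothetical stand-in for postprocess_text (the real implementation lives earlier in utils.py and is assumed here to substitute the user text into the <TASK> slot):

def postprocess_text(template, text):
    # Assumed behavior for illustration only: fill the <TASK> slot with the user text.
    return template.replace("<TASK>", text)


def apply_agent_template(agent_template, input_text):
    # Mirrors the new condition in app.py: only rewrite the prompt when a real
    # template (not None and not the sentinel) was chosen.
    if agent_template is not None and agent_template != "do not use template":
        return postprocess_text(agent_template, input_text)
    return input_text


print(apply_agent_template("do not use template", "open the settings page"))
# -> "open the settings page" (prompt passes through unchanged)
print(apply_agent_template("Can you advise me on how to <TASK>?", "open the settings page"))
# -> "Can you advise me on how to open the settings page?"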