aldigobbler committed
Commit f8b56ac · verified · 1 parent: e5e373e

Upload 2 files

Files changed (2)
  1. app.py +412 -0
  2. requirements.txt +4 -0
app.py ADDED
@@ -0,0 +1,412 @@
+ import os
+ from openai import OpenAI
+ import json
+ from dotenv import load_dotenv
+ from gradio_client import Client
+ import gradio as gr
+
+ load_dotenv()
+
+ openai_client = OpenAI(
+     base_url="https://api.studio.nebius.com/v1/",
+     api_key=os.environ.get("NEBIUS_API_KEY")
+ )
+
+ uplink_client = Client("aldigobbler/uplink-mcp")
+
+ MODEL = "Qwen/Qwen2.5-72B-Instruct-fast"
+
+ def search_web(q, num=5, start=1, site=None, date_restrict=None):
+     """Search the web using the Uplink search endpoint"""
+     try:
+         print(f"Searching web for query: {q} with num={num}, start={start}, site={site}, date_restrict={date_restrict}")
+         result = uplink_client.predict(
+             q=q,
+             num=num,
+             start=start,
+             site=site,
+             date_restrict=date_restrict,
+             api_name="/search_endpoint"
+         )
+         return json.dumps(result)
+     except Exception as e:
+         return json.dumps({"error": str(e)})
+
+ def search_news(q, num=5):
+     """Search news using the Uplink news endpoint"""
+     try:
+         print(f"Searching news for query: {q} with num={num}")
+         result = uplink_client.predict(
+             q=q,
+             num=num,
+             api_name="/search_news_endpoint"
+         )
+         return json.dumps(result)
+     except Exception as e:
+         return json.dumps({"error": str(e)})
+
+ def scrape_url(url):
+     """Scrape content from a URL using the Uplink scrape endpoint"""
+     try:
+         print(f"Scraping URL: {url}")
+         result = uplink_client.predict(
+             url=url,
+             api_name="/scrape_endpoint"
+         )
+         return json.dumps(result)
+     except Exception as e:
+         return json.dumps({"error": str(e)})
+
+ tools = [
+     {
+         "type": "function",
+         "function": {
+             "name": "search_web",
+             "description": "Search the internet for information",
+             "parameters": {
+                 "type": "object",
+                 "properties": {
+                     "q": {
+                         "type": "string",
+                         "description": "Search query",
+                     },
+                     "num": {
+                         "type": "integer",
+                         "description": "Number of results to return (default 5, max 5)",
+                         "default": 5
+                     },
+                     "start": {
+                         "type": "integer",
+                         "description": "Starting index for results (default 1)",
+                         "default": 1
+                     },
+                     "site": {
+                         "type": "string",
+                         "description": "Restrict search to specific site (optional)",
+                     },
+                     "date_restrict": {
+                         "type": "string",
+                         "description": "Date restriction: 'd1' (past day), 'w1' (past week), 'm1' (past month)",
+                         "enum": ["d1", "w1", "m1"]
+                     }
+                 },
+                 "required": ["q"],
+             },
+         },
+     },
+     {
+         "type": "function",
+         "function": {
+             "name": "search_news",
+             "description": "Search for recent news articles",
+             "parameters": {
+                 "type": "object",
+                 "properties": {
+                     "q": {
+                         "type": "string",
+                         "description": "News search query",
+                     },
+                     "num": {
+                         "type": "integer",
+                         "description": "Number of results to return (default 5, max 5)",
+                         "default": 5
+                     }
+                 },
+                 "required": ["q"],
+             },
+         },
+     },
+     {
+         "type": "function",
+         "function": {
+             "name": "scrape_url",
+             "description": "Scrape content from a specific URL",
+             "parameters": {
+                 "type": "object",
+                 "properties": {
+                     "url": {
+                         "type": "string",
+                         "description": "The URL to scrape content from",
+                     }
+                 },
+                 "required": ["url"],
+             },
+         },
+     }
+ ]
+
+ available_functions = {
+     "search_web": search_web,
+     "search_news": search_news,
+     "scrape_url": scrape_url,
+ }
+
+ def execute_tool_call(tool_call):
+     """Execute a single tool call and return the result"""
+     function_name = tool_call.function.name
+     function_to_call = available_functions[function_name]
+     function_args = json.loads(tool_call.function.arguments)
+
+     if function_name == "search_web":
+         return function_to_call(
+             q=function_args.get("q"),
+             num=function_args.get("num", 5),
+             start=function_args.get("start", 1),
+             site=function_args.get("site"),
+             date_restrict=function_args.get("date_restrict")
+         )
+     elif function_name == "search_news":
+         return function_to_call(
+             q=function_args.get("q"),
+             num=function_args.get("num", 5)
+         )
+     elif function_name == "scrape_url":
+         return function_to_call(url=function_args.get("url"))
+
+ def submit_message(message, history):
+     """Wrapper function to handle message submission and clear textbox"""
+     if not message.strip():
+         # nothing to send: clear the textbox and leave the history unchanged
+         # (yield instead of a bare return so the outputs actually update)
+         yield "", history
+         return
+
+     # start the chat and yield results
+     for result in chat(message, history):
+         yield result
+
+ def clear_textbox():
+     """Clear the textbox after submitting"""
+     return ""
+
+ def chat(message, history):
+     """Main chat function with streaming response"""
+     # convert the Gradio history into OpenAI-style messages, starting from the system prompt
+     messages = [
+         {
+             "role": "system",
+             "content": "You are a helpful assistant with access to web search, news search and web scraping tools. Use these tools to help answer user questions comprehensively. Be concise but thorough in your responses. There is NO LaTeX support. You can use markdown, and please link URLs as references.",
+         }
+     ]
+
+     for msg in history:
+         if msg["role"] in ["user", "assistant"]:
+             messages.append(msg)
+
+     messages.append({"role": "user", "content": message})
+     history.append({"role": "user", "content": message})
+     history.append({"role": "assistant", "content": ""})
+
+     max_iterations = 10
+     iteration = 0
+
+     try:
+         while iteration < max_iterations:
+             iteration += 1
+
+             response = openai_client.chat.completions.create(
+                 model=MODEL,
+                 messages=messages,
+                 stream=True,
+                 tools=tools,
+                 tool_choice="auto",
+                 max_completion_tokens=4096
+             )
+
+             tool_calls = []
+             current_content = ""
+
+             for chunk in response:
+                 try:
+                     if chunk.choices[0].delta.content:
+                         current_content += chunk.choices[0].delta.content
+                         history[-1]["content"] = current_content
+                         yield "", history
+
+                     # accumulate streamed tool-call deltas by index until the arguments are complete
+                     if chunk.choices[0].delta.tool_calls:
+                         for tool_call in chunk.choices[0].delta.tool_calls:
+                             if len(tool_calls) <= tool_call.index:
+                                 tool_calls.extend([None] * (tool_call.index + 1 - len(tool_calls)))
+
+                             if tool_calls[tool_call.index] is None:
+                                 tool_calls[tool_call.index] = {
+                                     "id": tool_call.id,
+                                     "function": {"name": tool_call.function.name, "arguments": ""}
+                                 }
+
+                             if tool_call.function.arguments:
+                                 tool_calls[tool_call.index]["function"]["arguments"] += tool_call.function.arguments
+
+                 except GeneratorExit:
+                     if current_content:
+                         history[-1]["content"] = current_content + "\n\n⚠️ **Generation was cancelled**"
+                     else:
+                         history[-1]["content"] = "⚠️ **Generation was cancelled**"
+                     return "", history
+                 except Exception as e:
+                     history[-1]["content"] = f"❌ **Error during generation**: {str(e)}"
+                     return "", history
+
+             # no tool calls were requested, so we're done
+             if not any(tool_calls):
+                 messages.append({"role": "assistant", "content": current_content})
+                 break
+
+             # add the assistant turn with its tool calls to the conversation
+             messages.append({
+                 "role": "assistant",
+                 "content": current_content,
+                 "tool_calls": [{"id": tc["id"], "type": "function", "function": tc["function"]} for tc in tool_calls if tc]
+             })
+
+             # execute tool calls and show progress
+             for i, tool_call_data in enumerate(tool_calls):
+                 if not tool_call_data:
+                     continue
+
+                 try:
+                     # lightweight stand-in exposing the .id/.function attributes execute_tool_call expects
+                     class MockToolCall:
+                         def __init__(self, data):
+                             self.id = data["id"]
+                             self.function = type('Function', (), {
+                                 'name': data["function"]["name"],
+                                 'arguments': data["function"]["arguments"]
+                             })()
+
+                     tool_call = MockToolCall(tool_call_data)
+                     function_name = tool_call.function.name
+                     function_args = json.loads(tool_call.function.arguments)
+
+                     tool_message = f"Using **{function_name}** with: {json.dumps(function_args, indent=2)}"
+                     history.append({"role": "assistant", "content": tool_message, "metadata": {"title": f"🛠️ Tool: {function_name}"}})
+                     yield "", history
+
+                     function_response = execute_tool_call(tool_call)
+
+                     messages.append({
+                         "tool_call_id": tool_call.id,
+                         "role": "tool",
+                         "name": function_name,
+                         "content": function_response,
+                     })
+
+                 except GeneratorExit:
+                     history.append({"role": "assistant", "content": "**Tool execution was cancelled**"})
+                     return "", history
+                 except Exception as e:
+                     error_msg = f"❌ **Error executing {function_name}**: {str(e)}"
+                     history.append({"role": "assistant", "content": error_msg})
+                     yield "", history
+                     continue
+
+             # drop the transient tool-progress entries from the visible history
+             history = [msg for msg in history if not (msg.get("metadata") and "Tool:" in msg.get("metadata", {}).get("title", ""))]
+
+     except GeneratorExit:
+         if history and history[-1]["role"] == "assistant":
+             if not history[-1]["content"]:
+                 history[-1]["content"] = "**Generation was cancelled**"
+             else:
+                 history[-1]["content"] += "\n\n**Generation was cancelled**"
+         return "", history
+     except Exception as e:
+         error_message = f"**Unexpected error**: {str(e)}"
+         if history and history[-1]["role"] == "assistant":
+             history[-1]["content"] = error_message
+         else:
+             history.append({"role": "assistant", "content": error_message})
+         return "", history
+
+     return "", history
+
+ def create_demo():
+     with gr.Blocks(
+         title="Uplink Demo",
+     ) as demo:
+         gr.Markdown(
+             """
+             # Uplink Demo
+             **Powered by Qwen 2.5 and Uplink Search**
+
+             Uplink is an MCP server that has the following tools:
+             - 🔍 **Web search** (Google-like results)
+             - 📰 **News search** (Latest articles)
+             - 🌐 **Web scraping** (Extract content from URLs)
+             """
+         )
+
+         chatbot = gr.Chatbot(
+             type="messages",
+             height=600,
+             show_copy_button=True,
+             show_share_button=True,
+             avatar_images=("👤", "🤖"),
+             bubble_full_width=False,
+         )
+
+         msg = gr.Textbox(
+             placeholder="Ask me anything! I can search the web, calculate, and more...",
+             lines=2,
+             max_lines=10,
+             show_label=False,
+             submit_btn="Send"
+         )
+
+         with gr.Row():
+             submit_btn = gr.Button("💬 Send", variant="primary", scale=2)
+             stop_btn = gr.Button("⏹️ Stop", variant="stop", scale=1)
+             clear_btn = gr.ClearButton([msg, chatbot], value="🗑️ Clear Chat", scale=1)
+
+         with gr.Row():
+             with gr.Column(scale=1):
+                 gr.Markdown("### 💡 Example Queries:")
+                 gr.Examples(
+                     examples=[
+                         "What is 25 * 4 + 10?",
+                         "Search for the latest news about artificial intelligence",
+                         "How many seconds would it take for a leopard at full speed to run through Pont des Arts?",
+                         "Find information about the Eiffel Tower and calculate how long it would take to walk around its base at 3 mph",
+                         "What's the weather like in Tokyo today?",
+                         "Scrape the content from https://www.example.com",
+                         "Calculate the compound interest on $1000 at 5% for 10 years"
+                     ],
+                     inputs=msg,
+                     label="Click on any example to try it:"
+                 )
+
+         submit_event = msg.submit(
+             submit_message,
+             inputs=[msg, chatbot],
+             outputs=[msg, chatbot],
+             show_progress="minimal",
+             api_name="chat_submit"
+         )
+
+         click_event = submit_btn.click(
+             submit_message,
+             inputs=[msg, chatbot],
+             outputs=[msg, chatbot],
+             show_progress="minimal",
+             api_name="chat_click"
+         )
+
+         stop_btn.click(
+             None,
+             None,
+             None,
+             cancels=[submit_event, click_event],
+             api_name="stop_generation"
+         )
+
+         gr.Markdown(
+             """
+             ---
+             <div align="center">
+             <sub>Tools: Web Search • News Search • Web Scraping • Calculator</sub><br>
+             <sub>Powered by Qwen 2.5-72B-Instruct via Nebius AI</sub><br>
+             <sub>💡 Tip: Click the "Stop" button to cancel generation at any time</sub>
+             </div>
+             """
+         )
+
+     return demo
+
+ if __name__ == "__main__":
+     demo = create_demo()
+     demo.launch()
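
The three helper functions in app.py (search_web, search_news, scrape_url) are thin wrappers around the aldigobbler/uplink-mcp Space, so the same endpoints can be exercised directly with gradio_client. A minimal sketch, assuming the public Space is reachable and reusing the api_name values from the file above (the query strings are illustrative):

# Minimal sketch: call the Uplink endpoints that app.py wraps, directly via gradio_client.
from gradio_client import Client
import json

client = Client("aldigobbler/uplink-mcp")

# Same keyword arguments that search_web() forwards to the Space.
web = client.predict(q="gradio mcp servers", num=3, start=1, site=None,
                     date_restrict=None, api_name="/search_endpoint")

# News search and URL scraping endpoints.
news = client.predict(q="artificial intelligence", num=3, api_name="/search_news_endpoint")
page = client.predict(url="https://www.example.com", api_name="/scrape_endpoint")

print(json.dumps({"web": web, "news": news, "page": page}, indent=2, default=str))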
requirements.txt ADDED
@@ -0,0 +1,4 @@
+ gradio
+ openai
+ gradio_client
+ python-dotenv
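
app.py expects NEBIUS_API_KEY to be present in the environment (picked up by load_dotenv), and the four requirements above cover everything it imports. A minimal local-launch sketch, assuming app.py is importable from the working directory; the key value is a placeholder, not a real credential:

# Local-run sketch: provide the key in-process instead of a .env file.
import os
os.environ.setdefault("NEBIUS_API_KEY", "<your-nebius-api-key>")

from app import create_demo  # importing app.py also connects to the Uplink Space

create_demo().launch()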