Tanuj committed on
Commit
770027a
Β·
1 Parent(s): 46353ef

Tweak gradio app + agent

Browse files
Files changed (2) hide show
  1. src/yt_agent/agent.py +1 -1
  2. src/yt_gradio/app.py +5 -5
src/yt_agent/agent.py CHANGED
@@ -8,7 +8,7 @@ from src.yt_agent.tools import retrieve_tool, inspect_database_tool
8
  # Load environment variables
9
  load_dotenv()
10
  # Initialize models
11
- model = InferenceClientModel(provider="nebius", model="nebius/Qwen/Qwen3-30B-A3B", api_key=os.environ["NEBIUS_API_KEY"])
12
  agent = CodeAgent(
13
  tools=[
14
  retrieve_tool,
 
8
  # Load environment variables
9
  load_dotenv()
10
  # Initialize models
11
+ model = InferenceClientModel(provider="nebius", model="nebius/deepseek-ai/DeepSeek-V3-0324-fast", api_key=os.environ["NEBIUS_API_KEY"])
12
  agent = CodeAgent(
13
  tools=[
14
  retrieve_tool,
src/yt_gradio/app.py CHANGED
@@ -32,7 +32,7 @@ def natural_language_handler(query: str) -> str:
32
  str: Simulated or generated action and result.
33
  """
34
  chunks, response = retrieve_chunks(query, limit=5)
35
- return f"πŸ’¬ Got {len(chunks)} chunks for your request: β€œ{query}”. Response: {response}"
36
 
37
  def agent_chat(message: str, chat_history):
38
  if not message.strip():
@@ -91,7 +91,7 @@ def get_gradio_blocks():
91
  with gr.Blocks(title="YouTwo Memory Agent Interface") as demo:
92
  gr.Markdown("## 🧠 YouTwo Memory Agent Interface\nBuilt with Gradio + MCP Support for LLM Tool Integration")
93
 
94
- with gr.Tab("πŸ—£οΈ Natural Language Mode"):
95
  gr.Markdown("Input natural language requests for system actions.")
96
  with gr.Row():
97
  user_query = gr.Textbox(label="Type your query")
@@ -100,13 +100,13 @@ def get_gradio_blocks():
100
  query_btn.click(fn=natural_language_handler, inputs=user_query, outputs=query_out)
101
 
102
  with gr.Tab("βš™οΈ Agentic Chat"):
103
- gr.Markdown("Agentic Question and Answer1")
104
- chatbot = gr.Chatbot(label="KG Agent", height=500, show_label=True, container=True, type="messages",
105
  bubble_full_width=False,
106
  value=[
107
  {"role": "assistant", "content": "πŸ‘‹ Hello! I'm the YouTwo Agent, your intelligent assistant for really good memory. How can I help you today?"}
108
  ])
109
- user_input = gr.Textbox(placeholder="Type your question...", label="Message", lines=2, scale=4, show_label=False, value="Inspect the vector database. What documents are there?")
110
  #clear_button = gr.Button("πŸ—‘οΈ Clear Chat", size="sm")
111
  send_btn = gr.Button("Send", variant="primary", scale=1)
112
 
 
32
  str: Simulated or generated action and result.
33
  """
34
  chunks, response = retrieve_chunks(query, limit=5)
35
+ return f"*Retrieved {len(chunks)} chunks.*\n------\n{response}"
36
 
37
  def agent_chat(message: str, chat_history):
38
  if not message.strip():
 
91
  with gr.Blocks(title="YouTwo Memory Agent Interface") as demo:
92
  gr.Markdown("## 🧠 YouTwo Memory Agent Interface\nBuilt with Gradio + MCP Support for LLM Tool Integration")
93
 
94
+ with gr.Tab("πŸ—£οΈ Grounded Q&A"):
95
  gr.Markdown("Input natural language requests for system actions.")
96
  with gr.Row():
97
  user_query = gr.Textbox(label="Type your query")
 
100
  query_btn.click(fn=natural_language_handler, inputs=user_query, outputs=query_out)
101
 
102
  with gr.Tab("βš™οΈ Agentic Chat"):
103
+ gr.Markdown("Memory Tool-powered Chat")
104
+ chatbot = gr.Chatbot(label="YT Agent", height=500, show_label=True, container=True, type="messages",
105
  bubble_full_width=False,
106
  value=[
107
  {"role": "assistant", "content": "πŸ‘‹ Hello! I'm the YouTwo Agent, your intelligent assistant for really good memory. How can I help you today?"}
108
  ])
109
+ user_input = gr.Textbox(placeholder="Type your question...", label="Message", lines=2, scale=4, show_label=False, value="Inspect the database, then tell me about the connection of psychotherapy and theater.")
110
  #clear_button = gr.Button("πŸ—‘οΈ Clear Chat", size="sm")
111
  send_btn = gr.Button("Send", variant="primary", scale=1)
112