# Integrates all components into an agent
# import gradio as gr
import asyncio

from dotenv import load_dotenv
from llama_index.core.workflow import Context
from llama_index.core.agent.workflow import AgentWorkflow, ToolCallResult, AgentStream
from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI

from retriever import guest_info_retriever
from tools import get_weather_info, get_hub_stats, google_search

# Load environment variables (e.g. HF_TOKEN) before the Inference API client is created
load_dotenv()

llm = HuggingFaceInferenceAPI(model_name="Qwen/Qwen2.5-Coder-32B-Instruct")
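
# Combine the guest-info retriever and the other tools into a single agent workflow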
alfred = AgentWorkflow.from_tools_or_functions(
    [guest_info_retriever, get_weather_info, get_hub_stats, google_search], llm=llm
)
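
# A shared Context keeps conversation state, so follow-up questions can refer to earlier turns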
ctx = Context(alfred)


async def main():
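    # First question: the agent can consult the guest_info_retriever tool here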
    handler = alfred.run(
        "Tell me about Lady Ada Lovelace.",
        ctx=ctx,
    )
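    # Stream intermediate events: tool-call results and the model's token deltas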
    async for ev in handler.stream_events():
        if isinstance(ev, ToolCallResult):
            print("")
            print("Called tool: ", ev.tool_name, ev.tool_kwargs, "=>", ev.tool_output)
        elif isinstance(ev, AgentStream):  # showing the thought process
            print(ev.delta, end="", flush=True)
print("π© Alfred's Response:") | |
response = await handler | |
print(response) | |
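
    # The second run reuses the same ctx, so the agent remembers who "she" refers to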
    handler2 = alfred.run("What projects is she currently working on?", ctx=ctx)
    async for ev in handler2.stream_events():
        if isinstance(ev, ToolCallResult):
            print("")
            print("Called tool: ", ev.tool_name, ev.tool_kwargs, "=>", ev.tool_output)
        elif isinstance(ev, AgentStream):  # showing the thought process
            print(ev.delta, end="", flush=True)
print("π© Alfred's Second Response:") | |
response2 = await handler2 | |
print(response2) | |
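

# Gradio UI left disabled; `greet` is not defined in this script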
# demo = gr.Interface(fn=greet, inputs="text", outputs="text")
# demo.launch()

if __name__ == "__main__":
    asyncio.run(main())