obocloud committed
Commit 3a3d959 · verified · 1 Parent(s): 1792e28

Update app.py

Files changed (1)
  1. app.py +13 -27
app.py CHANGED
@@ -1,21 +1,13 @@
-from smolagents import CodeAgent,DuckDuckGoSearchTool, HfApiModel,load_tool,tool
-import datetime
+from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel, load_tool, tool
 import requests
-import pytz
+from bs4 import BeautifulSoup
 import yaml
 from tools.final_answer import FinalAnswerTool
-
 from Gradio_UI import GradioUI

 @tool
 def get_richest_people(_: str) -> str:
-    """Fetches a list of the richest people in the world from Bloomberg Billionaires Index.
-    Args:
-        _: Placeholder argument, not used.
-    """
-    import requests
-    from bs4 import BeautifulSoup
-
+    """Fetches a list of the richest people in the world from Bloomberg Billionaires Index."""
     url = "https://www.bloomberg.com/billionaires/"
     headers = {
         "User-Agent": "Mozilla/5.0"
@@ -23,6 +15,7 @@ def get_richest_people(_: str) -> str:

     try:
         response = requests.get(url, headers=headers)
+        response.raise_for_status()  # Raise an error for bad responses
         soup = BeautifulSoup(response.text, "html.parser")

         people = []
@@ -32,42 +25,35 @@ def get_richest_people(_: str) -> str:
             people.append(f"{name} - {net_worth}")

         if not people:
-            return "Unable to fetch data from Bloomberg at this time."
+            return "Unable to fetch data from Bloomberg at this time."
+
         return "Top richest people in the world (Bloomberg):\n" + "\n".join(people)

+    except requests.RequestException as req_err:
+        return f"Network error: {str(req_err)}"
     except Exception as e:
         return f"Failed to fetch data: {str(e)}"
-
-final_answer = FinalAnswerTool()

-# If the agent does not answer, the model is overloaded, please use another model or the following Hugging Face Endpoint that also contains qwen2.5 coder:
-# model_id='https://pflgm2locj2t89co.us-east-1.aws.endpoints.huggingface.cloud'
+final_answer = FinalAnswerTool()

 model = HfApiModel(
-    max_tokens=2096,
-    temperature=0.5,
-    model_id='Qwen/Qwen2.5-Coder-32B-Instruct',# it is possible that this model may be overloaded
-    custom_role_conversions=None,
+    max_tokens=2096,
+    temperature=0.5,
+    model_id='Qwen/Qwen2.5-Coder-32B-Instruct',
 )

-
 # Import tool from Hub
 image_generation_tool = load_tool("agents-course/text-to-image", trust_remote_code=True)

 with open("prompts.yaml", 'r') as stream:
     prompt_templates = yaml.safe_load(stream)
-
+
 agent = CodeAgent(
     model=model,
     tools=[final_answer, get_richest_people],
     max_steps=6,
     verbosity_level=1,
-    grammar=None,
-    planning_interval=None,
-    name=None,
-    description=None,
     prompt_templates=prompt_templates
 )

-
 GradioUI(agent).launch()
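The main behavioural change in this commit is the stricter HTTP handling inside `get_richest_people`: `response.raise_for_status()` turns 4xx/5xx responses into exceptions, and a dedicated `except requests.RequestException` branch reports network failures separately from parsing errors. The sketch below is a minimal, standalone rendering of that pattern; the CSS selectors and the `fetch_richest_people` name are placeholders, since the actual scraping loop sits outside the diff hunks, and Bloomberg may block non-browser clients, in which case the network-error branch is exactly what fires.

```python
# Standalone sketch of the error-handling pattern added in this commit.
# NOTE: the selectors below are placeholders -- the real scraping loop is in
# the unchanged part of app.py and is not shown in this diff.
import requests
from bs4 import BeautifulSoup


def fetch_richest_people(url: str = "https://www.bloomberg.com/billionaires/") -> str:
    headers = {"User-Agent": "Mozilla/5.0"}
    try:
        response = requests.get(url, headers=headers, timeout=10)
        response.raise_for_status()  # 4xx/5xx raises instead of parsing an error page
        soup = BeautifulSoup(response.text, "html.parser")

        people = []
        for row in soup.select(".table-row"):  # placeholder selector
            name = row.select_one(".name")
            net_worth = row.select_one(".net-worth")
            if name and net_worth:
                people.append(f"{name.get_text(strip=True)} - {net_worth.get_text(strip=True)}")

        if not people:
            return "Unable to fetch data from Bloomberg at this time."
        return "Top richest people in the world (Bloomberg):\n" + "\n".join(people)

    except requests.RequestException as req_err:
        # Network-level failures: timeouts, blocked requests, HTTP error statuses
        return f"Network error: {req_err}"
    except Exception as e:
        # Anything else, e.g. unexpected HTML structure
        return f"Failed to fetch data: {e}"


if __name__ == "__main__":
    print(fetch_richest_people())
```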