awacke1 committed
Commit c4159b6 · verified · 1 Parent(s): e9d0ab4

Update app.py

Files changed (1)
  1. app.py +66 -88
app.py CHANGED
@@ -35,7 +35,7 @@ Site_Name = 'Scholarly-Article-Document-Search-With-Memory'
  title="🔬🧠ScienceBrain.AI"
  helpURL='https://huggingface.co/awacke1'
  bugURL='https://huggingface.co/spaces/awacke1'
- icons='🔬🧠'
+ icons='🔬'

  st.set_page_config(
  page_title=title,
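The hunk above cuts off inside the `st.set_page_config(...)` call, so the wiring of `icons` is not visible. A minimal sketch of how a single-emoji value is typically passed, assuming it feeds the `page_icon` argument (the remaining keyword arguments are not shown in the diff and are assumptions):

import streamlit as st

title = "🔬🧠ScienceBrain.AI"
icons = '🔬'  # this commit narrows the page icon to a single emoji

st.set_page_config(
    page_title=title,
    page_icon=icons,   # Streamlit accepts a single emoji or an image path here
    layout="wide",     # assumed; not part of the visible hunk
)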
@@ -153,10 +153,6 @@ def generate_html(local_files):
  def search_arxiv(query):
  start_time = time.strftime("%Y-%m-%d %H:%M:%S")
  client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
- search_query = query
- search_source = "Arxiv Search - Latest - (EXPERIMENTAL)"
- llm_model = "mistralai/Mixtral-8x7B-Instruct-v0.1"
- #st.markdown('### 🔎 ' + query)

  # Search 1 - Retrieve the Papers
  client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
@@ -167,19 +163,11 @@ def search_arxiv(query):
  "mistralai/Mixtral-8x7B-Instruct-v0.1",
  api_name="/update_with_rag_md"
  )
- lastpart = ''
- totalparts = ''
-
  Question = '### 🔎 ' + query + '\r\n' # Format for markdown display with links
  References = response1[0]
- References2 = response1[1]
-
- #st.markdown(results)

  # URLs from the response
  ReferenceLinks = extract_urls(References)
- #st.markdown(urls)
- #results = results + urls

  RunSecondQuery = True
  if RunSecondQuery:
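`extract_urls` is defined elsewhere in app.py and is not part of this diff. A hypothetical regex-based stand-in, only to illustrate what pulling links out of the returned reference markdown involves:

import re

def extract_urls_sketch(markdown_text: str) -> list[str]:
    # Illustrative only, not the app's actual extract_urls() implementation.
    pattern = r"https?://[^\s\)\]>]+"          # grab http(s) links, stop at whitespace/brackets
    return list(dict.fromkeys(re.findall(pattern, markdown_text)))  # dedupe, keep order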
@@ -190,7 +178,6 @@ def search_arxiv(query):
  True,
  api_name="/ask_llm"
  )
- #st.markdown(response2)
  if len(response2) > 10:
  Answer = response2
  SpeechSynthesis(Answer)
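Stripped of the app-specific bookkeeping, `search_arxiv` is two `gradio_client` calls against the same Space: `/update_with_rag_md` to retrieve papers and `/ask_llm` to summarize them. A condensed sketch using the endpoints and argument order shown in this diff (the example query and result count are placeholders; the tuple shape of `response1` follows the surrounding code):

from gradio_client import Client

client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")

# Step 1: RAG retrieval; response1[0] is reference markdown with links.
response1 = client.predict(
    "quantum error correction",              # example query
    20,                                      # number of results
    "Semantic Search - up to 10 Mar 2024",   # 'Search Source' dropdown value
    "mistralai/Mixtral-8x7B-Instruct-v0.1",  # 'LLM Model' dropdown value
    api_name="/update_with_rag_md",
)
references = response1[0]

# Step 2: ask the LLM for a summary grounded in the retrieved context.
response2 = client.predict(
    "quantum error correction",
    "mistralai/Mixtral-8x7B-Instruct-v0.1",
    True,                                    # 'Stream output' checkbox
    api_name="/ask_llm",
)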
@@ -208,14 +195,8 @@ def search_arxiv(query):
  st.write(f"Start time: {start_time}")
  st.write(f"Finish time: {end_time}")
  st.write(f"Elapsed time: {elapsed_seconds:.2f} seconds")
-
-
- #SpeechSynthesis(results)
  filename = generate_filename(query, "md")
  create_file(filename, query, results, should_save)
- #st.rerun() # refresh to see new files on sidebar
-
-
  return results

  def download_pdfs_and_generate_html(urls):
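The timing bookkeeping above formats timestamps with `strftime` and re-parses them to get a delta. A self-contained sketch of that exact pattern (note it only has one-second resolution; `time.time()` would be a finer-grained alternative, but the sketch mirrors the code as written):

import time

start_time = time.strftime("%Y-%m-%d %H:%M:%S")
# ... run the searches ...
end_time = time.strftime("%Y-%m-%d %H:%M:%S")

# Re-parse the formatted strings back into epoch seconds to compute the delta.
start_ts = time.mktime(time.strptime(start_time, "%Y-%m-%d %H:%M:%S"))
end_ts = time.mktime(time.strptime(end_time, "%Y-%m-%d %H:%M:%S"))
elapsed_seconds = end_ts - start_ts
print(f"Elapsed time: {elapsed_seconds:.2f} seconds")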
@@ -225,7 +206,6 @@ def download_pdfs_and_generate_html(urls):
  pdf_filename = os.path.basename(url)
  download_pdf(url, pdf_filename)
  pdf_links.append(pdf_filename)
-
  local_links_html = '<ul>'
  for link in pdf_links:
  local_links_html += f'<li><a href="{link}">{link}</a></li>'
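A self-contained sketch of the download-and-link pattern used by `download_pdfs_and_generate_html` and `download_pdf`; the timeout and `raise_for_status()` call are defensive additions, not part of the app's code:

import os
import requests

def download_pdf_sketch(url: str, filename: str) -> None:
    response = requests.get(url, timeout=30)   # the app calls requests.get(url) with no timeout
    response.raise_for_status()                # added guard; not in the original
    with open(filename, 'wb') as file:
        file.write(response.content)

def build_local_links_html(urls: list[str]) -> str:
    pdf_links = []
    for url in urls:
        pdf_filename = os.path.basename(url)
        download_pdf_sketch(url, pdf_filename)
        pdf_links.append(pdf_filename)
    # Same <ul>/<li> structure the app assembles for its local links HTML.
    items = ''.join(f'<li><a href="{link}">{link}</a></li>' for link in pdf_links)
    return '<ul>' + items + '</ul>'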
@@ -236,66 +216,6 @@ def download_pdf(url, filename):
  response = requests.get(url)
  with open(filename, 'wb') as file:
  file.write(response.content)
-
-
-
- # Show ArXiv Scholary Articles! ----------------*************----▶️ Semantic and Episodic Memory System
- def search_arxiv_old(query):
- start_time = time.strftime("%Y-%m-%d %H:%M:%S")
-
-
- client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
- search_query = query
- search_source = "Arxiv Search - Latest - (EXPERIMENTAL)" # "Semantic Search - up to 10 Mar 2024"
- llm_model = "mistralai/Mixtral-8x7B-Instruct-v0.1"
- st.markdown('### 🔎 ' + query)
-
- # Search 1 - Retrieve the Papers
-
-
- client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
- response1 = client.predict(
- query,
- 20,
- "Semantic Search - up to 10 Mar 2024", # Literal['Semantic Search - up to 10 Mar 2024', 'Arxiv Search - Latest - (EXPERIMENTAL)'] in 'Search Source' Dropdown component
- "mistralai/Mixtral-8x7B-Instruct-v0.1", # Literal['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'google/gemma-7b-it', 'None'] in 'LLM Model' Dropdown component
- api_name="/update_with_rag_md"
- )
- lastpart=''
- totalparts=''
- results = response1[0] # Format for markdown display with links
- results2 = response1[1] # format for subquery without links
- st.markdown(results)
-
- RunSecondQuery = False
- if RunSecondQuery:
- # Search 2 - Retieve the Summary with Papers Context and Original Query
- response2 = client.predict(
- query, # str in 'parameter_13' Textbox component
- "mistralai/Mixtral-8x7B-Instruct-v0.1",
- #"mistralai/Mistral-7B-Instruct-v0.2",
- #"google/gemma-7b-it",
- True, # bool in 'Stream output' Checkbox component
- api_name="/ask_llm"
- )
- st.markdown(response2)
- results = results + response2
-
- st.write('🔍Run of Multi-Agent System Paper Summary Spec is Complete')
- end_time = time.strftime("%Y-%m-%d %H:%M:%S")
- start_timestamp = time.mktime(time.strptime(start_time, "%Y-%m-%d %H:%M:%S"))
- end_timestamp = time.mktime(time.strptime(end_time, "%Y-%m-%d %H:%M:%S"))
- elapsed_seconds = end_timestamp - start_timestamp
- st.write(f"Start time: {start_time}")
- st.write(f"Finish time: {end_time}")
- st.write(f"Elapsed time: {elapsed_seconds:.2f} seconds")
-
- SpeechSynthesis(results) # Search History Reader / Writer IO Memory - Audio at Same time as Reading.
-
- filename=generate_filename(query, "md")
- create_file(filename, query, results, should_save)
- return results
-

  # Prompts for App, for App Product, and App Product Code
  PromptPrefix = 'Create a specification with streamlit functions creating markdown outlines and tables rich with appropriate emojis for methodical step by step rules defining the concepts at play. Use story structure architect rules to plan, structure and write three dramatic situations to include in the rules and how to play by matching the theme for topic of '
@@ -704,7 +624,7 @@ def FileSidebar():
  if st.button(key='Runmd', label = buttonlabel):
  user_prompt = file_contents
  #try:
- search_glossary(file_contents)
+ #search_glossary(file_contents)
  #except:
  #st.markdown('GPT is sleeping. Restart ETA 30 seconds.')

@@ -716,7 +636,8 @@
  filesearch = PromptPrefix2 + file_content_area
  st.markdown(filesearch)
  if st.button(key=rerun, label='🔍Re-Code' ):
- search_glossary(filesearch)
+ #search_glossary(filesearch)
+ search_arxiv(filesearch)

  #except:
  #st.markdown('GPT is sleeping. Restart ETA 30 seconds.')
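The commented-out try/except around both button handlers hints at the intended guard. A small sketch of the pattern this hunk now wires up, with `search_arxiv` stubbed so the snippet runs on its own and the key/prompt values used purely for illustration:

import streamlit as st

def search_arxiv(query):                         # stub standing in for the app's function
    return f"results for {query}"

filesearch = "Modify the program to add emojis"  # placeholder prompt
if st.button(key="rerun_recode", label='🔍Re-Code'):
    try:
        search_arxiv(filesearch)                 # new behavior after this commit (was search_glossary)
    except Exception:
        st.markdown('GPT is sleeping. Restart ETA 30 seconds.')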
@@ -806,7 +727,7 @@ def load_score(key):
  return 0


- # 🔍Run--------------------------------------------------------
+ # 🔍Search Glossary
  @st.cache_resource
  def search_glossary(query):
  #for category, terms in roleplaying_glossary.items():
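`@st.cache_resource` memoizes `search_glossary` across Streamlit reruns for identical queries. A common variation (an assumption, not something this commit does) is to cache only the expensive `Client` construction and leave the query path uncached:

import streamlit as st
from gradio_client import Client

@st.cache_resource
def get_arxiv_client() -> Client:
    # Build the Gradio client once per session instead of on every query.
    return Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")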
@@ -827,7 +748,9 @@ def search_glossary(query):
  client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
  response2 = client.predict(
  query, # str in 'parameter_13' Textbox component
- "mistralai/Mixtral-8x7B-Instruct-v0.1", # Literal['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'google/gemma-7b-it', 'None'] in 'LLM Model' Dropdown component
+ #"mistralai/Mixtral-8x7B-Instruct-v0.1", # Literal['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'google/gemma-7b-it', 'None'] in 'LLM Model' Dropdown component
+ #"mistralai/Mistral-7B-Instruct-v0.2", # Literal['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'google/gemma-7b-it', 'None'] in 'LLM Model' Dropdown component
+ "google/gemma-7b-it", # Literal['mistralai/Mixtral-8x7B-Instruct-v0.1', 'mistralai/Mistral-7B-Instruct-v0.2', 'google/gemma-7b-it', 'None'] in 'LLM Model' Dropdown component
  True, # bool in 'Stream output' Checkbox component
  api_name="/ask_llm"
  )
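Commenting model strings in and out shifts which positional argument lands in the 'LLM Model' slot, so it is easy to get wrong. A hypothetical tidier variant (not part of this commit) keeps the choice in one variable:

from gradio_client import Client

LLM_MODEL = "google/gemma-7b-it"      # the value this commit selects for /ask_llm
client = Client("awacke1/Arxiv-Paper-Search-And-QA-RAG-Pattern")
response2 = client.predict(
    "What is Mixture of Experts?",    # example query
    LLM_MODEL,
    True,                             # 'Stream output' checkbox
    api_name="/ask_llm",
)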
@@ -1466,6 +1389,7 @@ if GiveFeedback:
  filename = generate_filename(raw, 'txt')
  create_file(filename, raw, '', should_save)

+ # ⚙️q= Run ArXiv search from query parameters
  try:
  query_params = st.query_params
  query = (query_params.get('q') or query_params.get('query') or [''])
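The new comment labels the block that turns a `?q=` or `?query=` URL parameter into an ArXiv search. A minimal sketch of that flow; with `st.query_params` the values come back as plain strings, and the hand-off to `search_arxiv` is left commented so the snippet stands alone:

import streamlit as st

query_params = st.query_params     # e.g. a URL ending in ?q=mixture+of+experts
query = query_params.get('q') or query_params.get('query') or ''
if query:
    st.markdown(f'### 🔎 {query}')
    # result = search_arxiv(query)  # the app hands the query to search_arxiv here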
@@ -1558,6 +1482,59 @@ What is Lumiere?
  What is SORA?
  '''

+ import streamlit as st
+
+ personality_factors = """
+ 1. 🌈 Openness (Being open to new things)
+ - 🎭 Imagination (Enjoying fantasy and daydreaming)
+ - 🎨 Artistic Interests (Appreciating beauty and art)
+ - 🎸 Creativity (Coming up with new ideas)
+ - 🌍 Curiosity (Wanting to explore and learn)
+ - 🌿 Unconventional (Being different and unique)
+ - 🧩 Complexity (Enjoying deep thoughts and feelings)
+ - 🌌 Adventurousness (Seeking new experiences)
+
+ 2. 💼 Conscientiousness (Being organized and reliable)
+ - 🎯 Competence (Feeling capable and effective)
+ - 📊 Orderliness (Keeping things neat and tidy)
+ - 📅 Dutifulness (Following rules and doing what's right)
+ - 🏆 Achievement (Working hard to reach goals)
+ - 🧘‍♀️ Self-Discipline (Staying focused and in control)
+ - 🤔 Thoughtfulness (Thinking before acting)
+ - 🕰️ Time Management (Using time wisely)
+ - 🧽 Perfectionism (Wanting things to be just right)
+
+ 3. 🎉 Extraversion (Being outgoing and social)
+ - 🤗 Friendliness (Being kind and welcoming)
+ - 👥 Sociability (Enjoying being with others)
+ - 🗣️ Assertiveness (Speaking up and taking charge)
+ - ⚡ Energy (Being active and lively)
+ - 🎢 Excitement (Seeking thrills and fun)
+ - 😊 Cheerfulness (Feeling happy and positive)
+ - 🎤 Talkativeness (Enjoying conversation)
+ - 🌞 Enthusiasm (Showing excitement and interest)
+
+ 4. 🤝 Agreeableness (Being kind and cooperative)
+ - 🤲 Trust (Believing in others' goodness)
+ - 🌿 Honesty (Being truthful and sincere)
+ - 🤝 Cooperation (Working well with others)
+ - 🌸 Helpfulness (Being generous and caring)
+ - 🕊️ Compliance (Following rules and respecting authority)
+ - 🙏 Modesty (Being humble and down-to-earth)
+ - 💕 Empathy (Understanding others' feelings)
+ - 🫂 Compassion (Caring about others' well-being)
+
+ 5. 😔 Neuroticism (Feeling negative emotions easily)
+ - 😰 Anxiety (Worrying and feeling nervous)
+ - 😡 Anger (Getting upset and frustrated)
+ - 😢 Sadness (Feeling down and unhappy)
+ - 😳 Self-Consciousness (Feeling shy and uneasy)
+ - 🎢 Impulsiveness (Acting without thinking)
+ - 🍃 Vulnerability (Being easily hurt or upset)
+ - 🌪️ Moodiness (Having ups and downs in feelings)
+ - 🎭 Negativity (Focusing on the bad side of things)
+ """
+

  session_state = {}
  if "search_queries" not in session_state:
@@ -1571,7 +1548,7 @@ if example_input:
  if query:
  result = search_arxiv(query)
  #search_glossary(query)
- search_glossary(result)
+ #search_glossary(result)
  st.markdown(' ')

  #st.write("Search history:")
@@ -1629,10 +1606,10 @@ if AddAFileForContext:
  st.sidebar.markdown(get_table_download_link(filename), unsafe_allow_html=True)


- num_columns_video=st.slider(key="num_columns_video", label="Choose Number of Video Columns", min_value=1, max_value=15, value=4)
+ num_columns_video=st.slider(key="num_columns_video", label="Choose Number of Video Columns", min_value=1, max_value=15, value=2)
  display_videos_and_links(num_columns_video) # Video Jump Grid

- num_columns_images=st.slider(key="num_columns_images", label="Choose Number of Image Columns", min_value=1, max_value=15, value=4)
+ num_columns_images=st.slider(key="num_columns_images", label="Choose Number of Image Columns", min_value=1, max_value=15, value=2)
  display_images_and_wikipedia_summaries(num_columns_images) # Image Jump Grid

  display_glossary_grid(roleplaying_glossary) # Word Glossary Jump Grid - Dynamically calculates columns based on details length to keep topic together
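Both sliders now default to 2 columns instead of 4. The display helpers are defined elsewhere in app.py; a generic sketch of the slider-driven grid pattern they implement, with placeholder items:

import streamlit as st

num_columns = st.slider(key="num_columns_demo", label="Choose Number of Columns",
                        min_value=1, max_value=15, value=2)

items = [f"Item {i}" for i in range(12)]     # placeholder content
cols = st.columns(num_columns)
for i, item in enumerate(items):
    with cols[i % num_columns]:              # round-robin items across the columns
        st.write(item)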
@@ -1640,3 +1617,4 @@ display_glossary_grid(roleplaying_glossary) # Word Glossary Jump Grid - Dynamic
  num_columns_text=st.slider(key="num_columns_text", label="Choose Number of Text Columns", min_value=1, max_value=15, value=4)
  display_buttons_with_scores(num_columns_text) # Feedback Jump Grid

+ st.markdown(personality_factors)