added gr.Accordion to hide info text
app.py
CHANGED
@@ -83,33 +83,25 @@ rag_chain = create_retrieval_chain(multi_query_retriever, question_answer_chain)
 
 # Gradio interface
 info_text = """
-# Welcome to My Bookmarks Chatbot
 by [Mark Redito](https://markredito.com)
-
-This chatbot has access to my browser bookmarks from 2020 to mid-2024. It covers a variety of topics I’m interested in, including Art, Technology, and Culture.
+This chatbot has access to my browser bookmarks from 2020 to mid-2024. It covers a variety of topics I’m interested in, including Art, Technology, and Culture.
 
 ## You can use it in a few ways:
-
 - Extract specific links. For example: "Give me the links about Ethereum"
 - Get summaries of bookmarked content. Try: "Summarize 'How to do great work' by Paul Graham"
 - Ask general questions on various topics. Like: "What goes into a typical music recording contract?"
 
 ## Here's a quick rundown of how it works behind the scenes:
-
 - The system uses RAG (Retrieval-Augmented Generation) with a framework called Langchain. Basically, it helps the chatbot find and use relevant information.
 - The bookmarks are stored in a database called FAISS that makes searching super fast.
 - The brains of the operation is Claude 3 Haiku, a small and fast AI model by Anthropic.
 - When you ask a question, the system comes up with a few more related questions to help find the right links. It then searches the database and passes the best information to Claude to craft your answer.
 
 Keep in mind, if the chatbot can't find good information to answer your question, it'll let you know by saying something like "I don't know" or "I can't find it." And like any AI, it might make mistakes sometimes.
-
 This is mostly a fun project I put together for my own curiosity and enjoyment. While I can't make any promises about its performance, I hope you have fun exploring and maybe discover something interesting! Enjoy!
 """
 
-info_text_component = Markdown(info_text) # Load it once
-
 # The respond function
-
 def respond(message, history, max_tokens, temperature, top_p):
     # Process user message through RAG chain
     response = rag_chain.invoke({"input": message})
@@ -125,24 +117,28 @@ def chat_response(message, history, max_tokens, temperature, top_p):
     bot_message = respond(message, history, max_tokens, temperature, top_p)
     return bot_message
 
+# Refactored Gradio Interface
 with gr.Blocks(fill_height=True) as demo:
-
+    # Main header
+    gr.Markdown("# Welcome to My Bookmarks Chatbot")
+
+    # Collapsible info section
+    with gr.Accordion("Click to view info about the chatbot", open=False):
+        gr.Markdown(info_text)  # Load the info text into the collapsible section
 
     with gr.Accordion("Advanced Options", open=False):
         max_tokens_slider = gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens")
         temperature_slider = gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature")
         top_p_slider = gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)")
 
+    # Chatbot interface with additional options
     gr.ChatInterface(
         fn=chat_response,
         additional_inputs=[max_tokens_slider, temperature_slider, top_p_slider],
-        #title="RAG Chatbot",
-        #description="Ask me anything about the documents in my knowledge base!",
-        #scale=1,
         examples=[
             ["How to do great work by Paul Graham?", 512, 0.7, 0.95],
-            ["
-            ["
+            ["Give me the links about Ethereum from the notes", 512, 0.7, 0.95],
+            ["What goes into a typical music recording contract?", 512, 0.7, 0.95]
        ],
         retry_btn="Retry",
         undo_btn="Undo",
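For context on the `rag_chain` named in the first hunk header, below is a minimal sketch of how the pipeline described in `info_text` (a FAISS index, a multi-query retriever, and Claude 3 Haiku through LangChain) might be wired together. Only the component names come from this file; the embedding model, index path, prompt wording, and generation settings are placeholders chosen for illustration, not the app's actual configuration.

```python
# Minimal sketch of the retrieval pipeline described in info_text.
# Assumptions: embedding model, index path, and prompt text are placeholders;
# only FAISS, the multi-query retriever, Claude 3 Haiku, and
# create_retrieval_chain are named by the app itself.
from langchain_anthropic import ChatAnthropic
from langchain_community.vectorstores import FAISS
from langchain_huggingface import HuggingFaceEmbeddings
from langchain.retrievers.multi_query import MultiQueryRetriever
from langchain.chains import create_retrieval_chain
from langchain.chains.combine_documents import create_stuff_documents_chain
from langchain_core.prompts import ChatPromptTemplate

# Claude 3 Haiku handles both the query rewrites and the final answer.
llm = ChatAnthropic(model="claude-3-haiku-20240307", max_tokens=512, temperature=0.7)

# FAISS index of the bookmarks, built ahead of time (path and embeddings assumed).
embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2")
vectorstore = FAISS.load_local("faiss_index", embeddings, allow_dangerous_deserialization=True)

# The multi-query retriever asks the LLM for a few rephrasings of the question
# and pools the FAISS hits for all of them -- the "few more related questions"
# mentioned in info_text.
multi_query_retriever = MultiQueryRetriever.from_llm(
    retriever=vectorstore.as_retriever(), llm=llm
)

# Stuff the retrieved bookmarks into the prompt and answer from them only.
prompt = ChatPromptTemplate.from_messages([
    ("system", "Answer using only the following bookmark excerpts:\n\n{context}"),
    ("human", "{input}"),
])
question_answer_chain = create_stuff_documents_chain(llm, prompt)
rag_chain = create_retrieval_chain(multi_query_retriever, question_answer_chain)
```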
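The diff also truncates `respond()` right after the `rag_chain.invoke` call. With `create_retrieval_chain`, `invoke` returns a dict whose `"answer"` key holds the generated text (and `"context"` the retrieved documents), so the missing tail is presumably along the lines of the sketch below; the actual post-processing in app.py is not shown in this commit.

```python
# Sketch only: create_retrieval_chain returns {"answer": ..., "context": [...]},
# so respond() most likely just surfaces the answer. The slider arguments are
# part of the signature shown in the diff but are not passed to the chain here.
def respond(message, history, max_tokens, temperature, top_p):
    # Process user message through RAG chain
    response = rag_chain.invoke({"input": message})
    return response["answer"]
```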