import os

import gradio as gr
from langchain.embeddings import OpenAIEmbeddings
from langchain.vectorstores import FAISS

# Map the Space secret named "openai" onto the variable LangChain expects
os.environ["OPENAI_API_KEY"] = os.environ["openai"]

embeddings = OpenAIEmbeddings(model="text-embedding-3-large")
# Load the vector store
vector_store = FAISS.load_local(
    "yc_index", embeddings, allow_dangerous_deserialization=True
)
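# The "yc_index" folder is assumed to have been built offline beforehand, roughly as in
# the sketch below (the `companies` list and the `longDescription` field are hypothetical;
# the metadata keys match the ones read in retrieve_result):
#
#   from langchain.schema import Document
#   docs = [
#       Document(
#           page_content=f"{c['name']}: {c['oneLiner']} {c.get('longDescription', '')}",
#           metadata={k: c.get(k) for k in ("name", "oneLiner", "website", "status", "locations")},
#       )
#       for c in companies
#   ]
#   FAISS.from_documents(docs, embeddings).save_local("yc_index")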
# Create a retriever with the vector store; MMR re-ranks results for diversity
retriever = vector_store.as_retriever(search_type="mmr")
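# Optionally widen the candidate pool that MMR re-ranks from (FAISS defaults to fetch_k=20):
# retriever.search_kwargs["fetch_k"] = 50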
# Function to use the retriever on an input query
def retrieve_result(query, k=10):
    # gr.Number passes a float, so cast to int before handing k to the retriever
    retriever.search_kwargs["k"] = int(k)
    result = retriever.invoke(query)
    res = []
    for r in result:
        formatted_result = f"""
        <b>Name</b>: {r.metadata.get('name')}<br>
        <b>One Liner</b>: {r.metadata.get('oneLiner')}<br>
        <b>Website</b>: <a href='{r.metadata.get('website')}' target='_blank'>{r.metadata.get('website')}</a><br>
        <b>Status</b>: {r.metadata.get('status')}<br>
        <b>Locations</b>: {r.metadata.get('locations')}
        """
        res.append(formatted_result.strip())
    return "<br><br>".join(res)
# Set up the Gradio UI using Blocks
with gr.Blocks() as demo:
    gr.Markdown("# YCombinator Startups Semantic Search")
    # gr.Markdown("Enter a query to search the vector store for relevant results about legal tech startups.")
    with gr.Row():
        input_text = gr.Textbox(label="Describe your startup idea")
        k_value = gr.Number(label="Top K startups", value=5)
        submit_button = gr.Button("Submit")
    with gr.Row():
        output_text = gr.HTML(label="Result")
    # The first click handler clears the previous output; the second fills it with fresh results
    submit_button.click(fn=lambda query, k: '', inputs=[input_text, k_value], outputs=output_text)
    submit_button.click(fn=retrieve_result, inputs=[input_text, k_value], outputs=output_text)

demo.launch()