Update agent.py
agent.py CHANGED

@@ -189,16 +189,32 @@ def build_graph(provider: str = "huggingface"):
     from langchain_core.messages import AIMessage

     def retriever(state: MessagesState):
-
-
-
-
+        query = state["messages"][-1].content
+        # 1. Embed the query to vector
+        query_embedding = embeddings.embed_query(query)  # list of floats
+
+        # 2. Call the RPC function directly
+        response = supabase.rpc(
+            'match_documents_langchain',
+            {
+                'match_count': 2,
+                'query_embedding': query_embedding
+            }
+        ).execute()
+
+        docs = response.data
+        if not docs or len(docs) == 0:
+            answer = "Sorry, I couldn't find an answer to your question."
+        else:
+            content = docs[0]['content']  # get content of the first matched doc
+            # Extract answer if it has 'Final answer :' pattern
             if "Final answer :" in content:
                 answer = content.split("Final answer :")[-1].strip()
             else:
                 answer = content.strip()

-
+        return {"messages": [AIMessage(content=answer)]}
+

     # builder = StateGraph(MessagesState)
     #builder.add_node("retriever", retriever)