hussamalafandi committed on
Commit
7976834
·
1 Parent(s): d5448bc

Refactor respond function to include system message in model input and improve clarity

Browse files
Files changed (1) hide show
  1. app.py +9 -4
app.py CHANGED
@@ -1,6 +1,7 @@
 
1
  import gradio as gr
2
  from langchain.chat_models import init_chat_model
3
- from langchain_core.messages import HumanMessage, AIMessage
4
 
5
  model = init_chat_model("gemini-2.0-flash", model_provider="google_genai")
6
 
@@ -17,6 +18,8 @@ def respond(
17
  Respond to user input using the model.
18
  """
19
  history_langchain_format = []
 
 
20
  for msg in dialog_history:
21
  if msg['role'] == "user":
22
  history_langchain_format.append(
@@ -24,9 +27,11 @@ def respond(
24
  elif msg['role'] == "assistant":
25
  history_langchain_format.append(AIMessage(content=msg['content']))
26
 
27
- history_langchain_format.append(HumanMessage(content=user_input))
28
-
29
- response = model.invoke(history_langchain_format)
 
 
30
  return response.content
31
 
32
 
 
1
+ from re import S
2
  import gradio as gr
3
  from langchain.chat_models import init_chat_model
4
+ from langchain_core.messages import HumanMessage, AIMessage, SystemMessage
5
 
6
  model = init_chat_model("gemini-2.0-flash", model_provider="google_genai")
7
 
 
18
  Respond to user input using the model.
19
  """
20
  history_langchain_format = []
21
+
22
+ # Add the dialog history to the history
23
  for msg in dialog_history:
24
  if msg['role'] == "user":
25
  history_langchain_format.append(
 
27
  elif msg['role'] == "assistant":
28
  history_langchain_format.append(AIMessage(content=msg['content']))
29
 
30
+ # Combine the system message, history, and user input into a single list
31
+ model_input = [SystemMessage(content=system_message)] + \
32
+ history_langchain_format + [HumanMessage(content=user_input)]
33
+
34
+ response = model.invoke(model_input)
35
  return response.content
36
 
37