from spinoza_project.source.backend.prompt_utils import SpecialTokens, make_chat_prompt
from langchain.prompts.chat import ChatPromptTemplate


def get_qa_prompts(config, prompts):
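    """Build the chat prompt templates used by the QA pipeline.

    Args:
        config: Configuration passed to ``SpecialTokens``.
        prompts: Mapping of prompt names (``"role_instruction"``,
            ``"source_prompt"``, ``"question_answering_prompt"``,
            ``"reformulation_prompt"``) to their prompt templates.

    Returns:
        A ``(chat_qa_prompt, chat_reformulation_prompt)`` tuple of
        ``ChatPromptTemplate`` objects.
    """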
    special_tokens = SpecialTokens(config)
    role_instruction = make_chat_prompt(prompts["role_instruction"], special_tokens)
    source_prompt = make_chat_prompt(prompts["source_prompt"], special_tokens)
    # memory_prompt = make_chat_prompt(prompts["memory_prompt"], special_tokens)
    question_answering_prompt = make_chat_prompt(
        prompts["question_answering_prompt"], special_tokens
    )
    reformulation_prompt = make_chat_prompt(
        prompts["reformulation_prompt"], special_tokens
    )
    # summarize_memory_prompt = make_chat_prompt(
    #     prompts["summarize_memory_prompt"], special_tokens
    # )
    chat_qa_prompt = ChatPromptTemplate.from_messages(
        [
            role_instruction,
            source_prompt,
            # memory_prompt,
            question_answering_prompt,
        ]
    )
    chat_reformulation_prompt = ChatPromptTemplate.from_messages([reformulation_prompt])
    # chat_summarize_memory_prompt = ChatPromptTemplate.from_messages([summarize_memory_prompt])
    return (
        chat_qa_prompt,
        chat_reformulation_prompt,
    )  # , chat_summarize_memory_prompt
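

# Example usage (a minimal sketch; the YAML file names and loading step below
# are assumptions for illustration, not part of this module):
#
#     import yaml
#
#     with open("config.yaml") as f:       # hypothetical config file
#         config = yaml.safe_load(f)
#     with open("prompts.yaml") as f:      # hypothetical prompts file
#         prompts = yaml.safe_load(f)
#
#     chat_qa_prompt, chat_reformulation_prompt = get_qa_prompts(config, prompts)
#
#     # The template variables expected by format_messages depend on the
#     # prompt texts defined in prompts.yaml.
#     messages = chat_qa_prompt.format_messages(sources="...", question="...")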