Omnibus committed on
Commit 727bcb3 · verified · 1 Parent(s): 43d8b3a

Update app.py

Files changed (1): app.py +2 -2
app.py CHANGED
@@ -5,7 +5,7 @@ client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
 #client = InferenceClient("Trelis/Mistral-7B-Instruct-v0.1-Summarize-16k")
 #client = InferenceClient("TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T")
 
-from prompts import CONSPIRACY
+from prompts import GAME_MASTER
 def format_prompt(message, history):
     prompt = "<s>"
     for user_prompt, bot_response in history:
@@ -34,7 +34,7 @@ def generate(
         #seed=42,
     )
 
-    formatted_prompt = format_prompt(f"{system_prompt}, {prompt}", history)
+    formatted_prompt = format_prompt(GAME_MASTER, {prompt}", history)
     stream = client.text_generation(formatted_prompt, **generate_kwargs, stream=True, details=True, return_full_text=False)
     output = ""
 
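
Note: the added call drops the f-string prefix of the replaced line and leaves a stray closing quote, so it is unlikely to parse as written. A minimal sketch of the likely intent, assuming GAME_MASTER is the prompt string exported by prompts.py and that format_prompt follows the usual Mixtral-Instruct [INST] template implied by the surrounding context (both are assumptions, not taken from the commit):

from typing import List, Tuple

GAME_MASTER = "You are the game master."  # placeholder; the real string lives in prompts.py

def format_prompt(message: str, history: List[Tuple[str, str]]) -> str:
    # Mixtral-Instruct style prompt: wrap each past turn in [INST] ... [/INST]
    prompt = "<s>"
    for user_prompt, bot_response in history:
        prompt += f"[INST] {user_prompt} [/INST] {bot_response}</s> "
    prompt += f"[INST] {message} [/INST]"
    return prompt

# Likely intent of the changed line: prepend GAME_MASTER to the user message,
# keeping the f-string that the previous revision used with system_prompt.
user_message = "Start a new adventure."      # hypothetical input
history: List[Tuple[str, str]] = []          # hypothetical empty chat history
formatted_prompt = format_prompt(f"{GAME_MASTER}, {user_message}", history)
print(formatted_prompt)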