Update write_story.py
write_story.py  CHANGED  (+12 -12)
@@ -11,8 +11,8 @@ from config import save_novel_chapter
 from config import generate_uuid
 
 def print_step_costs(response, model):
-    input = response
-    output = response
+    input = response.usage.prompt_tokens
+    output = response.usage.completion_tokens
 
     if model == "gpt-4" or model == "gpt-4":
         input_per_token = 0.00003
@@ -60,7 +60,7 @@ def generate_plots(prompt):
 
     print_step_costs(response, llm_model_name)
 
-    return response
+    return response.choices[0].message.content.split('\n')
 
 
 def select_most_engaging(plots):
@@ -79,7 +79,7 @@ def select_most_engaging(plots):
 
     print_step_costs(response, llm_model_name)
 
-    return response
+    return response.choices[0].message.content
 
 
 def improve_plot(plot):
@@ -93,7 +93,7 @@ def improve_plot(plot):
 
     print_step_costs(response, llm_model_name)
 
-    return response
+    return response.choices[0].message.content
 
 
 def get_title(plot):
@@ -109,7 +109,7 @@ def get_title(plot):
 
     print_step_costs(response, llm_model_name)
 
-    return response
+    return response.choices[0].message.content
 
 
 def write_first_chapter(plot, first_chapter_title, writing_style, claude=True):
@@ -200,13 +200,13 @@ def write_first_chapter(plot, first_chapter_title, writing_style, claude=True):
             {"role": "system",
              "content": "You are a world-class fantasy writer. Your job is to take your student's rough initial draft of the first chapter of their fantasy novel, and rewrite it to be significantly better, with much more detail."},
             {"role": "user",
-             "content": f"Here is the high-level plot you asked your student to follow: {plot}\n\nHere is the first chapter they wrote: {response
+             "content": f"Here is the high-level plot you asked your student to follow: {plot}\n\nHere is the first chapter they wrote: {response.choices[0].message.content}\n\nNow, rewrite the first chapter of this novel, in a way that is far superior to your student's chapter. It should still follow the exact same plot, but it should be far more detailed, much longer, and more engaging. Here is a description of the writing style you should use: `{writing_style}`.Please respond in Chinese."}
         ]
     )
 
     print_step_costs(response, llm_model_name)
 
-    return improved_response
+    return improved_response.choices[0].message.content
 
 
 def write_chapter(previous_chapters, plot, chapter_title, claude=True):
@@ -246,7 +246,7 @@ def write_chapter(previous_chapters, plot, chapter_title, claude=True):
 
         print_step_costs(response, llm_model_name)
 
-        return response
+        return response.choices[0].message.content
     except:
         response = completion_with_backoff(
             model=llm_model_name,
@@ -259,7 +259,7 @@ def write_chapter(previous_chapters, plot, chapter_title, claude=True):
 
         print_step_costs(response, llm_model_name)
 
-        return response
+        return response.choices[0].message.content
 
 
 def generate_storyline(prompt, num_chapters):
@@ -291,7 +291,7 @@ def generate_storyline(prompt, num_chapters):
              "Your job is to take your student's rough initial draft of the storyline of a fantasy novel in Chinese, "
              "and rewrite it to be significantly better. Please respond in Chinese"},
             {"role": "user",
-             "content": f"Here is the draft storyline they wrote: {response
+             "content": f"Here is the draft storyline they wrote: {response.choices[0].message.content}\n\nNow, "
              f"rewrite the storyline in Chinese, in a way that is far superior to your student's version. "
              f"It should have the same number of chapters, "
              f"but it should be much improved in as many ways as possible. "
@@ -302,7 +302,7 @@ def generate_storyline(prompt, num_chapters):
 
     print_step_costs(improved_response, llm_model_name)
 
-    return improved_response
+    return improved_response.choices[0].message.content
 
 
 def write_to_file(prompt, content):
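
Every hunk above makes the same kind of change: instead of passing the raw completion response around, the code now pulls specific fields off it with attribute access (response.usage.prompt_tokens for cost accounting, response.choices[0].message.content for the generated text). The following is a minimal, self-contained sketch of that pattern, assuming the openai>=1.0 Python client; the repo's own completion_with_backoff wrapper and llm_model_name are not reproduced here, and every per-token rate except the 0.00003 gpt-4 input rate visible in the diff is an illustrative placeholder, not a value from this commit.

from openai import OpenAI

client = OpenAI()  # reads OPENAI_API_KEY from the environment


def print_step_costs(response, model):
    # v1 responses are typed objects, so token counts live on response.usage
    input_tokens = response.usage.prompt_tokens
    output_tokens = response.usage.completion_tokens

    if model == "gpt-4":
        input_per_token = 0.00003    # rate shown in the diff
        output_per_token = 0.00006   # assumed placeholder rate
    else:
        input_per_token = 0.0000015  # assumed placeholder rate
        output_per_token = 0.000002  # assumed placeholder rate

    cost = input_tokens * input_per_token + output_tokens * output_per_token
    print(f"{model}: {input_tokens} in / {output_tokens} out, approx. ${cost:.4f}")


response = client.chat.completions.create(
    model="gpt-4",
    messages=[{"role": "user", "content": "Outline a short fantasy plot."}],
)

print_step_costs(response, "gpt-4")

# The generated text is likewise an attribute chain rather than a dict lookup:
plot = response.choices[0].message.content

Under the pre-1.0 SDK the same fields were dictionary entries (for example response['usage']['prompt_tokens']), which is likely what the truncated removed lines in the hunks above originally accessed; the commit itself only shows their common prefix.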
|