# NOTE: "Spaces: Sleeping" banner text removed — it was Hugging Face Spaces
# page-scrape residue, not part of the program.
import os | |
import sys | |
# Add src directory to Python path for Hugging Face Spaces compatibility | |
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__)) | |
SRC_DIR = os.path.join(PROJECT_ROOT, "src") | |
sys.path.insert(0, SRC_DIR) | |
import json | |
import gradio as gr | |
from numpy import add | |
from novel_heroes.i18n import i18n | |
from novel_heroes.llm_call import LLMCall | |
from novel_heroes.mcp_server import get_book_content, get_book_list | |
# NOTE: In Gradio, global variables are SHARED across all users. | |
# https://www.gradio.app/guides/state-in-blocks#global-state | |
# Maximum length for book content in characters. | |
# If the book is too long, it will be truncated. | |
# NOTE: Gemma-3-4B has a context length of 128k in tokens | |
BOOK_CONTENT_MAX_LENGTH = 300 * 1000 | |
def load_book_content(book_name, progress=gr.Progress()):
    """Load a book's content and extract its heroes with the LLM.

    Args:
        book_name: Name of the book selected in the dropdown (may be empty).
        progress: Gradio progress tracker (injected by Gradio; the mutable
            default is the documented Gradio idiom).

    Returns:
        A 5-tuple matching the ``load_button.click`` outputs:
        (book_name, book_content, heroes, hero-dropdown update, status message).
    """
    # BUG FIX: the original early return produced only 2 values while the
    # click handler is wired to 5 outputs; always return a full 5-tuple.
    if not book_name:
        return (
            "",
            "",
            [],
            gr.Dropdown(choices=[], value=""),
            "Please select a book first.",
        )

    progress(0.1, desc="Loading book content...")
    book_content = get_book_content(book_name, BOOK_CONTENT_MAX_LENGTH)

    progress(0.5, desc="Extracting heroes from the book...")
    # BUG FIX: pre-initialize so the except branch below cannot raise
    # NameError when LLMCall()/listup_heroes fails before assignment.
    heroes = []
    try:
        call_llm = LLMCall()
        heroes = call_llm.listup_heroes(book_content)
        progress(1.0, desc="Complete!")
        if heroes:
            return (
                book_name,
                book_content,
                heroes,
                gr.Dropdown(choices=heroes, value=heroes[0]),
                f"Successfully loaded '{book_name}' and found {len(heroes)} heroes.",
            )
        return (
            book_name,
            book_content,
            heroes,
            gr.Dropdown(choices=[], value=""),
            f"Loaded '{book_name}' but no heroes were found.",
        )
    # Broad catch is deliberate: this is a UI boundary and the error is
    # surfaced to the user via the status message instead of crashing.
    except Exception as e:
        return (
            book_name,
            book_content,
            heroes,
            gr.Dropdown(choices=[], value=""),
            f"Error extracting heroes: {str(e)}",
        )
def custom_respond(
    message, history, book_name, book_content, selected_hero, lang_prompt
):
    """Stream a chat reply in the persona of the selected hero.

    Builds a role-play system prompt embedding the book text, then streams
    the LLM's partial responses.

    Args:
        message: Latest user message from the chat box.
        history: Prior chat history (Gradio "messages" format).
        book_name: Currently selected book title.
        book_content: Cached book text; re-fetched if empty.
        selected_hero: Hero/character the bot should impersonate.
        lang_prompt: Extra instruction controlling the reply language.

    Yields:
        Incremental response strings from the LLM.
    """
    # Guard: chatting is meaningless until a book and hero are chosen.
    if not book_name or not selected_hero:
        yield "Please select a book and hero first."
        return

    # The hidden textbox may be empty (e.g. after a page reload) — reload.
    if not book_content:
        book_content = get_book_content(book_name, BOOK_CONTENT_MAX_LENGTH)

    # Generate the persona system prompt.
    # fmt: off
    system_prompt = (
        f"You are {selected_hero}, a character from the book '{book_name}'. "
        "Behave and respond according to the personality and attitude of this character. "
        "If the character is unfriendly, respond unfriendly; if the character is kind, respond kindly. "
        # BUG FIX: trailing space added — the original ran this sentence
        # straight into the next one ("...in a book.Below is...").
        "You don't know that you are a character in a book. "
        "Below is the book content:\n\n"
        "====================\n"
        f"{book_content}\n"
        "====================\n"
        f"{lang_prompt}"
    )
    # fmt: on

    # Delegate streaming to the LLM client with our custom system prompt.
    call_llm = LLMCall()
    yield from call_llm.respond(message, history, system_prompt=system_prompt)
def gradio_ui():
    """Build and return the (un-launched) Gradio Blocks app.

    Tabs: a closure notice, the main Chat tab, and two utility tabs that
    expose the MCP tools (book list / book content) directly.
    """
    # Per-browser persisted state. Unlike module globals, BrowserState is
    # per-user (see the NOTE at the top of the file about shared globals).
    book_name = gr.BrowserState("")
    heroes = gr.BrowserState([])
    selected_hero = gr.BrowserState("")
    # get_book_list() returns a JSON-encoded list of book names.
    book_choices = json.loads(get_book_list())
    with gr.Blocks() as demo:
        with gr.Tab("CLOSED"):
            # Static notice: the backing Modal server has been shut down.
            gr.Markdown(
                # fmt: off
                "β οΈ**Note:** <br>"
                "The Modal server used in this project has been stopped. <br>"
                "If you would like to run it yourself, please clone this project via git and follow the instructions in the README.md to set up and run it."
                # fmt: on
            )
        with gr.Tab("Chat"):
            # Title
            with gr.Row():
                gr.Markdown("# Novel Heroes")
            # Status banner, updated by load/reset handlers below.
            with gr.Row():
                status_markdown = gr.Markdown(label="Status")
                status_markdown.value = (
                    # fmt: off
                    "Welcome! "
                    "Please select a book and push the button to load it.<br>"
                    "β οΈ**Note:** Cold start may take up to 3 minutes. Please wait patiently for the initial response."
                    # fmt: on
                )
            # Hidden textbox caching the book text so custom_respond can
            # receive it as an additional input without re-fetching.
            book_content = gr.Textbox(visible=False)
            with gr.Row():
                with gr.Column(scale=1):
                    # Book selection
                    with gr.Row():
                        book_dropdown = gr.Dropdown(
                            label="π Select a Book",
                            choices=book_choices,
                            interactive=True,
                        )
                    with gr.Row():
                        load_button = gr.Button(
                            "Load Book & Extract Heroes", variant="primary"
                        )
                    # Hero selection (populated by load_book_content).
                    with gr.Row():
                        hero_dropdown = gr.Dropdown(
                            label="π¦Έ Select a Hero", choices=[], interactive=True
                        )
                    # Outputs must stay in sync with load_book_content's
                    # 5-tuple return value (order matters).
                    load_button.click(
                        fn=load_book_content,
                        inputs=[book_dropdown],
                        outputs=[
                            book_name,
                            book_content,
                            heroes,
                            hero_dropdown,
                            status_markdown,
                        ],
                    )
                    def update_hero_selection(
                        hero,
                    ):
                        """Update selected hero."""
                        return hero
                    # Mirror the dropdown choice into persisted BrowserState.
                    hero_dropdown.change(
                        fn=update_hero_selection,
                        inputs=[hero_dropdown],
                        outputs=[selected_hero],
                    )
                    with gr.Row():
                        lang_textbox = gr.Textbox(
                            label="π Language Prompt",
                            value=i18n("lang_prompt"),
                        )
                    with gr.Row():
                        gr.Markdown(
                            "π‘ **Tip:** You can change the language prompt to make the chatbot reply in your favorite language!<br>"
                            "For example, try 'Please respond in Japanese.' or 'ζ₯ζ¬θͺγ§ηγγ¦γ' π<br>"
                            "<br>"
                            "π **Note:** Once you start chatting, you can't change the book or hero. <br>"
                            "If you want to pick a different one, just hit the reset button and start fresh! π<br>"
                        )
                with gr.Column(scale=2):
                    # Chat UI: additional_inputs are appended (in order) to
                    # custom_respond's arguments after (message, history).
                    chat_interface = gr.ChatInterface(
                        fn=custom_respond,
                        additional_inputs=[
                            book_dropdown,
                            book_content,
                            hero_dropdown,
                            lang_textbox,
                        ],
                        type="messages",
                        autofocus=False
                    )
                    reset_button = gr.Button("Reset", variant="secondary")
                    # Clears selections, language prompt, status, and the
                    # chat history (via chat_interface.chatbot_value).
                    reset_button.click(
                        lambda: (
                            gr.update(value=""),
                            gr.update(value=""),
                            gr.update(choices=[], value=None),
                            gr.update(value=i18n("lang_prompt")),
                            "Status reset.",
                            [],
                        ),
                        outputs=[
                            book_dropdown,
                            book_content,
                            hero_dropdown,
                            lang_textbox,
                            status_markdown,
                            chat_interface.chatbot_value,
                        ],
                    )
        with gr.Tab("List Books"):
            gr.Markdown("π Get the list of available books.")
            btn = gr.Button("Get")
            output_text = gr.Textbox(label="Books")
            btn.click(get_book_list, None, output_text)
        with gr.Tab("Book Content"):
            gr.Markdown("π Get the content of a book by its name.")
            # NOTE(review): the assignments below rebind the local names
            # `book_name` / `book_content` (previously the BrowserState and
            # the hidden Textbox). This appears harmless only because the
            # Chat tab's event wiring above already captured the original
            # objects — consider renaming these to avoid the shadowing.
            book_name = gr.Textbox(label="Book Name")
            book_len = gr.Number(label="Max Length", value=1000)
            book_content = gr.Textbox(label="Book Content", lines=20)
            btn = gr.Button("Get")
            btn.click(get_book_content, [book_name, book_len], book_content)
    return demo
if __name__ == "__main__":
    # Build the UI and serve it with the MCP server enabled.
    # NOTE: the app must be restarted whenever MCP tools are added/removed.
    app = gradio_ui()
    app.launch(i18n=i18n, mcp_server=True)