import gradio as gr
from transformers import pipeline
import spaces

model_name = "openai/gpt-oss-20b"

# Load the pipeline once at startup; "auto" lets transformers pick the dtype
# and the device placement for the 20B checkpoint.
pipe = pipeline(
    "text-generation",
    model=model_name,
    torch_dtype="auto",
    device_map="auto",
)
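
# Optional sanity check (not part of the original app): report where the weights
# actually landed and which dtype "auto" resolved to. `pipe.model` is the
# underlying PreTrainedModel, so `.device` and `.dtype` are available; with
# device_map="auto" the reported device is that of the first parameter shard.
print(f"Loaded {model_name} on {pipe.model.device} (dtype={pipe.model.dtype})")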
default_text = """<|start|>system<|message|>You are ChatGPT, a large language model trained by OpenAI.
Knowledge cutoff: 2024-06
Current date: 2025-06-28
Reasoning: high
# Valid channels: analysis, commentary, final. Channel must be included for every message.
Calls to these tools must go to the commentary channel: 'functions'.<|end|><|start|>developer<|message|># Instructions
Use a friendly tone.
# Tools
## functions
namespace functions {
// Gets the location of the user.
type get_location = () => any;
// Gets the current weather in the provided location.
type get_current_weather = (_: {
// The city and state, e.g. San Francisco, CA
location: string,
format?: "celsius" | "fahrenheit", // default: celsius
}) => any;
// Gets the current weather in the provided list of locations.
type get_multiple_weathers = (_: {
// List of city and state, e.g. ["San Francisco, CA", "New York, NY"]
locations: string[],
format?: "celsius" | "fahrenheit", // default: celsius
}) => any;
} // namespace functions<|end|><|start|>user<|message|>What is the weather like in SF?<|end|><|start|>assistant"""
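
# Hedged alternative (not used by default): gpt-oss ships a chat template that
# renders this same Harmony layout, so the prompt above could also be built from
# a plain messages list instead of hand-written special tokens. The exact
# rendering of the developer message and tool section depends on the model's
# template, so treat this as a sketch rather than a drop-in replacement.
def build_prompt_from_messages(messages):
    # tokenize=False returns the rendered prompt string; add_generation_prompt
    # appends the trailing "<|start|>assistant" so the model continues from there.
    return pipe.tokenizer.apply_chat_template(
        messages,
        tokenize=False,
        add_generation_prompt=True,
    )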
@spaces.GPU  # request a GPU for the duration of each call on ZeroGPU hardware
def infer(prompt, max_tokens, temperature, top_k, top_p):
    # Note: the pipeline's parameter names differ slightly (max_new_tokens, not max_tokens).
    result = pipe(
        prompt,
        max_new_tokens=max_tokens,
        temperature=temperature,
        top_k=top_k,
        top_p=top_p,
        do_sample=True,
        return_full_text=True,
        skip_special_tokens=False,
    )
    return result[0]["generated_text"] if result and "generated_text" in result[0] else ""
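
# Hedged helper (not wired into the UI): with return_full_text=True the textbox
# shows prompt + completion, including Harmony control tokens. Per the Harmony
# article linked in the UI below, the user-facing answer sits in the "final"
# channel, typically rendered as "<|channel|>final<|message|>...". The token
# strings here are assumptions taken from that article; adjust them if the
# model's actual output differs.
def extract_final_channel(generated_text):
    marker = "<|channel|>final<|message|>"
    if marker not in generated_text:
        return generated_text
    answer = generated_text.split(marker, 1)[1]
    # Trim at the first end-of-message style token, if present.
    for stop in ("<|return|>", "<|end|>"):
        if stop in answer:
            answer = answer.split(stop, 1)[0]
    return answer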
with gr.Blocks() as demo:
    gr.Markdown("# GPT-OSS-20B Text Continuation Demo\n\n- Prompt format reference: [https://cookbook.openai.com/articles/openai-harmony](https://cookbook.openai.com/articles/openai-harmony)")
    with gr.Row():
        prompt = gr.Textbox(label="Enter the full prompt", value=default_text, lines=20, max_lines=40, show_copy_button=True)
    with gr.Row():
        max_tokens = gr.Slider(1, 2048, value=128, step=1, label="Max new tokens (max_tokens)")
        temperature = gr.Slider(0.1, 2.0, value=1.0, step=0.01, label="Temperature")
        top_k = gr.Slider(1, 100, value=40, step=1, label="Top-k")
        top_p = gr.Slider(0.0, 1.0, value=1.0, step=0.01, label="Top-p")
    # output = gr.Textbox(label="Model output", lines=20, max_lines=40, show_copy_button=True)
    # The generated text is written back into the prompt box so it can be extended iteratively.
    btn = gr.Button("Submit")
    btn.click(
        infer,
        inputs=[prompt, max_tokens, temperature, top_k, top_p],
        outputs=prompt,
    )
if __name__ == "__main__":
    demo.launch(share=True)