Spaces: Runtime error (file size: 873 Bytes)
import gradio as gr
from transformers import AutoModelForCausalLM, AutoTokenizer

model_id = "deepseek-ai/DeepSeek-V3"
tokenizer = AutoTokenizer.from_pretrained(model_id)
# trust_remote_code=True is required because DeepSeek-V3 ships custom modeling code.
# device_map="auto" (requires the accelerate package) places the weights on the
# available GPU(s) instead of leaving the model on the CPU.
model = AutoModelForCausalLM.from_pretrained(
    model_id, torch_dtype="auto", device_map="auto", trust_remote_code=True
)
def predict(message, history):
    # Build a single-turn prompt with the model's chat template
    # (the conversation history is ignored in this minimal example).
    prompt = tokenizer.apply_chat_template(
        [{"role": "user", "content": message}],
        tokenize=False,
        add_generation_prompt=True,
    )
    inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
    outputs = model.generate(**inputs, max_new_tokens=50)  # adjust max_new_tokens as needed
    # Decode only the newly generated tokens, not the echoed prompt.
    response = tokenizer.decode(
        outputs[0][inputs["input_ids"].shape[-1]:], skip_special_tokens=True
    )
    return response
# gr.ChatInterface only takes the chat function; it supplies its own textbox
# and chatbot components (gr.Chatbox does not exist, and ChatInterface does not
# accept inputs/outputs arguments).
iface = gr.ChatInterface(fn=predict)

iface.launch()
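The Space also needs its Python dependencies declared in a requirements.txt next to the app file. A minimal sketch for the code above (gradio itself is supplied by the Space's Gradio SDK setting; accelerate is only needed for device_map="auto"; versions are left unpinned here):

transformers
accelerate
torch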