Upload folder using huggingface_hub
README.md CHANGED
@@ -113,10 +113,10 @@ from transformers import AutoModelForCausalLM, AutoTokenizer
 modelpath = "Chain-GPT/Solidity-LLM"

 tokenizer = AutoTokenizer.from_pretrained(modelpath)
-model = AutoModelForCausalLM.from_pretrained(modelpath).cuda
+model = AutoModelForCausalLM.from_pretrained(modelpath).to("cuda")

 prompt = "Write a Solidity function to transfer tokens."
-inputs = tokenizer(prompt, return_tensors="pt").cuda
+inputs = tokenizer(prompt, return_tensors="pt").to("cuda")

 outputs = model.generate(**inputs, max_new_tokens=1400, pad_token_id=tokenizer.eos_token_id)
 generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
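The change replaces the bare `.cuda` attribute accesses with explicit `.to("cuda")` calls: in PyTorch, `model.cuda` without parentheses is only a reference to the bound method, so neither the model nor the input tensors were ever actually moved to the GPU. For reference, a minimal runnable sketch of the corrected README snippet, assuming a CUDA-capable GPU is available and with a final `print` added here to show the output:

```python
from transformers import AutoModelForCausalLM, AutoTokenizer

modelpath = "Chain-GPT/Solidity-LLM"

# Load the tokenizer and move the model to the GPU. .to("cuda") performs
# the transfer; the old bare `.cuda` attribute access did not.
tokenizer = AutoTokenizer.from_pretrained(modelpath)
model = AutoModelForCausalLM.from_pretrained(modelpath).to("cuda")

# Tokenize the prompt and move the input tensors to the same device.
prompt = "Write a Solidity function to transfer tokens."
inputs = tokenizer(prompt, return_tensors="pt").to("cuda")

# Generate a completion and decode it back to text.
outputs = model.generate(**inputs, max_new_tokens=1400, pad_token_id=tokenizer.eos_token_id)
generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
print(generated_text)
```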