juancopi81 committed · Commit 87a5635
Parent(s): 3c6c416

Add description

Files changed:
- model.py +8 -2
- packages.txt +1 -1
- requirements.txt +2 -1
- utils.py +3 -0
model.py CHANGED
@@ -1,8 +1,7 @@
+import torch
 from typing import Tuple
-
 from transformers import AutoTokenizer, AutoModelForCausalLM
 
-
 # Initialize the model and tokenizer variables as None
 tokenizer = None
 model = None
@@ -17,9 +16,16 @@ def get_model_and_tokenizer() -> Tuple[AutoModelForCausalLM, AutoTokenizer]:
     """
     global model, tokenizer
     if model is None or tokenizer is None:
+        # Set device
+        device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
+
         # Load the tokenizer and the model
         tokenizer = AutoTokenizer.from_pretrained("juancopi81/lmd_8bars_tokenizer")
         model = AutoModelForCausalLM.from_pretrained(
             "juancopi81/lmd-8bars-2048-epochs20_v3"
         )
+
+        # Move model to device
+        model = model.to(device)
+
     return model, tokenizer
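For context, here is a minimal sketch of how model.py reads after this change, assembled from the two hunks above. The docstring text and any code between the module-level variables and get_model_and_tokenizer are not part of this diff and are assumed here.

import torch
from typing import Tuple
from transformers import AutoTokenizer, AutoModelForCausalLM

# Initialize the model and tokenizer variables as None
tokenizer = None
model = None


def get_model_and_tokenizer() -> Tuple[AutoModelForCausalLM, AutoTokenizer]:
    """Lazily load the model and tokenizer, placing the model on GPU when available.
    (Placeholder docstring; the original text is not shown in the diff.)
    """
    global model, tokenizer
    if model is None or tokenizer is None:
        # Set device: prefer CUDA when a GPU is present, otherwise fall back to CPU
        device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

        # Load the tokenizer and the model
        tokenizer = AutoTokenizer.from_pretrained("juancopi81/lmd_8bars_tokenizer")
        model = AutoModelForCausalLM.from_pretrained(
            "juancopi81/lmd-8bars-2048-epochs20_v3"
        )

        # Move model to the selected device
        model = model.to(device)

    return model, tokenizer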
packages.txt CHANGED
@@ -1,4 +1,4 @@
-
+libfluidsynth
 build-essential
 libasound2-dev
 libjack-dev
requirements.txt CHANGED
@@ -1,4 +1,5 @@
 note-seq
 matplotlib
 transformers
-pyfluidsynth
+pyfluidsynth
+torch
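Note: torch becomes an explicit requirement because model.py now imports torch directly for torch.device and the .to(device) call; before this change it was, at best, only available as a transitive dependency.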
utils.py CHANGED
@@ -71,6 +71,9 @@ def generate_new_instrument(
     # Encode the conditioning tokens.
     input_ids = tokenizer.encode(seed, return_tensors="pt")
 
+    # Move the input_ids tensor to the same device as the model
+    input_ids = input_ids.to(model.device)
+
     # Generate more tokens.
     eos_token_id = tokenizer.encode("TRACK_END")[0]
     generated_ids = model.generate(
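Taken together with the model.py change, the generation path now keeps the inputs and the model weights on the same device. A minimal usage sketch under that assumption follows; the seed value and the extra generate arguments are placeholders, since the rest of generate_new_instrument is not shown in this diff.

from model import get_model_and_tokenizer

model, tokenizer = get_model_and_tokenizer()

# Placeholder seed; the real conditioning tokens come from the caller of
# generate_new_instrument.
seed = "PIECE_START"

# Encode the conditioning tokens.
input_ids = tokenizer.encode(seed, return_tensors="pt")

# Move the input_ids tensor to the same device as the model, so generation
# works whether get_model_and_tokenizer placed the model on CPU or GPU.
input_ids = input_ids.to(model.device)

# Generate more tokens, stopping at the TRACK_END token.
eos_token_id = tokenizer.encode("TRACK_END")[0]
generated_ids = model.generate(
    input_ids,
    max_length=2048,  # placeholder value, not taken from the diff
    eos_token_id=eos_token_id,
)

# Decode back to the token-text representation.
generated_text = tokenizer.decode(generated_ids[0])
print(generated_text)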