RecursionError: maximum recursion depth exceeded while calling a Python object

#5
by jackpudding - opened

Code I ran:
from PIL import Image
from transformers import AutoModelForCausalLM, AutoTokenizer, AutoProcessor
from starvector.data.util import process_and_rasterize_svg
import torch

starvector = AutoModelForCausalLM.from_pretrained("/local/path/", torch_dtype=torch.float16, trust_remote_code=True, local_files_only=False)
processor = starvector.model.processor
tokenizer = starvector.model.svg_transformer.tokenizer

starvector.cuda()
starvector.eval()

image_pil = Image.open('/assets/examples/sample-18.png')

image = processor(image_pil, return_tensors="pt")['pixel_values'].cuda()
if not image.shape[0] == 1:
    image = image.squeeze(0)
batch = {"image": image}

raw_svg = starvector.generate_im2svg(batch, max_length=4000)[0]
svg, raster_image = process_and_rasterize_svg(raw_svg)
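
From the traceback below, the failure seems to happen while the GPT-2 tokenizer is being constructed inside from_pretrained, so generation is never reached. A minimal sketch to isolate just that step (assumption: AutoTokenizer can resolve the tokenizer files from the same local checkpoint path; adjust the path if they live in a subfolder):

# Isolation sketch: build only the tokenizer, to confirm the RecursionError
# fires at construction time rather than during SVG generation.
import sys
from transformers import AutoTokenizer

print(sys.getrecursionlimit())  # CPython's default limit is 1000
tokenizer = AutoTokenizer.from_pretrained("/local/path/", trust_remote_code=True)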
Error:

RecursionError Traceback (most recent call last)
File ~/anaconda3/envs/starvector/lib/python3.11/site-packages/transformers/tokenization_utils_base.py:2292, in PreTrainedTokenizerBase._from_pretrained(cls, resolved_vocab_files, pretrained_model_name_or_path, init_configuration, token, cache_dir, local_files_only, _commit_hash, _is_local, trust_remote_code, *init_inputs, **kwargs)
2291 try:
-> 2292 tokenizer = cls(*init_inputs, **init_kwargs)
2293 except import_protobuf_decode_error():

File ~/anaconda3/envs/starvector/lib/python3.11/site-packages/transformers/models/gpt2/tokenization_gpt2.py:169, in GPT2Tokenizer.__init__(self, vocab_file, merges_file, errors, unk_token, bos_token, eos_token, pad_token, add_prefix_space, add_bos_token, **kwargs)
167 self.pat = re.compile(r"""'s|'t|'re|'ve|'m|'ll|'d| ?\p{L}+| ?\p{N}+| ?[^\s\p{L}\p{N}]+|\s+(?!\S)|\s+""")
--> 169 super().__init__(
170 errors=errors,
171 unk_token=unk_token,
172 bos_token=bos_token,
173 eos_token=eos_token,
174 pad_token=pad_token,
175 add_prefix_space=add_prefix_space,
176 add_bos_token=add_bos_token,
177 **kwargs,
178 )

File ~/anaconda3/envs/starvector/lib/python3.11/site-packages/transformers/tokenization_utils.py:435, in PreTrainedTokenizer.__init__(self, **kwargs)
434 # 4 init the parent class
--> 435 super().__init__(**kwargs)
437 # 4. If some of the special tokens are not part of the vocab, we add them, at the end.
438 # the order of addition is the same as self.SPECIAL_TOKENS_ATTRIBUTES following tokenizers
...
File :1294, in _get_parent_path(self)

File :1285, in _find_parent_path_names(self)

RecursionError: maximum recursion depth exceeded while calling a Python object
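
The last frames (_get_parent_path / _find_parent_path_names) appear to come from Python's import machinery rather than from transformers itself. A RecursionError can mean either genuine infinite recursion or a call chain that is deep but finite; raising the interpreter's limit before loading distinguishes the two. A diagnostic sketch, not a fix:

# Diagnostic sketch: if the load succeeds with a higher limit, the call
# chain was deep but finite; if it still overflows, something is recursing
# without a base case (e.g. a property that calls itself).
import sys
import torch
from transformers import AutoModelForCausalLM

sys.setrecursionlimit(10000)  # arbitrary value well above the default 1000
starvector = AutoModelForCausalLM.from_pretrained(
    "/local/path/", torch_dtype=torch.float16, trust_remote_code=True
)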
