With the openai_harmony response format, I cannot generate a response.
#108 opened by Noah0627
Thanks for your excellent contributions to open source. I'm trying to follow the official "How to run gpt-oss with Transformers" tutorial.
Here is my code and the error:
from openai_harmony import (
    Conversation,
    DeveloperContent,
    HarmonyEncodingName,
    Message,
    Role,
    SystemContent,
    load_harmony_encoding,
)
from transformers import AutoModelForCausalLM, AutoTokenizer

encoding = load_harmony_encoding(HarmonyEncodingName.HARMONY_GPT_OSS)
# Build conversation
convo = Conversation.from_messages([
    Message.from_role_and_content(Role.SYSTEM, SystemContent.new()),
    Message.from_role_and_content(Role.DEVELOPER, DeveloperContent.new().with_instructions(system_prompt)),
    Message.from_role_and_content(Role.USER, user_prompt),
])
# Render prompt
prefill_ids = encoding.render_conversation_for_completion(convo, Role.ASSISTANT)
stop_token_ids = encoding.stop_tokens_for_assistant_actions()
# Load model
model_name = "openai/gpt-oss-20b"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name, torch_dtype="auto", device_map="auto")
# Generate outputs
outputs = model.generate(
    prefill_ids,
    eos_token_id=stop_token_ids,
    temperature=1e-4,
    max_new_tokens=2000,
)
---------------------------------------------------------------------------
AttributeError Traceback (most recent call last)
Cell In[48], line 2
1 # Generate
----> 2 outputs = model.generate(
3 input_ids=[prefill_ids],
4 max_new_tokens=2000,
5 eos_token_id=stop_token_ids,
6 )
File /opt/conda/envs/gpt-oss/lib/python3.11/site-packages/torch/utils/_contextlib.py:120, in context_decorator.<locals>.decorate_context(*args, **kwargs)
117 @functools.wraps(func)
118 def decorate_context(*args, **kwargs):
119 with ctx_factory():
--> 120 return func(*args, **kwargs)
File /opt/conda/envs/gpt-oss/lib/python3.11/site-packages/transformers/generation/utils.py:2290, in GenerationMixin.generate(self, inputs, generation_config, logits_processor, stopping_criteria, prefix_allowed_tokens_fn, synced_gpus, assistant_model, streamer, negative_prompt_ids, negative_prompt_attention_mask, use_model_defaults, custom_generate, **kwargs)
2286 # 3. Define model inputs
2287 inputs_tensor, model_input_name, model_kwargs = self._prepare_model_inputs(
2288 inputs, generation_config.bos_token_id, model_kwargs
2289 )
-> 2290 batch_size = inputs_tensor.shape[0]
2292 device = inputs_tensor.device
2293 self._prepare_special_tokens(generation_config, kwargs_has_attention_mask, device=device)
AttributeError: 'list' object has no attribute 'shape'
Of course, I could use the "tokenizer method" instead, but that isn't really the openai_harmony use case, is it?
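(By the "tokenizer method" I mean roughly the sketch below, using the model's built-in chat template rather than harmony rendering; the calls are from the standard Transformers API, and the exact arguments are my assumption.)

# Tokenizer-based alternative: let the chat template build the prompt.
messages = [
    {"role": "system", "content": system_prompt},
    {"role": "user", "content": user_prompt},
]
input_ids = tokenizer.apply_chat_template(
    messages,
    add_generation_prompt=True,
    return_tensors="pt",
).to(model.device)
outputs = model.generate(input_ids, max_new_tokens=2000)
print(tokenizer.decode(outputs[0][input_ids.shape[-1]:]))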
I think my issue is something very small... Can you help me?
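Reading the traceback, my guess is that model.generate expects a tensor, while render_conversation_for_completion returns a plain Python list of token IDs. Is the fix simply to wrap prefill_ids in a tensor, something like the untested sketch below? (I'm also assuming parse_messages_from_completion_tokens is the right call for decoding on the harmony side.)

import torch

# prefill_ids is a plain Python list of token IDs, so add a batch
# dimension and move it to the model's device before generating.
input_ids = torch.tensor([prefill_ids], device=model.device)

outputs = model.generate(
    input_ids,
    eos_token_id=stop_token_ids,
    temperature=1e-4,
    max_new_tokens=2000,
)

# Keep only the newly generated tokens and parse them back with harmony.
completion_ids = outputs[0][len(prefill_ids):].tolist()
entries = encoding.parse_messages_from_completion_tokens(completion_ids, Role.ASSISTANT)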