lixinhao committed on
Commit
2cc4ab3
·
verified ·
1 Parent(s): ffa7227

Update modeling_videochat_flash.py

Browse files
Files changed (1) hide show
  1. modeling_videochat_flash.py +1 -1
modeling_videochat_flash.py CHANGED
@@ -679,7 +679,7 @@ class VideoChatFlashQwenForCausalLM(LlavaMetaForCausalLM, Qwen2ForCausalLM_Flash
679
 
680
  outputs = tokenizer.batch_decode(output_ids, skip_special_tokens=True)[0].strip()
681
  if outputs.endswith(stop_str):
682
- outputs = outputs[: -len(stop_str)]
683
 
684
  outputs = outputs.strip()
685
 
 
679
 
680
  outputs = tokenizer.batch_decode(output_ids, skip_special_tokens=True)[0].strip()
681
  if outputs.endswith(stop_str):
682
+ outputs = outputs[: -len(stop_str)]
683
 
684
  outputs = outputs.strip()
685