lixinhao committed
Commit be4ed9b · verified · 1 Parent(s): e8dd47d

Update modeling_videochat_flash.py

Files changed (1)
  1. modeling_videochat_flash.py +2 -2
modeling_videochat_flash.py CHANGED
@@ -683,8 +683,8 @@ class VideoChatFlashQwenForCausalLM(LlavaMetaForCausalLM, Qwen2ForCausalLM_Flash
 
         outputs = outputs.strip()
 
-        print(f"\033[91m== Question: \033[0m\n{prompt}\n")
-        print(f"\033[91m== Response: \033[0m\n{outputs}\n")
+        # print(f"\033[91m== Question: \033[0m\n{prompt}\n")
+        # print(f"\033[91m== Response: \033[0m\n{outputs}\n")
 
         if chat_history is None:
             chat_history = []
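The commit comments out the debug prints that echoed each question/response pair in ANSI red on every chat turn. A minimal sketch of an alternative design, assuming a hypothetical log_turn helper that is not part of the actual class: route the same output through Python's standard logging module, so it can be switched back on without editing the source.

import logging

logger = logging.getLogger("videochat_flash")

def log_turn(prompt: str, outputs: str) -> None:
    # Hypothetical helper, not part of the repository: emits the same
    # ANSI-red question/response pair the commented-out prints produced,
    # but only when DEBUG logging is enabled for this logger.
    logger.debug("\033[91m== Question: \033[0m\n%s", prompt)
    logger.debug("\033[91m== Response: \033[0m\n%s", outputs)

With that approach, callers could opt back into the debug output via logging.getLogger("videochat_flash").setLevel(logging.DEBUG) rather than uncommenting the print statements.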