Upload folder using huggingface_hub
- configuration_sa2va_chat.py +1 -1
- modeling_sa2va_chat.py +1 -1
configuration_sa2va_chat.py
CHANGED
@@ -39,7 +39,6 @@ class Sa2VAChatConfig(PretrainedConfig):
             max_dynamic_patch=6,
             **kwargs):
         super().__init__(**kwargs)
-
         if vision_config is None:
             vision_config = {}
             logger.info('vision_config is None. Initializing the InternVisionConfig with default values.')
@@ -49,6 +48,7 @@ class Sa2VAChatConfig(PretrainedConfig):
             logger.info('llm_config is None. Initializing the LlamaConfig config with default values (`LlamaConfig`).')

         self.vision_config = InternVisionConfig(**vision_config)
+
         if llm_config['architectures'][0] == 'LlamaForCausalLM':
             self.llm_config = LlamaConfig(**llm_config)
         elif llm_config['architectures'][0] == 'InternLM2ForCausalLM':
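As context for the constructor shown above, here is a minimal sketch of how the resulting config is typically loaded and inspected; it assumes the repo's remote code is reachable through AutoConfig, and the model id is an illustrative placeholder, not something taken from this commit.

    # Illustrative sketch only: the model id and the remote-code loading path
    # are assumptions, not part of this commit.
    from transformers import AutoConfig

    cfg = AutoConfig.from_pretrained("ByteDance/Sa2VA-4B", trust_remote_code=True)

    # vision_config is built as an InternVisionConfig; the concrete class of
    # llm_config follows llm_config['architectures'][0] in the dispatch above
    # (e.g. LlamaConfig when architectures[0] == 'LlamaForCausalLM').
    print(type(cfg.vision_config).__name__)
    print(type(cfg.llm_config).__name__)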
modeling_sa2va_chat.py
CHANGED
@@ -513,7 +513,7 @@ class Sa2VAChatModel(PreTrainedModel):
             attention_mask=attention_mask.to(device),
             generation_config=generation_config,
             output_hidden_states=output_hidden_states,
-            return_dict=return_dict,
+            # return_dict=return_dict,
             use_cache=True,
             **generate_kwargs,
         )
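For reference, a minimal usage sketch that exercises the patched call path above; the model id, dtype, and loading flow are assumptions based on the standard trust_remote_code workflow, not details taken from this commit.

    # Illustrative sketch only: model id and dtype are assumptions.
    import torch
    from transformers import AutoModel, AutoTokenizer

    path = "ByteDance/Sa2VA-4B"  # hypothetical model id used for illustration
    tokenizer = AutoTokenizer.from_pretrained(path, trust_remote_code=True)
    model = AutoModel.from_pretrained(
        path,
        torch_dtype=torch.bfloat16,
        trust_remote_code=True,  # pulls configuration_sa2va_chat.py and modeling_sa2va_chat.py
    ).eval()
    # Generation requests are ultimately forwarded through the call shown above,
    # where this commit comments out the return_dict kwarg.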