{
  "_attn_implementation_autoset": true,
  "answer_space": [
    "1",
    "2",
    "3",
    "4",
    "bed",
    "books",
    "cabinet",
    "chair",
    "lamp",
    "paper",
    "photo",
    "picture",
    "pillow",
    "red",
    "sofa",
    "table",
    "white",
    "window"
  ],
  "base_model_name": "blip2",
  "classification_input_dim": 768,
  "dataset_name": "daquar",
  "image_text_hidden_size": 256,
  "image_token_index": null,
  "initializer_factor": 1.0,
  "initializer_range": 0.02,
  "interm_dim": 1024,
  "model_type": "blip-2",
  "multi_class_classifier": true,
  "num_query_tokens": 32,
  "qformer_config": {
    "model_type": "blip_2_qformer"
  },
  "text_config": {
    "model_type": "opt"
  },
  "transformers_version": "4.46.1",
  "use_decoder_only_language_model": true,
  "vision_config": {
    "model_type": "blip_2_vision_model"
  }
}