{ "experiment": { "tokenizer_checkpoint": "tatitok_bl32_vae.bin", "output_dir": "tatitok_bl32_vae" }, "model": { "vq_model": { "quantize_mode": "vae", "token_size": 16, "vit_enc_model_size": "base", "vit_dec_model_size": "large", "vit_enc_patch_size": 16, "vit_dec_patch_size": 16, "num_latent_tokens": 32, "finetune_decoder": false, "is_legacy": false } }, "dataset": { "preprocessing": { "crop_size": 256 } } }