{
  "bits": 4,
  "group_size": 128,
  "sym": true,
  "data_type": "int",
  "seqlen": 1024,
  "batch_size": 1,
  "gradient_accumulate_steps": 8,
  "dataset": "liuhaotian/llava_conv_58k",
  "autoround_version": "0.6.1.dev",
  "block_name_to_quantize": "model.language_model.layers",
  "quant_method": "auto-round",
  "packing_format": "auto_round:auto_gptq"
}
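
The config above is the `quantization_config` that AutoRound writes out after tuning: symmetric int4 weights with a group size of 128, calibrated on `liuhaotian/llava_conv_58k` with a sequence length of 1024, batch size 1, and 8 gradient-accumulation steps, quantizing only the language-model blocks and packing in the GPTQ-compatible layout. Below is a minimal sketch of how a run with these settings might look using the `auto_round` Python API. It is not the exact command used for this checkpoint: the model id and output path are placeholders, the exact `AutoRound` keyword names and the `quantize_and_save` call are assumptions that may vary by library version, and a multimodal model (as implied by `block_name_to_quantize` and the LLaVA calibration set) would normally go through the library's multimodal entry point instead.

```python
# Hypothetical reproduction sketch; API details assumed, not taken from this repo.
from transformers import AutoModelForCausalLM, AutoTokenizer
from auto_round import AutoRound

model_id = "path/to/base-model"  # hypothetical placeholder
model = AutoModelForCausalLM.from_pretrained(model_id, torch_dtype="auto")
tokenizer = AutoTokenizer.from_pretrained(model_id)

autoround = AutoRound(
    model,
    tokenizer,
    bits=4,                               # "bits": 4
    group_size=128,                       # "group_size": 128
    sym=True,                             # "sym": true
    seqlen=1024,                          # "seqlen": 1024
    batch_size=1,                         # "batch_size": 1
    gradient_accumulate_steps=8,          # "gradient_accumulate_steps": 8
    dataset="liuhaotian/llava_conv_58k",  # calibration dataset from the config
)

# Save in the auto_round format; the packed checkpoint records the
# "auto_round:auto_gptq" packing format seen in the config above.
autoround.quantize_and_save("./quantized_model", format="auto_round")
```

A checkpoint saved this way can then be loaded through `transformers` with `auto-round` installed, and the JSON above is what appears under `quantization_config` in its `config.json`.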