danielhanchen's picture
Add files using upload-large-folder tool
f1ea482 verified
raw
history blame contribute delete
207 Bytes
# llm-compressor quantization recipe applying the FP8_DYNAMIC scheme.
# NOTE(review): indentation restored — the scraped source had every key flush
# left, which YAML parses as a flat mapping of null values instead of this tree.
default_stage:
  default_modifiers:
    QuantizationModifier:
      # Module classes to quantize; includes the Granite hybrid-MoE
      # parallel-experts linear class alongside plain Linear layers.
      targets: [Linear, GraniteMoeHybridParallelExpertsLinear]
      # Modules excluded from quantization: the output head, plus any module
      # whose name matches the 're:'-prefixed regex (MoE router layers —
      # note '.' before 'router' is a regex wildcard, likely intended as '\.').
      ignore: [lm_head, 're:.*block_sparse_moe.router']
      # FP8 weights with dynamic activation scales — presumably per-token
      # dynamic activation quantization; confirm against llm-compressor's
      # FP8_DYNAMIC preset documentation.
      scheme: FP8_DYNAMIC