```yaml
models:
  - model: context-labs/meta-llama-Llama-3.2-3B-Instruct-FP16
  - model: NousResearch/Hermes-3-Llama-3.2-3B
    parameters:
      density: 0.6
      weight: 0.5
merge_method: dare_ties
base_model: context-labs/meta-llama-Llama-3.2-3B-Instruct-FP16
parameters:
  int8_mask: true
dtype: bfloat16
```
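To actually run this DARE-TIES merge, a minimal sketch using mergekit's Python API is shown below. It assumes the configuration above has been saved as `config.yaml` and that mergekit is installed (`pip install mergekit`); the output directory `./merged` and the specific `MergeOptions` flags are illustrative choices, not part of the configuration itself.

```python
# Sketch: execute the merge described by config.yaml with mergekit.
# Assumes `pip install mergekit`; paths and option values are illustrative.
import yaml

from mergekit.config import MergeConfiguration
from mergekit.merge import MergeOptions, run_merge

# Load and validate the YAML merge configuration shown above.
with open("config.yaml", "r", encoding="utf-8") as f:
    merge_config = MergeConfiguration.model_validate(yaml.safe_load(f))

# Run the merge and write the resulting model to ./merged.
run_merge(
    merge_config,
    "./merged",
    options=MergeOptions(
        cuda=True,            # use a GPU if one is available
        copy_tokenizer=True,  # copy the base model's tokenizer into the output
        lazy_unpickle=False,
        low_cpu_memory=False,
    ),
)
```

The same merge can also be launched from the command line with mergekit's `mergekit-yaml` entry point, pointing it at the config file and an output directory.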