# mergekit configuration: linear merge of three 7B models with equal weights.
# NOTE(review): original file carried markdown-table pipe residue and a full
# verbatim duplicate of this config (duplicate top-level keys); both removed.
models:
  - model: datatab/Serbian-Mistral-Orca-Slim-v1
    parameters:
      weight: 1.0
  - model: mlabonne/AlphaMonarch-7B
    parameters:
      weight: 1.0
  - model: datatab/YugoGPT-Alpaca-v1-epoch1-good
    parameters:
      weight: 1.0
merge_method: linear  # weighted linear average of parameter tensors
dtype: float16        # output tensor dtype