{
"architectures": [
"T5ForMultiTasking"
],
"pooling": "max",
"sequence_classification_decoder_loss_scaling": 1.0,
"sequence_classification_loss_scaling": 1.0,
"t5_checkpoint": "google/flan-t5-large",
"token_classification_loss_scaling": 1.0,
"torch_dtype": "float32",
"transformers_version": "4.33.1"
}