pillow>=10.3.0
onnxruntime>=1.12.0
gradio
wheel
ninja
transformers
spaces
decord
einops
bitsandbytes
sentencepiece
accelerate
git+https://github.com/Dao-AILab/flash-attention.git