Spaces: Running on Zero
Update Gradio app with multiple files

- app.py: +7 -1
- requirements.txt: +1 -0
app.py CHANGED

@@ -10,6 +10,7 @@ from transformers import (
 )
 from huggingface_hub import login
 import threading
+import spaces
 
 """
 Gradio chat app for facebook/MobileLLM-Pro
@@ -77,6 +78,7 @@ def _history_to_messages(history: List[Tuple[str, str]]) -> List[Dict[str, str]]
     return messages
 
 
+@spaces.GPU(duration=120)
 def generate_stream(
     message: str,
     history: List[Tuple[str, str]],
@@ -150,6 +152,10 @@ with gr.Blocks(title="MobileLLM-Pro Chat") as demo:
     - **Version**: choose `instruct` to enable the model's chat template.
     - **Streaming** is enabled. Use the controls in the right panel.
     """)
+    gr.Markdown(
+        "<div style='text-align: center;'>Built with <a href='https://huggingface.co/spaces/akhaliq/anycoder'>anycoder</a></div>",
+        elem_id="anycoder_attribution"
+    )
 
     with gr.Row():
         with gr.Column(scale=3):
@@ -221,4 +227,4 @@ with gr.Blocks(title="MobileLLM-Pro Chat") as demo:
 
 if __name__ == "__main__":
     # For Spaces, Gradio will call `demo.launch()` automatically; locally we launch here.
-    demo.launch(server_name="0.0.0.0", server_port=int(os.getenv("PORT", 7860)))
+    demo.launch(server_name="0.0.0.0", server_port=int(os.getenv("PORT", 7860)))
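The substance of this commit is the ZeroGPU wiring: `import spaces` plus the `@spaces.GPU(duration=120)` decorator on `generate_stream`, which asks Spaces to attach a GPU to each call for up to 120 seconds and release it afterwards. Below is a minimal sketch of that pattern in isolation; only the import and the decorator come from the diff, while the model loading, `TextIteratorStreamer`, and generation parameters are illustrative assumptions about the rest of app.py, not its actual contents.

```python
# Minimal sketch of the ZeroGPU streaming pattern this commit introduces.
# Only `import spaces` and `@spaces.GPU(duration=120)` come from the diff;
# everything else is an assumption about how app.py might be structured.
import threading

import spaces
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

MODEL_ID = "facebook/MobileLLM-Pro"  # model named in the app's docstring

tokenizer = AutoTokenizer.from_pretrained(MODEL_ID)
model = AutoModelForCausalLM.from_pretrained(MODEL_ID, torch_dtype=torch.float16)
model.to("cuda")  # ZeroGPU allows this at startup; the GPU is attached per call


@spaces.GPU(duration=120)  # request a GPU slot for up to 120 s per invocation
def generate_stream(message: str):
    # Format a single-turn chat prompt and stream tokens back to the UI.
    input_ids = tokenizer.apply_chat_template(
        [{"role": "user", "content": message}],
        add_generation_prompt=True,
        return_tensors="pt",
    ).to(model.device)
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
    thread = threading.Thread(
        target=model.generate,
        kwargs={"input_ids": input_ids, "max_new_tokens": 256, "streamer": streamer},
    )
    thread.start()
    partial = ""
    for chunk in streamer:
        partial += chunk
        yield partial  # Gradio re-renders the chat output with each partial string
```

The `duration=120` argument bounds how long a single request may hold the GPU, while streaming from a background generation thread keeps the UI responsive during that window.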
requirements.txt CHANGED

@@ -1,2 +1,3 @@
 torch
 transformers
+accelerate
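The only dependency change is adding accelerate. A plausible reason, though an assumption since the model-loading code is not part of this diff, is that transformers refuses to load a checkpoint with `device_map` (or `low_cpu_mem_usage=True`) unless accelerate is installed:

```python
# Assumed usage: transformers raises a "requires Accelerate" error if a
# device_map is passed without the accelerate package installed.
from transformers import AutoModelForCausalLM

model = AutoModelForCausalLM.from_pretrained(
    "facebook/MobileLLM-Pro",
    device_map="auto",  # weight placement is delegated to accelerate
)
```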