Spaces: Running on Zero
Update app.py
app.py CHANGED
@@ -1,7 +1,7 @@
 import os
-import threading
+import threading as Thread
 import time
-import
+import spaces
 import torch
 from transformers import AutoModelForCausalLM, AutoTokenizer
 import gradio as gr
@@ -43,8 +43,8 @@ tokenizer = AutoTokenizer.from_pretrained(MODEL_ID, trust_remote_code=True)
 
 model = model.eval()
 
-
-
+@spaces.GPU()
+def stream_chat(message: str, history: list, temperature: float, max_new_tokens: int, top_p: float, top_k: int, penalty: float):
     conversation = []
     for prompt, answer in history:
         conversation.extend([
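The diff shows only the first lines of the new stream_chat body. For context, below is a minimal sketch of how a @spaces.GPU()-decorated streaming chat handler is typically wired on a ZeroGPU Space. The TextIteratorStreamer/Thread generation loop, the placeholder MODEL_ID, and the loading options are illustrative assumptions, not part of this commit; the sketch uses `from threading import Thread`, which is what the `Thread(...)` call requires.

import spaces
import torch
import gradio as gr
from threading import Thread
from transformers import AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer

MODEL_ID = "org/model-name"  # placeholder; app.py defines the real MODEL_ID earlier

tokenizer = AutoTokenizer.from_pretrained(MODEL_ID, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(
    MODEL_ID, torch_dtype=torch.bfloat16, trust_remote_code=True
).to("cuda")  # on ZeroGPU the CUDA device is attached lazily, so this is allowed at startup
model = model.eval()

@spaces.GPU()  # request a GPU only for the duration of each call
def stream_chat(message: str, history: list, temperature: float, max_new_tokens: int,
                top_p: float, top_k: int, penalty: float):
    # Rebuild the conversation in the chat-template format
    conversation = []
    for prompt, answer in history:
        conversation.extend([
            {"role": "user", "content": prompt},
            {"role": "assistant", "content": answer},
        ])
    conversation.append({"role": "user", "content": message})

    input_ids = tokenizer.apply_chat_template(
        conversation, add_generation_prompt=True, return_tensors="pt"
    ).to(model.device)

    # Generate in a background thread and stream partial text back to Gradio
    streamer = TextIteratorStreamer(tokenizer, skip_prompt=True, skip_special_tokens=True)
    generate_kwargs = dict(
        input_ids=input_ids,
        streamer=streamer,
        max_new_tokens=max_new_tokens,
        do_sample=temperature > 0,
        temperature=temperature,
        top_p=top_p,
        top_k=top_k,
        repetition_penalty=penalty,
    )
    Thread(target=model.generate, kwargs=generate_kwargs).start()

    partial = ""
    for new_text in streamer:
        partial += new_text
        yield partial

On the Gradio side, a generator like this is usually passed to gr.ChatInterface with sliders as additional_inputs, so that temperature, max_new_tokens, top_p, top_k and penalty map onto UI controls.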