Commit 40e6fe6 · Parent(s): d2a7626
remove to.cuda
app.py CHANGED

@@ -10,7 +10,6 @@ import json
 import bs4
 import os
 os.environ["USE_FLASH_ATTENTION"] = "0"
-device = "cuda" if torch.cuda.is_available() else "cpu"

 product_strings = []
 recipe_strings = []

@@ -172,7 +171,6 @@ def respond(
 Frage: {query}
 <|im_end|>
 <|im_start|>assistant"""
-system_message = system_message.to("cuda") if torch.cuda.is_available() else system_message
 refined_context = generator[1](system_message, do_sample=True, padding=True, truncation=True, top_p=0.95, max_new_tokens=150)
 # Retrieve relevant context from Qdrant
 if "rezept" in query.lower() or "gericht" in query.lower():
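The removed line called .to("cuda") on system_message, but at that point system_message is a plain Python prompt string, so the call would fail with an AttributeError; device placement applies to the model and its tensors, not to the prompt. Below is a minimal sketch of the usual pattern with a transformers text-generation pipeline. The model id and the refiner name are placeholders and not taken from this Space, where the equivalent object appears as generator[1].

import torch
from transformers import pipeline

# Device selection happens when the pipeline is created, not on the prompt string.
# "some-org/some-model" is a hypothetical model id used only for illustration.
refiner = pipeline(
    "text-generation",
    model="some-org/some-model",
    device=0 if torch.cuda.is_available() else -1,
)

system_message = "..."  # the assembled prompt, a plain Python string
refined_context = refiner(
    system_message,
    do_sample=True,
    top_p=0.95,
    max_new_tokens=150,
)

With this arrangement the same code runs on GPU or CPU without touching the prompt, which is what the commit's removal of the two cuda-specific lines accomplishes.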