Update app.py
app.py
CHANGED
@@ -18,8 +18,8 @@ device = "cuda" if torch.cuda.is_available() else "cpu"
 embedding_model = BGEM3FlagModel('BAAI/bge-m3',
                                  use_fp16=True)  # Setting use_fp16 to True speeds up computation with a slight performance degradation
 
-embeddings = np.load("
-embeddings_data = pd.read_json("
+embeddings = np.load("embeddings_albert_tchap.npy")
+embeddings_data = pd.read_json("embeddings_albert_tchap.json")
 embeddings_text = embeddings_data["text_with_context"].tolist()
 
 # Define the device
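The change points np.load and pd.read_json at the precomputed embedding artifacts bundled with the Space (embeddings_albert_tchap.npy and embeddings_albert_tchap.json). Below is a minimal sketch of how such precomputed BGE-M3 dense embeddings are typically queried; the retrieval code itself is not part of this diff, so top_k_chunks and the shape assumptions are illustrative only.

# A minimal retrieval sketch, not part of the diff: it assumes the .npy file
# holds one dense BGE-M3 vector per entry of "text_with_context" in the JSON.
import numpy as np
import pandas as pd
from FlagEmbedding import BGEM3FlagModel

embedding_model = BGEM3FlagModel('BAAI/bge-m3', use_fp16=True)
embeddings = np.load("embeddings_albert_tchap.npy")             # shape: (n_chunks, dim)
embeddings_data = pd.read_json("embeddings_albert_tchap.json")
embeddings_text = embeddings_data["text_with_context"].tolist()

def top_k_chunks(query: str, k: int = 5) -> list[str]:
    """Return the k stored text chunks whose embeddings are closest to the query."""
    query_vec = embedding_model.encode([query])["dense_vecs"][0]
    # Normalize both sides so the dot product is a cosine similarity.
    query_vec = query_vec / np.linalg.norm(query_vec)
    corpus = embeddings / np.linalg.norm(embeddings, axis=1, keepdims=True)
    scores = corpus @ query_vec
    best = np.argsort(scores)[::-1][:k]
    return [embeddings_text[i] for i in best]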