Update app.py
app.py CHANGED
@@ -16,14 +16,14 @@ from colpali_engine.models import ColQwen2, ColQwen2Processor
 def install_fa2():
     print("Install FA2")
     os.system("pip install flash-attn --no-build-isolation")
-install_fa2()
+# install_fa2()
 
 
 model = ColQwen2.from_pretrained(
     "manu/colqwen2-v1.0-alpha",
     torch_dtype=torch.bfloat16,
     device_map="cuda:0",  # or "mps" if on Apple Silicon
-    attn_implementation="flash_attention_2",  # should work on A100
+    # attn_implementation="flash_attention_2",  # should work on A100
 ).eval()
 processor = ColQwen2Processor.from_pretrained("manu/colqwen2-v1.0-alpha")
 
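The commit simply comments out the flash-attn install and the attn_implementation kwarg so the Space runs on hardware without FlashAttention 2. If the goal is to keep FlashAttention when it happens to be available (e.g. on an A100) while still starting up elsewhere, a minimal sketch, assuming the standard transformers from_pretrained kwargs shown above, could pick the backend at runtime instead of toggling comments (the conditional-kwargs pattern is an assumption, not part of this commit):

import importlib.util

import torch
from colpali_engine.models import ColQwen2, ColQwen2Processor

# Only request FlashAttention 2 when the flash_attn package is importable;
# otherwise omit the kwarg entirely, which mirrors what this commit does
# by commenting it out.
extra_kwargs = {}
if importlib.util.find_spec("flash_attn") is not None:
    extra_kwargs["attn_implementation"] = "flash_attention_2"

model = ColQwen2.from_pretrained(
    "manu/colqwen2-v1.0-alpha",
    torch_dtype=torch.bfloat16,
    device_map="cuda:0",  # or "mps" if on Apple Silicon
    **extra_kwargs,
).eval()
processor = ColQwen2Processor.from_pretrained("manu/colqwen2-v1.0-alpha")

This keeps a single code path for both environments; the behavior on a machine without flash_attn is identical to the committed version.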