Upload app.py with huggingface_hub
app.py CHANGED
```diff
@@ -47,28 +47,32 @@ def download_and_load_model():
     try:
         print("🔄 Downloading multimodal Gemma model from HF...")
 
-        # Download model checkpoint
+        # Download model checkpoint (using token if available)
         checkpoint_path = hf_hub_download(
             repo_id="sagar007/multimodal-gemma-270m-llava",
             filename="final_model.ckpt",
-            cache_dir="./model_cache"
+            cache_dir="./model_cache",
+            token=hf_token if 'hf_token' in locals() else None
         )
 
         # Download config files (same as local setup)
         model_config_path = hf_hub_download(
             repo_id="sagar007/multimodal-gemma-270m-llava",
             filename="configs/model_config.yaml",
-            cache_dir="./model_cache"
+            cache_dir="./model_cache",
+            token=hf_token if 'hf_token' in locals() else None
         )
         training_config_path = hf_hub_download(
             repo_id="sagar007/multimodal-gemma-270m-llava",
             filename="configs/training_config.yaml",
-            cache_dir="./model_cache"
+            cache_dir="./model_cache",
+            token=hf_token if 'hf_token' in locals() else None
         )
         data_config_path = hf_hub_download(
             repo_id="sagar007/multimodal-gemma-270m-llava",
             filename="configs/data_config.yaml",
-            cache_dir="./model_cache"
+            cache_dir="./model_cache",
+            token=hf_token if 'hf_token' in locals() else None
         )
 
         # Load configs exactly like local gradio_app.py
```
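For context, the added `token=...` argument assumes an `hf_token` variable has been bound somewhere earlier in `app.py`; that part is not shown in this hunk. Below is a minimal sketch of how such a token is commonly obtained in a Space, by reading an `HF_TOKEN` secret from the environment. The variable name, the secret name, and the fallback behaviour are assumptions for illustration, not taken from this repository.

```python
import os
from huggingface_hub import hf_hub_download

# Assumption: the Space exposes its access token as an HF_TOKEN secret,
# which the app sees as an environment variable. If the secret is unset,
# hf_token is None and hf_hub_download uses its default credential
# handling (anonymous access is fine for public repos).
hf_token = os.environ.get("HF_TOKEN")

checkpoint_path = hf_hub_download(
    repo_id="sagar007/multimodal-gemma-270m-llava",
    filename="final_model.ckpt",
    cache_dir="./model_cache",
    token=hf_token,  # passing None behaves the same as omitting the argument
)
```

One detail worth noting about the guard in the diff: inside a function, `locals()` only contains names bound within that function, so a module-level `hf_token` would not satisfy `'hf_token' in locals()` and the downloads would proceed without a token. Binding the token once at module scope, as sketched above, sidesteps the guard entirely.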