sagar007 committed on
Commit 3ef1f55 · verified · 1 Parent(s): 9d643d5

Upload app.py with huggingface_hub

Files changed (1)
app.py  +13 -35
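The commit message says app.py was pushed with the huggingface_hub client. A minimal sketch of how such an upload might look is below; the Space repo id is a placeholder and not taken from this commit:

# Hypothetical upload sketch, assuming a Space repo (repo_id is a placeholder).
from huggingface_hub import upload_file

upload_file(
    path_or_fileobj="app.py",         # local file to upload
    path_in_repo="app.py",            # destination path inside the repo
    repo_id="sagar007/<space-name>",  # placeholder: actual Space id not shown in this commit
    repo_type="space",                # target a Space rather than a model repo
)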
app.py CHANGED
@@ -37,49 +37,27 @@ from src.utils.config import load_config, merge_configs
 model = None
 config = None
 
-def download_and_load_model():
-    """Download and load the trained multimodal model from HF"""
+def load_model():
+    """Load the trained multimodal model from local files"""
     global model, config
 
     if model is not None:
         return "✅ Model already loaded!"
 
     try:
-        print("🔄 Downloading multimodal Gemma model from HF...")
-
-        # Download model checkpoint (using token if available)
-        checkpoint_path = hf_hub_download(
-            repo_id="sagar007/multimodal-gemma-270m-llava",
-            filename="final_model.ckpt",
-            cache_dir="./model_cache",
-            token=hf_token if 'hf_token' in locals() else None
-        )
+        print("🔄 Loading multimodal Gemma model from local files...")
 
-        # Download config files (same as local setup)
-        model_config_path = hf_hub_download(
-            repo_id="sagar007/multimodal-gemma-270m-llava",
-            filename="configs/model_config.yaml",
-            cache_dir="./model_cache",
-            token=hf_token if 'hf_token' in locals() else None
-        )
-        training_config_path = hf_hub_download(
-            repo_id="sagar007/multimodal-gemma-270m-llava",
-            filename="configs/training_config.yaml",
-            cache_dir="./model_cache",
-            token=hf_token if 'hf_token' in locals() else None
-        )
-        data_config_path = hf_hub_download(
-            repo_id="sagar007/multimodal-gemma-270m-llava",
-            filename="configs/data_config.yaml",
-            cache_dir="./model_cache",
-            token=hf_token if 'hf_token' in locals() else None
-        )
+        # Use local model checkpoint (included in Space)
+        checkpoint_path = "final_model.ckpt"
+
+        if not Path(checkpoint_path).exists():
+            return "❌ Model checkpoint not found! Please ensure final_model.ckpt is in the Space."
 
-        # Load configs exactly like local gradio_app.py
+        # Use local config files (same as local setup)
         print("📁 Loading configs...")
-        model_config = load_config(model_config_path)
-        training_config = load_config(training_config_path)
-        data_config = load_config(data_config_path)
+        model_config = load_config("configs/model_config.yaml")
+        training_config = load_config("configs/training_config.yaml")
+        data_config = load_config("configs/data_config.yaml")
         config = merge_configs([model_config, training_config, data_config])
 
         print("📁 Loading model from checkpoint...")
@@ -334,7 +312,7 @@ def create_gradio_interface():
 
     # Event handlers
    load_btn.click(
-        fn=download_and_load_model,
+        fn=load_model,
        outputs=model_status
    )
 
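The change swaps the hf_hub_download calls for files bundled with the Space. If one wanted to keep the Hub download as a fallback when final_model.ckpt is missing locally, a rough sketch reusing the repo id, filename, and cache dir from the removed code could look like this (the helper name is hypothetical, not part of this commit):

# Hypothetical fallback sketch: prefer the bundled checkpoint, else pull it from the Hub.
from pathlib import Path
from huggingface_hub import hf_hub_download

def resolve_checkpoint(local_path: str = "final_model.ckpt") -> str:
    """Return a usable checkpoint path, downloading from the Hub only if needed."""
    if Path(local_path).exists():
        return local_path
    # Same repo, filename, and cache dir as the removed download_and_load_model()
    return hf_hub_download(
        repo_id="sagar007/multimodal-gemma-270m-llava",
        filename="final_model.ckpt",
        cache_dir="./model_cache",
    )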