Spaces: Running on Zero
Upload 2 files

- modutils.py +9 -3
- requirements.txt +3 -1
modutils.py
CHANGED
@@ -508,12 +508,18 @@ def get_t2i_model_info(repo_id: str):
     return gr.update(value=md)
 
 
+MAX_MODEL_INFO = 100
+
+
 def get_tupled_model_list(model_list):
     if not model_list: return []
     #return [(x, x) for x in model_list] # for skipping this function
     tupled_list = []
-
-
+    api = HfApi()
+    for i, repo_id in enumerate(model_list):
+        if i > MAX_MODEL_INFO:
+            tupled_list.append((repo_id, repo_id))
+            continue
         try:
             if not api.repo_exists(repo_id): continue
             model = api.model_info(repo_id=repo_id, timeout=0.5)
@@ -521,7 +527,7 @@ def get_tupled_model_list(model_list):
             print(f"{repo_id}: {e}")
             tupled_list.append((repo_id, repo_id))
             continue
-        if model.
+        if model.tags is None: continue
         tags = model.tags
         info = []
         if not 'diffusers' in tags: continue
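Taken together, the modutils.py change caps live Hub lookups: entries past index MAX_MODEL_INFO skip the HfApi call and fall back to a plain (repo_id, repo_id) tuple, as does anything that raises, has no tags, or is not a diffusers model. Below is a minimal sketch of the resulting control flow, not the full function; the display-label formatting built from the tags is omitted since it is not part of this diff.

# Sketch of the post-change control flow in get_tupled_model_list.
# The real function builds a richer display label from the model tags;
# here every kept entry just falls back to (repo_id, repo_id).
from huggingface_hub import HfApi

MAX_MODEL_INFO = 100  # only the first entries get a live Hub lookup

def get_tupled_model_list_sketch(model_list):
    if not model_list: return []
    tupled_list = []
    api = HfApi()
    for i, repo_id in enumerate(model_list):
        if i > MAX_MODEL_INFO:
            # past the cap: skip the API call, keep the bare repo id
            tupled_list.append((repo_id, repo_id))
            continue
        try:
            if not api.repo_exists(repo_id): continue
            model = api.model_info(repo_id=repo_id, timeout=0.5)
        except Exception as e:
            print(f"{repo_id}: {e}")
            tupled_list.append((repo_id, repo_id))
            continue
        if model.tags is None: continue
        tags = model.tags
        if not 'diffusers' in tags: continue
        tupled_list.append((repo_id, repo_id))  # real code formats a nicer label here
    return tupled_list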
requirements.txt
CHANGED
@@ -1,5 +1,5 @@
 stablepy==0.6.1
-diffusers
+diffusers<=0.32.0
 transformers==4.47.1
 torch==2.4.0
 numpy<2
@@ -10,6 +10,8 @@ accelerate
 optimum[onnxruntime]
 dartrs
 huggingface_hub
+hf_transfer
+hf_xet
 translatepy
 timm
 rapidfuzz
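On the requirements side, hf_xet needs no configuration: recent huggingface_hub releases use it automatically when it is installed and the repository is Xet-backed. hf_transfer, by contrast, only takes effect when the HF_HUB_ENABLE_HF_TRANSFER environment variable is set. The sketch below shows how that backend is typically switched on; whether this Space actually sets the variable is not shown in the diff, and the repo/filename are placeholders.

# Sketch: enabling the hf_transfer download backend for huggingface_hub.
# Set the env var before importing huggingface_hub so it is picked up.
import os
os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "1"

from huggingface_hub import hf_hub_download

# Placeholder repo/filename for illustration only (not taken from this diff).
path = hf_hub_download(repo_id="some-org/some-model", filename="model.safetensors")
print(path)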