GSK-2814-error-message-when-hf-token-wrong #103
by ZeroCommand - opened
app_text_classification.py
CHANGED
@@ -8,13 +8,13 @@ from text_classification_ui_helpers import (
     align_columns_and_show_prediction,
     check_dataset,
     precheck_model_ds_enable_example_btn,
+    show_hf_token_info,
     try_submit,
     write_column_mapping_to_config,
 )
 
 from text_classification import (
     get_example_prediction,
-    check_hf_token_validity,
     HuggingFaceInferenceAPIResponse
 )
 from wordings import (
@@ -104,7 +104,7 @@ def get_demo():
         inference_token_info = gr.HTML(value=HF_TOKEN_INVALID_STYLED, visible=False)
 
         inference_token.change(
-
+            fn=show_hf_token_info,
             inputs=[inference_token],
             outputs=[inference_token_info],
         )
@@ -244,11 +244,7 @@ def get_demo():
             return gr.update(interactive=False)
         if not column_mapping_accordion.visible:
             return gr.update(interactive=False)
-
-            model_id, dataset_id, dataset_config, dataset_split, inference_token
-        )
-        if not isinstance(prediction_response, HuggingFaceInferenceAPIResponse):
-            gr.warning("Your HF token is invalid. Please check your token.")
+        if inference_token_info.visible:
             return gr.update(interactive=False)
         return gr.update(interactive=True)
 
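Taken together, the app-side change does two things: the token textbox now re-validates on every edit and toggles the styled warning (inference_token_info), and the submit gate simply refuses to enable the button while that warning is visible, instead of issuing a throwaway example-prediction request just to test the token. Below is a minimal, self-contained sketch of that wiring, assuming Gradio and a placeholder validity check; the stand-in check_hf_token_validity rule, the HF_TOKEN_INVALID_STYLED markup, and the enable_run helper are illustrations, not the Space's actual code.

import gradio as gr

# Stand-in for the project's check_hf_token_validity (text_classification.py);
# any callable returning a bool would do for this sketch.
def check_hf_token_validity(token: str) -> bool:
    return token.startswith("hf_") and len(token) > 10  # placeholder rule

# Placeholder for the styled warning defined in wordings.py.
HF_TOKEN_INVALID_STYLED = "<p style='color: red;'>Your Hugging Face token is invalid.</p>"

def show_hf_token_info(token):
    # Mirrors the PR's helper: show the warning only when validation fails.
    if not check_hf_token_validity(token):
        return gr.update(visible=True)
    return gr.update(visible=False)

with gr.Blocks() as demo:
    inference_token = gr.Textbox(label="Hugging Face inference API token", type="password")
    inference_token_info = gr.HTML(value=HF_TOKEN_INVALID_STYLED, visible=False)
    run_btn = gr.Button("Submit", interactive=False)

    # Re-check the token on every edit, as in the diff's inference_token.change(...).
    inference_token.change(
        fn=show_hf_token_info,
        inputs=[inference_token],
        outputs=[inference_token_info],
    )

    def enable_run(token):
        # The Space gates on inference_token_info.visible; re-running the check
        # on the raw token keeps this sketch runnable outside get_demo().
        if not check_hf_token_validity(token):
            return gr.update(interactive=False)
        return gr.update(interactive=True)

    inference_token.change(fn=enable_run, inputs=[inference_token], outputs=[run_btn])

if __name__ == "__main__":
    demo.launch()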
text_classification_ui_helpers.py
CHANGED
@@ -16,6 +16,7 @@ from text_classification import (
     preload_hf_inference_api,
     get_example_prediction,
     get_labels_and_features_from_dataset,
+    check_hf_token_validity,
     HuggingFaceInferenceAPIResponse,
 )
 from wordings import (
@@ -331,6 +332,12 @@ def construct_label_and_feature_mapping(all_mappings, ds_labels, ds_features):
     feature_mapping = all_mappings["features"]
     return label_mapping, feature_mapping
 
+def show_hf_token_info(token):
+    valid = check_hf_token_validity(token)
+    if not valid:
+        return gr.update(visible=True)
+    return gr.update(visible=False)
+
 def try_submit(m_id, d_id, config, split, inference, inference_token, uid):
     all_mappings = read_column_mapping(uid)
     check_column_mapping_keys_validity(all_mappings)