Commit 2f7dd9a · 1 parent: 79d13cd
check if config submitted path exist

Files changed:
- app_debug.py +3 -1
- text_classification.py +1 -1
- text_classification_ui_helpers.py +2 -1
app_debug.py CHANGED

@@ -3,7 +3,7 @@ from os.path import isfile, join
 import html
 
 import gradio as gr
-
+import os
 import pipe
 from io_utils import get_logs_file
 
@@ -69,6 +69,8 @@ def get_queue_status():
 
 
 def get_demo():
+    if not os.path.exists(CONFIG_PATH):
+        os.makedirs(CONFIG_PATH)
     with gr.Row():
         gr.HTML(
             value=get_queue_status,
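
The two added lines in get_demo() make sure the directory that holds submitted configs exists before the Gradio UI starts writing into it. A minimal sketch of the same guard, using a hypothetical CONFIG_PATH value since the constant's definition is not part of this diff:

import os

CONFIG_PATH = "./configs"  # hypothetical value; the real constant is defined or imported elsewhere in app_debug.py

def ensure_config_dir(path: str = CONFIG_PATH) -> None:
    # Same effect as the explicit "if not exists: makedirs" in the diff;
    # exist_ok=True also tolerates the directory being created concurrently.
    os.makedirs(path, exist_ok=True)

ensure_config_dir()
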
text_classification.py CHANGED

@@ -7,9 +7,9 @@ import pandas as pd
 from transformers import pipeline
 import requests
 import os
+from app_env import HF_WRITE_TOKEN
 
 logger = logging.getLogger(__name__)
-HF_WRITE_TOKEN = "HF_WRITE_TOKEN"
 AUTH_CHECK_URL = "https://huggingface.co/api/whoami-v2"
 
 logger = logging.getLogger(__file__)
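
Here the module-level constant HF_WRITE_TOKEN, which held the name of an environment variable rather than the token itself, is replaced by an import so the name is defined once in app_env and shared across modules. A sketch of what that shared module presumably contains for this constant (app_env's actual contents are not shown in the diff):

# app_env.py (sketch, inferred from the imports added in this commit)
HF_WRITE_TOKEN = "HF_WRITE_TOKEN"  # name of the env var that stores a write-scoped Hugging Face token
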
text_classification_ui_helpers.py CHANGED

@@ -35,6 +35,7 @@ from wordings import (
     get_dataset_fetch_error_raw,
 )
 import os
+from app_env import HF_WRITE_TOKEN
 
 MAX_LABELS = 40
 MAX_FEATURES = 20
@@ -268,7 +269,7 @@ def align_columns_and_show_prediction(
         gr.Dropdown(visible=False) for _ in range(MAX_LABELS + MAX_FEATURES)
     ]
 
-    hf_token = os.environ.get(
+    hf_token = os.environ.get(HF_WRITE_TOKEN, default="")
 
     prediction_input, prediction_response = get_example_prediction(
         model_id, dataset_id, dataset_config, dataset_split, hf_token
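
The rewritten lookup reads the token from the process environment and falls back to an empty string, so align_columns_and_show_prediction() can still call get_example_prediction() unauthenticated when no token is configured. A small, self-contained sketch of the same pattern (the helper and variable names here are illustrative, not from the repository):

import os

HF_WRITE_TOKEN = "HF_WRITE_TOKEN"  # stand-in for `from app_env import HF_WRITE_TOKEN`

def read_write_token() -> str:
    # os.environ.get returns the default instead of raising KeyError,
    # so a missing token yields "" rather than an exception.
    return os.environ.get(HF_WRITE_TOKEN, default="")

token = read_write_token()
print("token configured" if token else "no token; running unauthenticated")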