caw2rng committed
Commit · 7055650
Parent(s): 0fb5a8c
use full hf comp code
- competitions +0 -1
- competitions/__init__.py +6 -0
- competitions/api.py +109 -0
- competitions/app.py +447 -0
- competitions/cli/__init__.py +13 -0
- competitions/cli/competitions.py +38 -0
- competitions/cli/create.py +20 -0
- competitions/cli/run.py +27 -0
- competitions/cli/submit.py +49 -0
- competitions/compute_metrics.py +73 -0
- competitions/create.py +348 -0
- competitions/download.py +206 -0
- competitions/enums.py +14 -0
- competitions/errors.py +18 -0
- competitions/evaluate.py +135 -0
- competitions/info.py +197 -0
- competitions/leaderboard.py +230 -0
- competitions/oauth.py +122 -0
- competitions/params.py +34 -0
- competitions/runner.py +230 -0
- competitions/static/.keep +0 -0
- competitions/submissions.py +336 -0
- competitions/templates/index.html +951 -0
- competitions/tests/test_dummy.py +2 -0
- competitions/text.py +17 -0
- competitions/utils.py +386 -0
competitions
DELETED
@@ -1 +0,0 @@
Subproject commit 447fff164c586ae3d42617c7726d8563514c3f17

competitions/__init__.py
ADDED
@@ -0,0 +1,6 @@
import os


__version__ = "0.1.9.dev0"

HF_URL = os.getenv("HF_URL", "https://huggingface.co")

competitions/api.py
ADDED
@@ -0,0 +1,109 @@
import asyncio
import os
import signal
import sqlite3
from contextlib import asynccontextmanager

import psutil
from fastapi import FastAPI
from loguru import logger

from competitions.utils import run_evaluation


def get_process_status(pid):
    try:
        process = psutil.Process(pid)
        proc_status = process.status()
        return proc_status
    except psutil.NoSuchProcess:
        logger.info(f"No process found with PID: {pid}")
        return "Completed"


def kill_process_by_pid(pid):
    """Kill process by PID."""
    os.kill(pid, signal.SIGTERM)


class JobDB:
    def __init__(self, db_path):
        self.db_path = db_path
        self.conn = sqlite3.connect(db_path)
        self.c = self.conn.cursor()
        self.create_jobs_table()

    def create_jobs_table(self):
        self.c.execute(
            """CREATE TABLE IF NOT EXISTS jobs
            (id INTEGER PRIMARY KEY, pid INTEGER)"""
        )
        self.conn.commit()

    def add_job(self, pid):
        sql = f"INSERT INTO jobs (pid) VALUES ({pid})"
        self.c.execute(sql)
        self.conn.commit()

    def get_running_jobs(self):
        self.c.execute("""SELECT pid FROM jobs""")
        running_pids = self.c.fetchall()
        running_pids = [pid[0] for pid in running_pids]
        return running_pids

    def delete_job(self, pid):
        sql = f"DELETE FROM jobs WHERE pid={pid}"
        self.c.execute(sql)
        self.conn.commit()


PARAMS = os.environ.get("PARAMS")
DB = JobDB("job.db")


class BackgroundRunner:
    async def run_main(self):
        while True:
            running_jobs = DB.get_running_jobs()
            if running_jobs:
                for _pid in running_jobs:
                    proc_status = get_process_status(_pid)
                    proc_status = proc_status.strip().lower()
                    if proc_status in ("completed", "error", "zombie"):
                        logger.info(f"Process {_pid} is already completed. Skipping...")
                        try:
                            kill_process_by_pid(_pid)
                        except Exception as e:
                            logger.info(f"Error while killing process: {e}")
                        DB.delete_job(_pid)

            running_jobs = DB.get_running_jobs()
            if not running_jobs:
                logger.info("No running jobs found. Shutting down the server.")
                os.kill(os.getpid(), signal.SIGINT)
            await asyncio.sleep(30)


runner = BackgroundRunner()


@asynccontextmanager
async def lifespan(app: FastAPI):
    process_pid = run_evaluation(params=PARAMS)
    logger.info(f"Started training with PID {process_pid}")
    DB.add_job(process_pid)
    asyncio.create_task(runner.run_main())
    yield


api = FastAPI(lifespan=lifespan)


@api.get("/")
async def root():
    return "Your model is being evaluated..."


@api.get("/health")
async def health():
    return "OK"

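A quick sketch of how the JobDB helper above can be exercised on its own (assuming the package and its dependencies are installed; the PID and database filename are placeholders used only for illustration):

# Illustration of the sqlite-backed JobDB the watcher uses to track evaluation PIDs.
import os

from competitions.api import JobDB

db = JobDB("demo_jobs.db")     # throwaway database file
db.add_job(12345)              # pretend PID of an evaluation process
print(db.get_running_jobs())   # -> [12345]
db.delete_job(12345)
print(db.get_running_jobs())   # -> []
db.conn.close()
os.remove("demo_jobs.db")
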
competitions/app.py
ADDED
@@ -0,0 +1,447 @@
import datetime
import os
import threading
import time

from fastapi import Depends, FastAPI, File, Form, HTTPException, Request, UploadFile
from fastapi.responses import HTMLResponse, JSONResponse
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from huggingface_hub import hf_hub_download
from huggingface_hub.utils import disable_progress_bars
from huggingface_hub.utils._errors import EntryNotFoundError
from loguru import logger
from pydantic import BaseModel
from requests.exceptions import RequestException

from competitions import __version__, utils
from competitions.errors import AuthenticationError, PastDeadlineError, SubmissionError, SubmissionLimitError
from competitions.info import CompetitionInfo
from competitions.leaderboard import Leaderboard
from competitions.oauth import attach_oauth
from competitions.runner import JobRunner
from competitions.submissions import Submissions
from competitions.text import SUBMISSION_SELECTION_TEXT, SUBMISSION_TEXT


HF_TOKEN = os.environ.get("HF_TOKEN", None)
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
COMPETITION_ID = os.environ.get("COMPETITION_ID")
OUTPUT_PATH = os.environ.get("OUTPUT_PATH", "/tmp/model")
START_DATE = os.environ.get("START_DATE", "2000-12-31")
DISABLE_PUBLIC_LB = int(os.environ.get("DISABLE_PUBLIC_LB", 0))

disable_progress_bars()

try:
    REQUIREMENTS_FNAME = hf_hub_download(
        repo_id=COMPETITION_ID,
        filename="requirements.txt",
        token=HF_TOKEN,
        repo_type="dataset",
    )
except EntryNotFoundError:
    REQUIREMENTS_FNAME = None

if REQUIREMENTS_FNAME:
    logger.info("Uninstalling and installing requirements")
    utils.uninstall_requirements(REQUIREMENTS_FNAME)
    utils.install_requirements(REQUIREMENTS_FNAME)


class LeaderboardRequest(BaseModel):
    lb: str


class UpdateSelectedSubmissionsRequest(BaseModel):
    submission_ids: str


class UpdateTeamNameRequest(BaseModel):
    new_team_name: str


def run_job_runner():
    job_runner = JobRunner(
        competition_id=COMPETITION_ID,
        token=HF_TOKEN,
        output_path=OUTPUT_PATH,
    )
    job_runner.run()


def start_job_runner_thread():
    thread = threading.Thread(target=run_job_runner)
    # thread.daemon = True
    thread.start()
    return thread


def watchdog(job_runner_thread):
    while True:
        if not job_runner_thread.is_alive():
            logger.warning("Job runner thread stopped. Restarting...")
            job_runner_thread = start_job_runner_thread()
        time.sleep(10)


job_runner_thread = start_job_runner_thread()
watchdog_thread = threading.Thread(target=watchdog, args=(job_runner_thread,))
watchdog_thread.daemon = True
watchdog_thread.start()


app = FastAPI()
attach_oauth(app)

static_path = os.path.join(BASE_DIR, "static")
app.mount("/static", StaticFiles(directory=static_path), name="static")
templates_path = os.path.join(BASE_DIR, "templates")
templates = Jinja2Templates(directory=templates_path)


@app.get("/", response_class=HTMLResponse)
async def read_form(request: Request):
    """
    This function is used to render the HTML file
    :param request:
    :return:
    """
    if HF_TOKEN is None:
        return HTTPException(status_code=500, detail="HF_TOKEN is not set.")
    competition_info = CompetitionInfo(competition_id=COMPETITION_ID, autotrain_token=HF_TOKEN)
    context = {
        "request": request,
        "logo": competition_info.logo_url,
        "competition_type": competition_info.competition_type,
        "version": __version__,
        "rules_available": competition_info.rules is not None,
    }
    return templates.TemplateResponse("index.html", context)


@app.get("/login_status", response_class=JSONResponse)
async def use_oauth(request: Request, user_token: str = Depends(utils.user_authentication)):
    if user_token:
        return {"response": 2}
    return {"response": 1}


@app.get("/logout", response_class=HTMLResponse)
async def user_logout(request: Request):
    """Endpoint that logs out the user (e.g. delete cookie session)."""

    if "oauth_info" in request.session:
        request.session.pop("oauth_info", None)

    competition_info = CompetitionInfo(competition_id=COMPETITION_ID, autotrain_token=HF_TOKEN)
    context = {
        "request": request,
        "logo": competition_info.logo_url,
        "competition_type": competition_info.competition_type,
        "__version__": __version__,
        "rules_available": competition_info.rules is not None,
    }

    return templates.TemplateResponse("index.html", context)


@app.get("/competition_info", response_class=JSONResponse)
async def get_comp_info(request: Request):
    competition_info = CompetitionInfo(competition_id=COMPETITION_ID, autotrain_token=HF_TOKEN)
    info = competition_info.competition_desc
    resp = {"response": info}
    return resp


@app.get("/dataset_info", response_class=JSONResponse)
async def get_dataset_info(request: Request):
    competition_info = CompetitionInfo(competition_id=COMPETITION_ID, autotrain_token=HF_TOKEN)
    info = competition_info.dataset_desc
    resp = {"response": info}
    return resp


@app.get("/rules", response_class=JSONResponse)
async def get_rules(request: Request):
    competition_info = CompetitionInfo(competition_id=COMPETITION_ID, autotrain_token=HF_TOKEN)
    if competition_info.rules is not None:
        return {"response": competition_info.rules}
    return {"response": "No rules available."}


@app.get("/submission_info", response_class=JSONResponse)
async def get_submission_info(request: Request):
    competition_info = CompetitionInfo(competition_id=COMPETITION_ID, autotrain_token=HF_TOKEN)
    info = competition_info.submission_desc
    resp = {"response": info}
    return resp


@app.post("/leaderboard", response_class=JSONResponse)
async def fetch_leaderboard(
    request: Request, body: LeaderboardRequest, user_token: str = Depends(utils.user_authentication)
):
    lb = body.lb

    comp_org = COMPETITION_ID.split("/")[0]
    if user_token is not None:
        is_user_admin = utils.is_user_admin(user_token, comp_org)
    else:
        is_user_admin = False

    if DISABLE_PUBLIC_LB == 1 and lb == "public" and not is_user_admin:
        return {"response": "Public leaderboard is disabled by the competition host."}

    competition_info = CompetitionInfo(competition_id=COMPETITION_ID, autotrain_token=HF_TOKEN)
    leaderboard = Leaderboard(
        end_date=competition_info.end_date,
        eval_higher_is_better=competition_info.eval_higher_is_better,
        max_selected_submissions=competition_info.selection_limit,
        competition_id=COMPETITION_ID,
        token=HF_TOKEN,
        scoring_metric=competition_info.scoring_metric,
    )
    if lb == "private":
        current_utc_time = datetime.datetime.now()
        if current_utc_time < competition_info.end_date and not is_user_admin:
            return {"response": f"Private leaderboard will be available on {competition_info.end_date} UTC."}
    df = leaderboard.fetch(private=lb == "private")

    if len(df) == 0:
        return {"response": "No teams yet. Why not make a submission?"}
    resp = {"response": df.to_markdown(index=False)}
    return resp


@app.post("/my_submissions", response_class=JSONResponse)
async def my_submissions(request: Request, user_token: str = Depends(utils.user_authentication)):
    competition_info = CompetitionInfo(competition_id=COMPETITION_ID, autotrain_token=HF_TOKEN)
    if user_token is None:
        return {
            "response": {
                "submissions": "",
                "submission_text": SUBMISSION_TEXT.format(competition_info.submission_limit),
                "error": "**Invalid token. Please login.**",
                "team_name": "",
            }
        }
    sub = Submissions(
        end_date=competition_info.end_date,
        submission_limit=competition_info.submission_limit,
        competition_id=COMPETITION_ID,
        token=HF_TOKEN,
        competition_type=competition_info.competition_type,
        hardware=competition_info.hardware,
    )
    try:
        subs = sub.my_submissions(user_token)
    except AuthenticationError:
        return {
            "response": {
                "submissions": "",
                "submission_text": SUBMISSION_TEXT.format(competition_info.submission_limit),
                "error": "**Invalid token. Please login.**",
                "team_name": "",
            }
        }
    subs = subs.to_dict(orient="records")
    error = ""
    if len(subs) == 0:
        error = "**You have not made any submissions yet.**"
        subs = ""
    submission_text = SUBMISSION_TEXT.format(competition_info.submission_limit)
    submission_selection_text = SUBMISSION_SELECTION_TEXT.format(competition_info.selection_limit)

    team_name = utils.get_team_name(user_token, COMPETITION_ID, HF_TOKEN)

    resp = {
        "response": {
            "submissions": subs,
            "submission_text": submission_text + submission_selection_text,
            "error": error,
            "team_name": team_name,
        }
    }
    return resp


@app.post("/new_submission", response_class=JSONResponse)
async def new_submission(
    request: Request,
    submission_file: UploadFile = File(None),
    hub_model: str = Form(...),
    submission_comment: str = Form(None),
    user_token: str = Depends(utils.user_authentication),
):
    if submission_comment is None:
        submission_comment = ""

    if user_token is None:
        return {"response": "Invalid token. Please login."}

    todays_date = datetime.datetime.now()
    start_date = datetime.datetime.strptime(START_DATE, "%Y-%m-%d")
    if todays_date < start_date:
        comp_org = COMPETITION_ID.split("/")[0]
        if not utils.is_user_admin(user_token, comp_org):
            return {"response": "Competition has not started yet!"}

    competition_info = CompetitionInfo(competition_id=COMPETITION_ID, autotrain_token=HF_TOKEN)
    sub = Submissions(
        end_date=competition_info.end_date,
        submission_limit=competition_info.submission_limit,
        competition_id=COMPETITION_ID,
        token=HF_TOKEN,
        competition_type=competition_info.competition_type,
        hardware=competition_info.hardware,
    )
    try:
        if competition_info.competition_type == "generic":
            resp = sub.new_submission(user_token, submission_file, submission_comment)
            return {"response": f"Success! You have {resp} submissions remaining today."}
        if competition_info.competition_type == "script":
            resp = sub.new_submission(user_token, hub_model, submission_comment)
            return {"response": f"Success! You have {resp} submissions remaining today."}
    except RequestException:
        return {"response": "Hugging Face Hub is unreachable, please try again later"}
    except AuthenticationError:
        return {"response": "Invalid token"}
    except PastDeadlineError:
        return {"response": "Competition has ended"}
    except SubmissionError:
        return {"response": "Invalid submission file"}
    except SubmissionLimitError:
        return {"response": "Submission limit reached"}
    return {"response": "Invalid competition type"}


@app.post("/update_selected_submissions", response_class=JSONResponse)
def update_selected_submissions(
    request: Request, body: UpdateSelectedSubmissionsRequest, user_token: str = Depends(utils.user_authentication)
):
    submission_ids = body.submission_ids

    if user_token is None:
        return {"success": False, "error": "Invalid token, please login."}

    competition_info = CompetitionInfo(competition_id=COMPETITION_ID, autotrain_token=HF_TOKEN)
    sub = Submissions(
        end_date=competition_info.end_date,
        submission_limit=competition_info.submission_limit,
        competition_id=COMPETITION_ID,
        token=HF_TOKEN,
        competition_type=competition_info.competition_type,
        hardware=competition_info.hardware,
    )
    submission_ids = submission_ids.split(",")
    submission_ids = [s.strip() for s in submission_ids]
    if len(submission_ids) > competition_info.selection_limit:
        return {
            "success": False,
            "error": f"Please select at most {competition_info.selection_limit} submissions.",
        }
    sub.update_selected_submissions(user_token=user_token, selected_submission_ids=submission_ids)
    return {"success": True, "error": ""}


@app.post("/update_team_name", response_class=JSONResponse)
def update_team_name(
    request: Request, body: UpdateTeamNameRequest, user_token: str = Depends(utils.user_authentication)
):
    new_team_name = body.new_team_name

    if user_token is None:
        return {"success": False, "error": "Invalid token. Please login."}

    if str(new_team_name).strip() == "":
        return {"success": False, "error": "Team name cannot be empty."}

    try:
        utils.update_team_name(user_token, new_team_name, COMPETITION_ID, HF_TOKEN)
        return {"success": True, "error": ""}
    except Exception as e:
        return {"success": False, "error": str(e)}


@app.post("/admin/comp_info", response_class=JSONResponse)
async def admin_comp_info(request: Request, user_token: str = Depends(utils.user_authentication)):
    comp_org = COMPETITION_ID.split("/")[0]
    user_is_admin = utils.is_user_admin(user_token, comp_org)
    if not user_is_admin:
        return {"response": "You are not an admin."}, 403

    competition_info = CompetitionInfo(competition_id=COMPETITION_ID, autotrain_token=HF_TOKEN)

    markdowns = {
        "competition_desc": competition_info.competition_desc,
        "rules": competition_info.rules,
        "submission_desc": competition_info.submission_desc,
        "dataset_desc": competition_info.dataset_desc,
    }
    if markdowns["rules"] is None:
        markdowns["rules"] = "No rules available."

    config = {
        "SUBMISSION_LIMIT": competition_info.submission_limit,
        "SELECTION_LIMIT": competition_info.selection_limit,
        "END_DATE": competition_info.end_date.strftime("%Y-%m-%d"),
        "EVAL_HIGHER_IS_BETTER": competition_info.eval_higher_is_better,
        "SUBMISSION_COLUMNS": competition_info.submission_columns_raw,
        "SUBMISSION_ID_COLUMN": competition_info.submission_id_col,
        "LOGO": competition_info.logo_url,
        "COMPETITION_TYPE": competition_info.competition_type,
        "EVAL_METRIC": competition_info.metric,
        "SUBMISSION_ROWS": competition_info.submission_rows,
        "TIME_LIMIT": competition_info.time_limit,
        "DATASET": competition_info.dataset,
        "SUBMISSION_FILENAMES": competition_info.submission_filenames,
        "SCORING_METRIC": competition_info.scoring_metric,
        "HARDWARE": competition_info.hardware,
    }

    return {"response": {"config": config, "markdowns": markdowns}}


@app.post("/admin/update_comp_info", response_class=JSONResponse)
async def update_comp_info(request: Request, user_token: str = Depends(utils.user_authentication)):
    comp_org = COMPETITION_ID.split("/")[0]
    user_is_admin = utils.is_user_admin(user_token, comp_org)
    if not user_is_admin:
        return {"response": "You are not an admin."}, 403

    competition_info = CompetitionInfo(competition_id=COMPETITION_ID, autotrain_token=HF_TOKEN)

    data = await request.json()
    config = data["config"]
    markdowns = data["markdowns"]

    valid_keys = [
        "SUBMISSION_LIMIT",
        "SELECTION_LIMIT",
        "END_DATE",
        "EVAL_HIGHER_IS_BETTER",
        "SUBMISSION_COLUMNS",
        "SUBMISSION_ID_COLUMN",
        "LOGO",
        "COMPETITION_TYPE",
        "EVAL_METRIC",
        "SUBMISSION_ROWS",
        "TIME_LIMIT",
        "DATASET",
        "SUBMISSION_FILENAMES",
        "SCORING_METRIC",
        "HARDWARE",
    ]

    for key in config:
        if key not in valid_keys:
            return {"success": False, "error": f"Invalid key: {key}"}

    try:
        competition_info.update_competition_info(config, markdowns, HF_TOKEN)
    except Exception as e:
        logger.error(e)
        return {"success": False}, 500

    return {"success": True}

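A minimal sketch of a client call against the /leaderboard endpoint defined above. The space URL and the bearer-token authentication scheme mirror the pattern used by competitions/cli/submit.py later in this commit; both are assumptions rather than something this file prescribes:

# Hypothetical client for the public leaderboard of a competition Space.
import requests

SPACE_URL = "https://my-org-my-competition.hf.space"  # placeholder space URL
USER_TOKEN = "hf_..."                                  # placeholder user token

response = requests.post(
    f"{SPACE_URL}/leaderboard",
    json={"lb": "public"},
    headers={"Authorization": f"Bearer {USER_TOKEN}"},
    timeout=30,
)
print(response.json()["response"])  # markdown table, or an informational message
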
competitions/cli/__init__.py
ADDED
@@ -0,0 +1,13 @@
from abc import ABC, abstractmethod
from argparse import ArgumentParser


class BaseCompetitionsCommand(ABC):
    @staticmethod
    @abstractmethod
    def register_subcommand(parser: ArgumentParser):
        raise NotImplementedError()

    @abstractmethod
    def run(self):
        raise NotImplementedError()

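For illustration only, a hypothetical extra subcommand built on BaseCompetitionsCommand, following the same factory-and-register pattern as the real create, run, and submit commands that follow:

# Sketch of a custom subcommand; the "hello" command does not exist in this commit.
from argparse import ArgumentParser

from competitions.cli import BaseCompetitionsCommand


def hello_command_factory(args):
    return HelloCommand(args)


class HelloCommand(BaseCompetitionsCommand):
    def __init__(self, args):
        self.args = args

    @staticmethod
    def register_subcommand(parser: ArgumentParser):
        # `parser` is the subparsers action created in cli/competitions.py
        hello_parser = parser.add_parser("hello", description="Print a greeting")
        hello_parser.add_argument("--name", type=str, default="world")
        hello_parser.set_defaults(func=hello_command_factory)

    def run(self):
        print(f"Hello, {self.args.name}!")
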
competitions/cli/competitions.py
ADDED
@@ -0,0 +1,38 @@
import argparse

from .. import __version__
from .create import CreateCompetitionAppCommand
from .run import RunCompetitionsAppCommand
from .submit import SubmitCompetitionAppCommand


def main():
    parser = argparse.ArgumentParser(
        "Competitions CLI",
        usage="competitions <command> [<args>]",
        epilog="For more information about a command, run: `competitions <command> --help`",
    )
    parser.add_argument("--version", "-v", help="Display competitions version", action="store_true")
    commands_parser = parser.add_subparsers(help="commands")

    # Register commands
    RunCompetitionsAppCommand.register_subcommand(commands_parser)
    CreateCompetitionAppCommand.register_subcommand(commands_parser)
    SubmitCompetitionAppCommand.register_subcommand(commands_parser)

    args = parser.parse_args()

    if args.version:
        print(__version__)
        exit(0)

    if not hasattr(args, "func"):
        parser.print_help()
        exit(1)

    command = args.func(args)
    command.run()


if __name__ == "__main__":
    main()

competitions/cli/create.py
ADDED
@@ -0,0 +1,20 @@
from argparse import ArgumentParser

from . import BaseCompetitionsCommand


def create_command_factory(args):
    return CreateCompetitionAppCommand()


class CreateCompetitionAppCommand(BaseCompetitionsCommand):
    @staticmethod
    def register_subcommand(parser: ArgumentParser):
        create_project_parser = parser.add_parser("create", description="✨ Start UI to create a new competition")
        create_project_parser.set_defaults(func=create_command_factory)

    def run(self):
        from competitions.create import main

        demo = main()
        demo.launch()

competitions/cli/run.py
ADDED
@@ -0,0 +1,27 @@
from argparse import ArgumentParser

from . import BaseCompetitionsCommand


def run_app_command_factory(args):
    return RunCompetitionsAppCommand(args)


class RunCompetitionsAppCommand(BaseCompetitionsCommand):
    @staticmethod
    def register_subcommand(parser: ArgumentParser):
        create_project_parser = parser.add_parser("run", description="✨ Run competitions app")
        create_project_parser.add_argument("--host", default="0.0.0.0", help="Host to run app on")
        create_project_parser.add_argument("--port", default=7860, help="Port to run app on")
        create_project_parser.set_defaults(func=run_app_command_factory)

    def __init__(self, args):
        self.host = args.host
        self.port = args.port

    def run(self):
        import uvicorn

        from competitions.app import app

        uvicorn.run(app, host=self.host, port=self.port)

competitions/cli/submit.py
ADDED
@@ -0,0 +1,49 @@
import os
from argparse import ArgumentParser

import requests

from . import BaseCompetitionsCommand


def submit_commands_factory(args):
    return SubmitCompetitionAppCommand(args)


class SubmitCompetitionAppCommand(BaseCompetitionsCommand):
    def __init__(self, args):
        self.args = args

    @staticmethod
    def register_subcommand(parser: ArgumentParser):
        submit_competition_parser = parser.add_parser("submit", description="Submit to a competition")
        submit_competition_parser.add_argument(
            "--competition_id", type=str, help="ID of the competition, e.g. huggingface/cool-competition"
        )
        submit_competition_parser.add_argument(
            "--submission", type=str, help="Path to submission file or HuggingFace hub repo"
        )
        submit_competition_parser.add_argument("--comment", type=str, help="Submission comment", default="")
        submit_competition_parser.add_argument("--token", type=str, help="User token, read-only", default="")
        submit_competition_parser.set_defaults(func=submit_commands_factory)

    def run(self):
        if os.path.isfile(self.args.submission):
            files = {"submission_file": open(self.args.submission, "rb")}
            data = {
                "hub_model": "None",
                "submission_comment": self.args.comment,
            }
        else:
            files = {"submission_file": None}
            data = {
                "hub_model": self.args.submission,
                "submission_comment": self.args.comment,
            }

        headers = {"Authorization": f"Bearer {self.args.token}"}

        api_url = "https://" + self.args.competition_id.replace("/", "-") + ".hf.space/new_submission"

        response = requests.post(api_url, data=data, files=files, headers=headers)
        print(response.json())

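The same command can be driven programmatically; a sketch equivalent to running `competitions submit ...` on the command line, with placeholder values for the competition id, submission file, and token:

# Hypothetical programmatic use of the submit command defined above.
from types import SimpleNamespace

from competitions.cli.submit import SubmitCompetitionAppCommand

args = SimpleNamespace(
    competition_id="my-org/my-competition",  # placeholder competition id
    submission="submission.csv",             # local file, or a Hub model id for script competitions
    comment="first try",
    token="hf_...",                          # placeholder read-only user token
)
SubmitCompetitionAppCommand(args).run()
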
competitions/compute_metrics.py
ADDED
@@ -0,0 +1,73 @@
import importlib
import os
import sys

import pandas as pd
from huggingface_hub import hf_hub_download
from sklearn import metrics


def compute_metrics(params):
    if params.metric == "custom":
        metric_file = hf_hub_download(
            repo_id=params.competition_id,
            filename="metric.py",
            token=params.token,
            repo_type="dataset",
        )
        sys.path.append(os.path.dirname(metric_file))
        metric = importlib.import_module("metric")
        evaluation = metric.compute(params)
    else:
        solution_file = hf_hub_download(
            repo_id=params.competition_id,
            filename="solution.csv",
            token=params.token,
            repo_type="dataset",
        )

        solution_df = pd.read_csv(solution_file)

        submission_filename = f"submissions/{params.team_id}-{params.submission_id}.csv"
        submission_file = hf_hub_download(
            repo_id=params.competition_id,
            filename=submission_filename,
            token=params.token,
            repo_type="dataset",
        )
        submission_df = pd.read_csv(submission_file)

        public_ids = solution_df[solution_df.split == "public"][params.submission_id_col].values
        private_ids = solution_df[solution_df.split == "private"][params.submission_id_col].values

        public_solution_df = solution_df[solution_df[params.submission_id_col].isin(public_ids)]
        public_submission_df = submission_df[submission_df[params.submission_id_col].isin(public_ids)]

        private_solution_df = solution_df[solution_df[params.submission_id_col].isin(private_ids)]
        private_submission_df = submission_df[submission_df[params.submission_id_col].isin(private_ids)]

        public_solution_df = public_solution_df.sort_values(params.submission_id_col).reset_index(drop=True)
        public_submission_df = public_submission_df.sort_values(params.submission_id_col).reset_index(drop=True)

        private_solution_df = private_solution_df.sort_values(params.submission_id_col).reset_index(drop=True)
        private_submission_df = private_submission_df.sort_values(params.submission_id_col).reset_index(drop=True)

        _metric = getattr(metrics, params.metric)
        target_cols = [col for col in solution_df.columns if col not in [params.submission_id_col, "split"]]
        public_score = _metric(public_solution_df[target_cols], public_submission_df[target_cols])
        private_score = _metric(private_solution_df[target_cols], private_submission_df[target_cols])

        # scores can also be dictionaries for multiple metrics
        evaluation = {
            "public_score": {
                params.metric: public_score,
            },
            "private_score": {
                params.metric: private_score,
            },
        }

    # check all keys in public_score and private_score are same
    if evaluation["public_score"].keys() != evaluation["private_score"].keys():
        raise ValueError("Public and private scores have different keys")
    return evaluation

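When the metric is "custom", the code above imports a metric.py from the competition dataset repo and calls metric.compute(params), which must return a dict with "public_score" and "private_score" entries sharing the same keys. A minimal sketch of such a file, assuming the same params fields used in the generic path and a "pred" column as in the sample solution.csv generated by create.py:

# Hypothetical metric.py a competition host could upload for the "custom" path.
import pandas as pd
from huggingface_hub import hf_hub_download
from sklearn import metrics


def compute(params):
    solution_file = hf_hub_download(
        repo_id=params.competition_id,
        filename="solution.csv",
        token=params.token,
        repo_type="dataset",
    )
    submission_file = hf_hub_download(
        repo_id=params.competition_id,
        filename=f"submissions/{params.team_id}-{params.submission_id}.csv",
        token=params.token,
        repo_type="dataset",
    )
    solution_df = pd.read_csv(solution_file)
    submission_df = pd.read_csv(submission_file).set_index(params.submission_id_col)

    scores = {}
    for split in ("public", "private"):
        split_df = solution_df[solution_df.split == split]
        # align predictions with the solution rows of this split
        preds = submission_df.loc[split_df[params.submission_id_col]]["pred"]
        scores[f"{split}_score"] = {"accuracy": metrics.accuracy_score(split_df["pred"], preds)}
    return scores
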
competitions/create.py
ADDED
@@ -0,0 +1,348 @@
import io
import json

import gradio as gr
from huggingface_hub import HfApi
from loguru import logger

from competitions.utils import token_information


COMPETITION_DESC = """Sample competition description"""
DATASET_DESC = """Sample dataset description"""
SUBMISSION_DESC = """Sample submission description"""
RULES = """Sample rules"""
SOLUTION_CSV = """
id,pred,split
0,1,public
1,0,private
2,0,private
3,1,private
4,0,public
5,1,private
6,1,public
7,1,private
8,0,public
9,0,private
10,0,private
11,0,private
12,1,private
13,0,private
14,1,public
15,1,private
16,1,private
17,0,private
18,0,private
19,0,public
20,0,private
21,0,private
22,1,private
23,1,public
24,0,private
25,0,private
26,0,public
27,1,private
28,1,private
29,0,private
30,0,public
"""
SOLUTION_CSV = SOLUTION_CSV.strip()

DOCKERFILE = """
FROM huggingface/competitions:latest

CMD uvicorn competitions.app:app --host 0.0.0.0 --port 7860 --workers 1
"""
DOCKERFILE = DOCKERFILE.replace("\n", " ").replace("  ", "\n").strip()

HARDWARE_CHOICES = [
    "cpu-basic",
    "cpu-upgrade",
    "t4-small",
    "t4-medium",
    "a10g-small",
    "a10g-large",
    "a10g-largex2",
    "a10g-largex4",
    "a100-large",
]
METRIC_CHOICES = [
    "accuracy_score",
    "f1_score",
    "hamming_loss",
    "jaccard_score",
    "log_loss",
    "roc_auc_score",
    "mean_squared_error",
    "mean_absolute_error",
    "r2_score",
    "custom",
]


def check_if_user_can_create_competition(user_token):
    """
    Check if the user can create a competition
    :param user_token: the user's token
    :return: True if the user can create a competition, False otherwise
    """
    user_info = token_information(user_token)
    valid_orgs = user_info["orgs"]

    return gr.Dropdown(
        choices=valid_orgs,
        visible=True,
        value=valid_orgs[0],
    )


def _create_readme(competition_name):
    _readme = "---\n"
    _readme += f"title: {competition_name}\n"
    _readme += "emoji: 🚀\n"
    _readme += "colorFrom: green\n"
    _readme += "colorTo: indigo\n"
    _readme += "sdk: docker\n"
    _readme += "pinned: false\n"
    _readme += "tags:\n"
    _readme += " - competition\n"
    _readme += "hf_oauth: true\n"
    _readme += "hf_oauth_scopes:\n"
    _readme += " - read-repos\n"
    _readme += "---\n"
    _readme = io.BytesIO(_readme.encode())
    return _readme


def _create(
    user_token,
    organization,
    competition_name,
    competition_logo,
    hardware,
    competition_type,
    time_limit,
    metric,
    metric_higher_is_better,
    submission_limit,
    selection_limit,
    end_date,
    submission_id_column,
    submission_columns,
    submission_rows,
):
    """
    Create a competition
    """

    # make sure competition name is alphanumeric
    competition_name = "".join([c for c in competition_name if c.isalnum()])
    if len(competition_name) == 0:
        raise gr.Error("Please provide a valid alphanumeric competition name")

    conf_json = {
        "COMPETITION_TYPE": competition_type,
        "SUBMISSION_LIMIT": int(submission_limit),
        "TIME_LIMIT": int(time_limit),
        "SELECTION_LIMIT": int(selection_limit),
        "HARDWARE": hardware,
        "END_DATE": end_date,
        "EVAL_HIGHER_IS_BETTER": metric_higher_is_better is True,
        "SUBMISSION_ID_COLUMN": submission_id_column,
        "SUBMISSION_COLUMNS": submission_columns,
        "SUBMISSION_ROWS": int(submission_rows),
        "EVAL_METRIC": metric,
        "LOGO": competition_logo,
        "DATASET": "",
        "SUBMISSION_FILENAMES": ["submission.csv"],
        "SCORING_METRIC": "",
    }
    teams_json = {}
    user_team_json = {}

    logger.info(f"Creating competition: {competition_name}")

    api = HfApi(token=user_token)
    api.create_repo(
        repo_id=f"{organization}/{competition_name}",
        repo_type="dataset",
        private=True,
    )

    conf_json = json.dumps(conf_json, indent=4)
    conf_json_bytes = conf_json.encode("utf-8")
    conf_json_buffer = io.BytesIO(conf_json_bytes)
    api.upload_file(
        path_or_fileobj=conf_json_buffer,
        path_in_repo="conf.json",
        repo_id=f"{organization}/{competition_name}",
        repo_type="dataset",
    )

    teams_json = json.dumps(teams_json, indent=4)
    teams_json_bytes = teams_json.encode("utf-8")
    teams_json_buffer = io.BytesIO(teams_json_bytes)
    api.upload_file(
        path_or_fileobj=teams_json_buffer,
        path_in_repo="teams.json",
        repo_id=f"{organization}/{competition_name}",
        repo_type="dataset",
    )

    user_team_json = json.dumps(user_team_json, indent=4)
    user_team_json_bytes = user_team_json.encode("utf-8")
    user_team_json_buffer = io.BytesIO(user_team_json_bytes)
    api.upload_file(
        path_or_fileobj=user_team_json_buffer,
        path_in_repo="user_team.json",
        repo_id=f"{organization}/{competition_name}",
        repo_type="dataset",
    )

    comp_desc = io.BytesIO(COMPETITION_DESC.encode())
    api.upload_file(
        path_or_fileobj=comp_desc,
        path_in_repo="COMPETITION_DESC.md",
        repo_id=f"{organization}/{competition_name}",
        repo_type="dataset",
    )

    dataset_desc = io.BytesIO(DATASET_DESC.encode())
    api.upload_file(
        path_or_fileobj=dataset_desc,
        path_in_repo="DATASET_DESC.md",
        repo_id=f"{organization}/{competition_name}",
        repo_type="dataset",
    )

    submission_desc = io.BytesIO(SUBMISSION_DESC.encode())
    api.upload_file(
        path_or_fileobj=submission_desc,
        path_in_repo="SUBMISSION_DESC.md",
        repo_id=f"{organization}/{competition_name}",
        repo_type="dataset",
    )

    solution_csv = io.BytesIO(SOLUTION_CSV.encode())
    api.upload_file(
        path_or_fileobj=solution_csv,
        path_in_repo="solution.csv",
        repo_id=f"{organization}/{competition_name}",
        repo_type="dataset",
    )

    rules = io.BytesIO(RULES.encode())
    api.upload_file(
        path_or_fileobj=rules,
        path_in_repo="RULES.md",
        repo_id=f"{organization}/{competition_name}",
        repo_type="dataset",
    )

    # create competition space
    api.create_repo(
        repo_id=f"{organization}/{competition_name}",
        repo_type="space",
        space_sdk="docker",
        space_hardware="cpu-basic" if competition_type == "script" else hardware,
        private=True,
    )
    api.add_space_secret(repo_id=f"{organization}/{competition_name}", key="HF_TOKEN", value=user_token)
    api.add_space_secret(
        repo_id=f"{organization}/{competition_name}",
        key="COMPETITION_ID",
        value=f"{organization}/{competition_name}",
    )
    readme = _create_readme(competition_name)
    api.upload_file(
        path_or_fileobj=readme,
        path_in_repo="README.md",
        repo_id=f"{organization}/{competition_name}",
        repo_type="space",
    )

    _dockerfile = io.BytesIO(DOCKERFILE.encode())
    api.upload_file(
        path_or_fileobj=_dockerfile,
        path_in_repo="Dockerfile",
        repo_id=f"{organization}/{competition_name}",
        repo_type="space",
    )

    return gr.Markdown(
        value=f"""Created private dataset and competition space.
To make competition public, you should make the space public.
Please note that the dataset should always be kept private.

Private dataset: https://huggingface.co/datasets/{organization}/{competition_name}

Competition space: https://huggingface.co/spaces/{organization}/{competition_name}

Note: there's still some work left. Now you must change the solution.csv file to your own solution,
and make changes to *_desc.md files to reflect your competition. You may also change conf.json
to suit your needs. Please refer to the [documentation](https://hf.co/docs/competitions) for more information.
"""
    )


def main():
    with gr.Blocks() as demo:
        gr.Markdown("# Hugging Face Competition Creator")
        token = gr.Textbox(label="Your Hugging Face write token", lines=1, type="password")
        with gr.Row():
            organization = gr.Dropdown(label="Organization name", choices=[""])
            competition_name = gr.Textbox(label="Competition name", lines=1)
            competition_logo = gr.Textbox(label="Competition logo", value="https://mysite.com/mylogo.png", lines=1)
        with gr.Group():
            with gr.Row():
                hardware = gr.Dropdown(label="Hardware to use", choices=HARDWARE_CHOICES, value=HARDWARE_CHOICES[0])
                competition_type = gr.Dropdown(
                    label="Competition type", choices=["generic", "script"], value="generic"
                )
                time_limit = gr.Textbox(
                    label="Time limit (s). Only used for script competitions", lines=1, value="3600"
                )
            with gr.Row():
                metric = gr.Dropdown(label="Metric to use", choices=METRIC_CHOICES, value=METRIC_CHOICES[0])
                metric_higher_is_better = gr.Dropdown(
                    label="Is higher metric better?", choices=[True, False], value=True
                )
            with gr.Row():
                submission_limit = gr.Textbox(label="Submission limit per day", lines=1, value="5")
                selection_limit = gr.Textbox(label="Final selection limit", lines=1, value="2")
                end_date = gr.Textbox(label="End date (YYYY-MM-DD)", lines=1, value="2024-12-31")
            with gr.Row():
                submission_id_column = gr.Textbox(label="Submission id column", lines=1, value="id")
                submission_columns = gr.Textbox(label="Submission columns", lines=1, value="id,pred")
                submission_rows = gr.Textbox(label="Submission total rows (excluding header)", lines=1, value="10000")

        output_md = gr.Markdown("Click the button below to create the competition")
        create_competition = gr.Button(value="Create competition")
        token.change(check_if_user_can_create_competition, inputs=token, outputs=organization)

        create_competition.click(
            _create,
            inputs=[
                token,
                organization,
                competition_name,
                competition_logo,
                hardware,
                competition_type,
                time_limit,
                metric,
                metric_higher_is_better,
                submission_limit,
                selection_limit,
                end_date,
                submission_id_column,
                submission_columns,
                submission_rows,
            ],
            outputs=output_md,
        )
    return demo


if __name__ == "__main__":
    main().launch()

competitions/download.py
ADDED
@@ -0,0 +1,206 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
from pathlib import Path
|
3 |
+
from typing import Dict, List, Optional, Union
|
4 |
+
|
5 |
+
from huggingface_hub import HfApi
|
6 |
+
from huggingface_hub.constants import DEFAULT_REVISION, HUGGINGFACE_HUB_CACHE, REPO_TYPES
|
7 |
+
from huggingface_hub.file_download import REGEX_COMMIT_HASH, hf_hub_download, repo_folder_name
|
8 |
+
from huggingface_hub.utils import filter_repo_objects, validate_hf_hub_args
|
9 |
+
from joblib import Parallel, delayed
|
10 |
+
|
11 |
+
|
12 |
+
@validate_hf_hub_args
|
13 |
+
def snapshot_download(
|
14 |
+
repo_id: str,
|
15 |
+
*,
|
16 |
+
revision: Optional[str] = None,
|
17 |
+
repo_type: Optional[str] = None,
|
18 |
+
cache_dir: Union[str, Path, None] = None,
|
19 |
+
library_name: Optional[str] = None,
|
20 |
+
library_version: Optional[str] = None,
|
21 |
+
user_agent: Optional[Union[Dict, str]] = None,
|
22 |
+
proxies: Optional[Dict] = None,
|
23 |
+
etag_timeout: Optional[float] = 10,
|
24 |
+
resume_download: Optional[bool] = False,
|
25 |
+
use_auth_token: Optional[Union[bool, str]] = None,
|
26 |
+
local_files_only: Optional[bool] = False,
|
27 |
+
allow_regex: Optional[Union[List[str], str]] = None,
|
28 |
+
ignore_regex: Optional[Union[List[str], str]] = None,
|
29 |
+
allow_patterns: Optional[Union[List[str], str]] = None,
|
30 |
+
ignore_patterns: Optional[Union[List[str], str]] = None,
|
31 |
+
) -> str:
|
32 |
+
"""Download all files of a repo.
|
33 |
+
|
34 |
+
Downloads a whole snapshot of a repo's files at the specified revision. This
|
35 |
+
is useful when you want all files from a repo, because you don't know which
|
36 |
+
ones you will need a priori. All files are nested inside a folder in order
|
37 |
+
to keep their actual filename relative to that folder.
|
38 |
+
|
39 |
+
An alternative would be to just clone a repo but this would require that the
|
40 |
+
user always has git and git-lfs installed, and properly configured.
|
41 |
+
|
42 |
+
Args:
|
43 |
+
repo_id (`str`):
|
44 |
+
A user or an organization name and a repo name separated by a `/`.
|
45 |
+
revision (`str`, *optional*):
|
46 |
+
An optional Git revision id which can be a branch name, a tag, or a
|
47 |
+
commit hash.
|
48 |
+
repo_type (`str`, *optional*):
|
49 |
+
Set to `"dataset"` or `"space"` if uploading to a dataset or space,
|
50 |
+
`None` or `"model"` if uploading to a model. Default is `None`.
|
51 |
+
cache_dir (`str`, `Path`, *optional*):
|
52 |
+
Path to the folder where cached files are stored.
|
53 |
+
library_name (`str`, *optional*):
|
54 |
+
The name of the library to which the object corresponds.
|
55 |
+
library_version (`str`, *optional*):
|
56 |
+
The version of the library.
|
57 |
+
user_agent (`str`, `dict`, *optional*):
|
58 |
+
The user-agent info in the form of a dictionary or a string.
|
59 |
+
proxies (`dict`, *optional*):
|
60 |
+
Dictionary mapping protocol to the URL of the proxy passed to
|
61 |
+
`requests.request`.
|
62 |
+
etag_timeout (`float`, *optional*, defaults to `10`):
|
63 |
+
When fetching ETag, how many seconds to wait for the server to send
|
64 |
+
data before giving up which is passed to `requests.request`.
|
65 |
+
resume_download (`bool`, *optional*, defaults to `False):
|
66 |
+
If `True`, resume a previously interrupted download.
|
67 |
+
use_auth_token (`str`, `bool`, *optional*):
|
68 |
+
A token to be used for the download.
|
69 |
+
            - If `True`, the token is read from the HuggingFace config
              folder.
            - If a string, it's used as the authentication token.
        local_files_only (`bool`, *optional*, defaults to `False`):
            If `True`, avoid downloading the file and return the path to the
            local cached file if it exists.
        allow_patterns (`List[str]` or `str`, *optional*):
            If provided, only files matching at least one pattern are downloaded.
        ignore_patterns (`List[str]` or `str`, *optional*):
            If provided, files matching any of the patterns are not downloaded.

    Returns:
        Local folder path (string) of repo snapshot

    <Tip>

    Raises the following errors:

    - [`EnvironmentError`](https://docs.python.org/3/library/exceptions.html#EnvironmentError)
      if `use_auth_token=True` and the token cannot be found.
    - [`OSError`](https://docs.python.org/3/library/exceptions.html#OSError) if
      ETag cannot be determined.
    - [`ValueError`](https://docs.python.org/3/library/exceptions.html#ValueError)
      if some parameter value is invalid

    </Tip>
    """
    if cache_dir is None:
        cache_dir = HUGGINGFACE_HUB_CACHE
    if revision is None:
        revision = DEFAULT_REVISION
    if isinstance(cache_dir, Path):
        cache_dir = str(cache_dir)

    if repo_type is None:
        repo_type = "model"
    if repo_type not in REPO_TYPES:
        raise ValueError(f"Invalid repo type: {repo_type}. Accepted repo types are:" f" {str(REPO_TYPES)}")

    storage_folder = os.path.join(cache_dir, repo_folder_name(repo_id=repo_id, repo_type=repo_type))

    # TODO: remove these 4 lines in version 0.12
    # Deprecated code to ensure backward compatibility.
    if allow_regex is not None:
        allow_patterns = allow_regex
    if ignore_regex is not None:
        ignore_patterns = ignore_regex

    # if we have no internet connection we will look for an
    # appropriate folder in the cache
    # If the specified revision is a commit hash, look inside "snapshots".
    # If the specified revision is a branch or tag, look inside "refs".
    if local_files_only:
        if REGEX_COMMIT_HASH.match(revision):
            commit_hash = revision
        else:
            # retrieve commit_hash from file
            ref_path = os.path.join(storage_folder, "refs", revision)
            with open(ref_path) as f:
                commit_hash = f.read()

        snapshot_folder = os.path.join(storage_folder, "snapshots", commit_hash)

        if os.path.exists(snapshot_folder):
            return snapshot_folder

        raise ValueError(
            "Cannot find an appropriate cached snapshot folder for the specified"
            " revision on the local disk and outgoing traffic has been disabled. To"
            " enable repo look-ups and downloads online, set 'local_files_only' to"
            " False."
        )

    # if we have internet connection we retrieve the correct folder name from the huggingface api
    _api = HfApi()
    repo_info = _api.repo_info(
        repo_id=repo_id,
        repo_type=repo_type,
        revision=revision,
        use_auth_token=use_auth_token,
    )
    filtered_repo_files = list(
        filter_repo_objects(
            items=[f.rfilename for f in repo_info.siblings],
            allow_patterns=allow_patterns,
            ignore_patterns=ignore_patterns,
        )
    )
    commit_hash = repo_info.sha
    snapshot_folder = os.path.join(storage_folder, "snapshots", commit_hash)
    # if passed revision is not identical to commit_hash
    # then revision has to be a branch name or tag name.
    # In that case store a ref.
    if revision != commit_hash:
        ref_path = os.path.join(storage_folder, "refs", revision)
        os.makedirs(os.path.dirname(ref_path), exist_ok=True)
        with open(ref_path, "w") as f:
            f.write(commit_hash)

    # we pass the commit_hash to hf_hub_download
    # so no network call happens if we already
    # have the file locally.

    # for repo_file in tqdm(filtered_repo_files, f"Fetching {len(filtered_repo_files)} files"):
    #     _ = hf_hub_download(
    #         repo_id,
    #         filename=repo_file,
    #         repo_type=repo_type,
    #         revision=commit_hash,
    #         cache_dir=cache_dir,
    #         library_name=library_name,
    #         library_version=library_version,
    #         user_agent=user_agent,
    #         proxies=proxies,
    #         etag_timeout=etag_timeout,
    #         resume_download=resume_download,
    #         use_auth_token=use_auth_token,
    #     )

    Parallel(n_jobs=10, backend="threading")(
        delayed(hf_hub_download)(
            repo_id,
            filename=repo_file,
            repo_type=repo_type,
            revision=commit_hash,
            cache_dir=cache_dir,
            library_name=library_name,
            library_version=library_version,
            user_agent=user_agent,
            proxies=proxies,
            etag_timeout=etag_timeout,
            resume_download=resume_download,
            use_auth_token=use_auth_token,
        )
        for repo_file in filtered_repo_files
    )

    return snapshot_folder
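For reference, the parallel fetch above is just joblib's `Parallel`/`delayed` wrapping `hf_hub_download`. A minimal standalone sketch of the same pattern (the repo id and file names below are placeholders, not part of this repository):

from huggingface_hub import hf_hub_download
from joblib import Parallel, delayed

# placeholder repo and file list, for illustration only
repo_id = "some-org/some-dataset"
files = ["train.csv", "test.csv", "sample_submission.csv"]

# fetch up to 10 files concurrently on threads; every call goes through the
# shared HF cache, so files that are already local are not re-downloaded
paths = Parallel(n_jobs=10, backend="threading")(
    delayed(hf_hub_download)(repo_id, filename=f, repo_type="dataset") for f in files
)
print(paths)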
competitions/enums.py
ADDED
@@ -0,0 +1,14 @@
import enum


class SubmissionStatus(enum.Enum):
    PENDING = 0
    QUEUED = 1
    PROCESSING = 2
    SUCCESS = 3
    FAILED = 4


class CompetitionType(enum.Enum):
    GENERIC = 1
    SCRIPT = 2
competitions/errors.py
ADDED
@@ -0,0 +1,18 @@
class AuthenticationError(Exception):
    pass


class NoSubmissionError(Exception):
    pass


class SubmissionError(Exception):
    pass


class SubmissionLimitError(Exception):
    pass


class PastDeadlineError(Exception):
    pass
competitions/evaluate.py
ADDED
@@ -0,0 +1,135 @@
import argparse
import json
import os
import shlex
import shutil
import subprocess

from huggingface_hub import HfApi, hf_hub_download, snapshot_download
from huggingface_hub.utils._errors import EntryNotFoundError
from loguru import logger

from competitions import utils
from competitions.compute_metrics import compute_metrics
from competitions.enums import SubmissionStatus
from competitions.params import EvalParams


def parse_args():
    parser = argparse.ArgumentParser()
    parser.add_argument("--config", type=str, required=True)
    return parser.parse_args()


def upload_submission_file(params, file_path):
    logger.info("Uploading submission file")
    pass


def generate_submission_file(params):
    logger.info("Downloading submission dataset")
    submission_dir = snapshot_download(
        repo_id=params.submission_repo,
        local_dir=params.output_path,
        token=os.environ.get("USER_TOKEN"),
        repo_type="model",
    )
    # submission_dir has a script.py file
    # start a subprocess to run the script.py
    # the script.py will generate a submission.csv file in the submission_dir
    # push the submission.csv file to the repo using upload_submission_file
    logger.info("Generating submission file")

    # invalidate USER_TOKEN env var
    os.environ["USER_TOKEN"] = ""

    # Copy sandbox to submission_dir
    shutil.copyfile("sandbox", f"{submission_dir}/sandbox")
    sandbox_path = f"{submission_dir}/sandbox"
    os.chmod(sandbox_path, 0o755)
    os.chown(sandbox_path, os.getuid(), os.getgid())

    # Define your command
    cmd = f"{sandbox_path} python script.py"
    cmd = shlex.split(cmd)

    # Copy the current environment and modify it
    env = os.environ.copy()

    # Start the subprocess
    process = subprocess.Popen(cmd, cwd=submission_dir, env=env)

    # Wait for the process to complete or timeout
    try:
        process.wait(timeout=params.time_limit)
    except subprocess.TimeoutExpired:
        logger.info(f"Process exceeded {params.time_limit} seconds time limit. Terminating...")
        process.kill()
        process.wait()

    # Check if process terminated due to timeout
    if process.returncode and process.returncode != 0:
        logger.error("Subprocess didn't terminate successfully")
    else:
        logger.info("Subprocess terminated successfully")

    logger.info("contents of submission_dir")
    logger.info(os.listdir(submission_dir))

    api = HfApi(token=params.token)
    for sub_file in params.submission_filenames:
        logger.info(f"Uploading {sub_file} to the repository")
        sub_file_ext = sub_file.split(".")[-1]
        api.upload_file(
            path_or_fileobj=f"{submission_dir}/{sub_file}",
            path_in_repo=f"submissions/{params.team_id}-{params.submission_id}.{sub_file_ext}",
            repo_id=params.competition_id,
            repo_type="dataset",
        )


@utils.monitor
def run(params):
    logger.info(params)
    if isinstance(params, dict):
        params = EvalParams(**params)

    utils.update_submission_status(params, SubmissionStatus.PROCESSING.value)

    if params.competition_type == "script":
        try:
            requirements_fname = hf_hub_download(
                repo_id=params.competition_id,
                filename="requirements.txt",
                token=params.token,
                repo_type="dataset",
            )
        except EntryNotFoundError:
            requirements_fname = None

        if requirements_fname:
            logger.info("Installing requirements")
            utils.uninstall_requirements(requirements_fname)
            utils.install_requirements(requirements_fname)
        if len(str(params.dataset).strip()) > 0:
            # _ = Repository(local_dir="/tmp/data", clone_from=params.dataset, token=params.token)
            _ = snapshot_download(
                repo_id=params.dataset,
                local_dir="/tmp/data",
                token=params.token,
                repo_type="dataset",
            )
        generate_submission_file(params)

    evaluation = compute_metrics(params)

    utils.update_submission_score(params, evaluation["public_score"], evaluation["private_score"])
    utils.update_submission_status(params, SubmissionStatus.SUCCESS.value)
    utils.delete_space(params)


if __name__ == "__main__":
    args = parse_args()
    _params = json.load(open(args.config, encoding="utf-8"))
    _params = EvalParams(**_params)
    run(_params)
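The core of `generate_submission_file` is a time-limited subprocess. Stripped of the sandbox binary and the Hub uploads, the pattern looks roughly like this (a sketch with a hypothetical working directory and time limit, not the module's actual entry point):

import shlex
import subprocess

cmd = shlex.split("python script.py")                    # participant's script
process = subprocess.Popen(cmd, cwd="/tmp/submission")   # hypothetical submission dir
try:
    process.wait(timeout=3600)  # seconds, analogous to params.time_limit
except subprocess.TimeoutExpired:
    process.kill()
    process.wait()
print("exit code:", process.returncode)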
competitions/info.py
ADDED
@@ -0,0 +1,197 @@
import io
import json
from dataclasses import dataclass
from datetime import datetime

from huggingface_hub import HfApi, hf_hub_download


@dataclass
class CompetitionInfo:
    competition_id: str
    autotrain_token: str

    def __post_init__(self):
        config_fname = hf_hub_download(
            repo_id=self.competition_id,
            filename="conf.json",
            use_auth_token=self.autotrain_token,
            repo_type="dataset",
        )
        competition_desc = hf_hub_download(
            repo_id=self.competition_id,
            filename="COMPETITION_DESC.md",
            use_auth_token=self.autotrain_token,
            repo_type="dataset",
        )
        dataset_desc = hf_hub_download(
            repo_id=self.competition_id,
            filename="DATASET_DESC.md",
            use_auth_token=self.autotrain_token,
            repo_type="dataset",
        )
        self.config = self.load_config(config_fname)
        self.competition_desc = self.load_md(competition_desc)
        self.dataset_desc = self.load_md(dataset_desc)
        try:
            submission_desc = hf_hub_download(
                repo_id=self.competition_id,
                filename="SUBMISSION_DESC.md",
                use_auth_token=self.autotrain_token,
                repo_type="dataset",
            )
            self.submission_desc = self.load_md(submission_desc)
        except Exception:
            self.submission_desc = None

        try:
            rules_md = hf_hub_download(
                repo_id=self.competition_id,
                filename="RULES.md",
                use_auth_token=self.autotrain_token,
                repo_type="dataset",
            )
            self.rules_md = self.load_md(rules_md)
        except Exception:
            self.rules_md = None

        if self.config["EVAL_METRIC"] == "custom":
            if "SCORING_METRIC" not in self.config:
                raise ValueError(
                    "For custom metrics, please provide a single SCORING_METRIC name in the competition config file: conf.json"
                )

    def load_md(self, md_path):
        with open(md_path, "r", encoding="utf-8") as f:
            md = f.read()
        return md

    def load_config(self, config_path):
        with open(config_path, "r", encoding="utf-8") as f:
            config = json.load(f)
        return config

    @property
    def submission_limit(self):
        return self.config["SUBMISSION_LIMIT"]

    @property
    def selection_limit(self):
        return self.config["SELECTION_LIMIT"]

    @property
    def end_date(self):
        e_d = self.config["END_DATE"]
        return datetime.strptime(e_d, "%Y-%m-%d")

    @property
    def eval_higher_is_better(self):
        hb = self.config["EVAL_HIGHER_IS_BETTER"]
        return True if int(hb) == 1 else False

    @property
    def competition_description(self):
        return self.competition_desc

    @property
    def submission_columns(self):
        return self.config["SUBMISSION_COLUMNS"].split(",")

    @property
    def submission_columns_raw(self):
        return self.config["SUBMISSION_COLUMNS"]

    @property
    def submission_description(self):
        return self.submission_desc

    @property
    def dataset_description(self):
        return self.dataset_desc

    @property
    def logo_url(self):
        return self.config["LOGO"]

    @property
    def competition_type(self):
        return self.config["COMPETITION_TYPE"].lower().strip()

    @property
    def metric(self):
        return self.config["EVAL_METRIC"]

    @property
    def submission_id_col(self):
        return self.config["SUBMISSION_ID_COLUMN"]

    @property
    def submission_cols(self):
        cols = self.config["SUBMISSION_COLUMNS"].split(",")
        cols = [c.strip() for c in cols]
        return cols

    @property
    def submission_rows(self):
        return self.config["SUBMISSION_ROWS"]

    @property
    def time_limit(self):
        return self.config["TIME_LIMIT"]

    @property
    def hardware(self):
        return self.config.get("HARDWARE", "cpu-basic")

    @property
    def dataset(self):
        return self.config.get("DATASET", "")

    @property
    def submission_filenames(self):
        return self.config.get("SUBMISSION_FILENAMES", ["submission.csv"])

    @property
    def scoring_metric(self):
        if self.config["EVAL_METRIC"] == "custom":
            if "SCORING_METRIC" not in self.config:
                raise Exception("Please provide a single SCORING_METRIC in the competition config file: conf.json")
            if self.config["SCORING_METRIC"] is None:
                raise Exception("Please provide a single SCORING_METRIC in the competition config file: conf.json")
            return self.config["SCORING_METRIC"]
        return self.config["EVAL_METRIC"]

    @property
    def rules(self):
        return self.rules_md

    def _save_md(self, md, filename, api):
        md = io.BytesIO(md.encode())
        api.upload_file(
            path_or_fileobj=md,
            path_in_repo=filename,
            repo_id=self.competition_id,
            repo_type="dataset",
        )

    def update_competition_info(self, config, markdowns, token):
        api = HfApi(token=token)
        conf_json = json.dumps(config, indent=4)
        conf_json_bytes = conf_json.encode("utf-8")
        conf_json_buffer = io.BytesIO(conf_json_bytes)
        api.upload_file(
            path_or_fileobj=conf_json_buffer,
            path_in_repo="conf.json",
            repo_id=self.competition_id,
            repo_type="dataset",
        )

        competition_desc = markdowns["competition_desc"]
        dataset_desc = markdowns["dataset_desc"]
        submission_desc = markdowns["submission_desc"]
        rules_md = markdowns["rules"]

        self._save_md(competition_desc, "COMPETITION_DESC.md", api)
        self._save_md(dataset_desc, "DATASET_DESC.md", api)
        self._save_md(submission_desc, "SUBMISSION_DESC.md", api)
        self._save_md(rules_md, "RULES.md", api)
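As a usage sketch (the competition id and token below are placeholders): `CompetitionInfo` pulls `conf.json` and the markdown files from the competition dataset repo at construction time and exposes the config keys as properties.

from competitions.info import CompetitionInfo

info = CompetitionInfo(competition_id="my-org/my-competition", autotrain_token="hf_xxx")

print(info.competition_type)   # COMPETITION_TYPE, lowercased ("generic" or "script")
print(info.metric)             # EVAL_METRIC
print(info.end_date)           # END_DATE parsed with "%Y-%m-%d"
print(info.submission_cols)    # SUBMISSION_COLUMNS split on commas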
competitions/leaderboard.py
ADDED
@@ -0,0 +1,230 @@
import glob
import json
import os
import time
from dataclasses import dataclass
from datetime import datetime

import pandas as pd
from huggingface_hub import hf_hub_download, snapshot_download
from loguru import logger

from competitions.enums import SubmissionStatus


@dataclass
class Leaderboard:
    end_date: datetime
    eval_higher_is_better: bool
    max_selected_submissions: int
    competition_id: str
    token: str
    scoring_metric: str

    def __post_init__(self):
        self.non_score_columns = ["id", "submission_datetime"]

    def _process_public_lb(self):
        start_time = time.time()
        submissions_folder = snapshot_download(
            repo_id=self.competition_id,
            allow_patterns="submission_info/*.json",
            use_auth_token=self.token,
            repo_type="dataset",
        )
        logger.info(f"Downloaded submissions in {time.time() - start_time} seconds")
        start_time = time.time()
        submissions = []
        for submission in glob.glob(os.path.join(submissions_folder, "submission_info", "*.json")):
            with open(submission, "r", encoding="utf-8") as f:
                submission_info = json.load(f)
            # only select submissions that are done
            submission_info["submissions"] = [
                sub for sub in submission_info["submissions"] if sub["status"] == SubmissionStatus.SUCCESS.value
            ]
            submission_info["submissions"] = [
                sub
                for sub in submission_info["submissions"]
                if datetime.strptime(sub["datetime"], "%Y-%m-%d %H:%M:%S") < self.end_date
            ]
            if len(submission_info["submissions"]) == 0:
                continue

            user_id = submission_info["id"]
            user_submissions = []
            for sub in submission_info["submissions"]:
                _sub = {
                    "id": user_id,
                    # "submission_id": sub["submission_id"],
                    # "submission_comment": sub["submission_comment"],
                    # "status": sub["status"],
                    # "selected": sub["selected"],
                }
                for k, v in sub["public_score"].items():
                    _sub[k] = v
                _sub["submission_datetime"] = sub["datetime"]
                user_submissions.append(_sub)

            user_submissions.sort(key=lambda x: x[self.scoring_metric], reverse=self.eval_higher_is_better)
            best_user_submission = user_submissions[0]
            submissions.append(best_user_submission)
        logger.info(f"Processed submissions in {time.time() - start_time} seconds")
        return submissions

    def _process_private_lb(self):
        start_time = time.time()
        submissions_folder = snapshot_download(
            repo_id=self.competition_id,
            allow_patterns="submission_info/*.json",
            use_auth_token=self.token,
            repo_type="dataset",
        )
        logger.info(f"Downloaded submissions in {time.time() - start_time} seconds")
        start_time = time.time()
        submissions = []
        for submission in glob.glob(os.path.join(submissions_folder, "submission_info", "*.json")):
            with open(submission, "r", encoding="utf-8") as f:
                submission_info = json.load(f)
            submission_info["submissions"] = [
                sub for sub in submission_info["submissions"] if sub["status"] == SubmissionStatus.SUCCESS.value
            ]
            if len(submission_info["submissions"]) == 0:
                continue

            user_id = submission_info["id"]
            user_submissions = []
            for sub in submission_info["submissions"]:
                _sub = {
                    "id": user_id,
                    # "submission_id": sub["submission_id"],
                    # "submission_comment": sub["submission_comment"],
                    # "status": sub["status"],
                    "selected": sub["selected"],
                }
                for k, v in sub["public_score"].items():
                    _sub[f"public_{k}"] = v
                for k, v in sub["private_score"].items():
                    _sub[f"private_{k}"] = v
                _sub["submission_datetime"] = sub["datetime"]
                user_submissions.append(_sub)

            # count the number of submissions which are selected
            selected_submissions = 0
            for sub in user_submissions:
                if sub["selected"]:
                    selected_submissions += 1

            if selected_submissions == 0:
                # select submissions with best public score
                user_submissions.sort(
                    key=lambda x: x[f"public_{self.scoring_metric}"], reverse=self.eval_higher_is_better
                )
                # select only the best submission
                best_user_submission = user_submissions[0]

            elif selected_submissions <= self.max_selected_submissions:
                # select only the selected submissions
                user_submissions = [sub for sub in user_submissions if sub["selected"]]
                # sort by private score
                user_submissions.sort(
                    key=lambda x: x[f"private_{self.scoring_metric}"], reverse=self.eval_higher_is_better
                )
                # select only the best submission
                best_user_submission = user_submissions[0]
            else:
                logger.warning(
                    f"User {user_id} has more than {self.max_selected_submissions} selected submissions. Skipping user..."
                )
                continue

            # remove all keys that start with "public_"
            best_user_submission = {k: v for k, v in best_user_submission.items() if not k.startswith("public_")}

            # remove private_ from the keys
            best_user_submission = {k.replace("private_", ""): v for k, v in best_user_submission.items()}

            # remove selected key
            best_user_submission.pop("selected")
            submissions.append(best_user_submission)
        logger.info(f"Processed submissions in {time.time() - start_time} seconds")
        return submissions

    def fetch(self, private=False):
        if private:
            submissions = self._process_private_lb()
        else:
            submissions = self._process_public_lb()

        if len(submissions) == 0:
            return pd.DataFrame()

        df = pd.DataFrame(submissions)

        # convert submission datetime to pandas datetime
        df["submission_datetime"] = pd.to_datetime(df["submission_datetime"], format="%Y-%m-%d %H:%M:%S")

        # only keep submissions before the end date
        df = df[df["submission_datetime"] < self.end_date].reset_index(drop=True)

        # sort by submission datetime
        # sort by public score and submission datetime
        if self.eval_higher_is_better:
            if private:
                df = df.sort_values(
                    by=[self.scoring_metric, "submission_datetime"],
                    ascending=[False, True],
                )
            else:
                df = df.sort_values(
                    by=[self.scoring_metric, "submission_datetime"],
                    ascending=[False, True],
                )
        else:
            if private:
                df = df.sort_values(
                    by=[self.scoring_metric, "submission_datetime"],
                    ascending=[True, True],
                )
            else:
                df = df.sort_values(
                    by=[self.scoring_metric, "submission_datetime"],
                    ascending=[True, True],
                )

        # only keep 4 significant digits in the scores
        for col in df.columns:
            if col in self.non_score_columns:
                continue
            df[col] = df[col].round(4)

        # reset index
        df = df.reset_index(drop=True)
        df["rank"] = df.index + 1

        # convert datetime column to string
        df["submission_datetime"] = df["submission_datetime"].dt.strftime("%Y-%m-%d %H:%M:%S")

        # send submission_datetime to the end
        columns = df.columns.tolist()
        columns.remove("submission_datetime")
        columns.append("submission_datetime")
        df = df[columns]

        # send rank to first position
        columns = df.columns.tolist()
        columns.remove("rank")
        columns = ["rank"] + columns
        df = df[columns]

        team_metadata = hf_hub_download(
            repo_id=self.competition_id,
            filename="teams.json",
            token=self.token,
            repo_type="dataset",
        )
        with open(team_metadata, "r", encoding="utf-8") as f:
            team_metadata = json.load(f)

        df["id"] = df["id"].apply(lambda x: team_metadata[x]["name"])

        return df
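A minimal sketch of how a `Leaderboard` could be built from a `CompetitionInfo` (ids and tokens below are placeholders; the served app wires this up elsewhere):

from competitions.info import CompetitionInfo
from competitions.leaderboard import Leaderboard

info = CompetitionInfo(competition_id="my-org/my-competition", autotrain_token="hf_xxx")
lb = Leaderboard(
    end_date=info.end_date,
    eval_higher_is_better=info.eval_higher_is_better,
    max_selected_submissions=info.selection_limit,
    competition_id="my-org/my-competition",
    token="hf_xxx",
    scoring_metric=info.scoring_metric,
)

public_df = lb.fetch(private=False)   # best successful submission per team, public scores only
print(public_df.head())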
competitions/oauth.py
ADDED
@@ -0,0 +1,122 @@
"""OAuth support for AutoTrain.
Taken from: https://github.com/gradio-app/gradio/blob/main/gradio/oauth.py
"""

from __future__ import annotations

import hashlib
import os
import urllib.parse

import fastapi
from authlib.integrations.base_client.errors import MismatchingStateError
from authlib.integrations.starlette_client import OAuth
from fastapi.responses import RedirectResponse
from starlette.middleware.sessions import SessionMiddleware


OAUTH_CLIENT_ID = os.environ.get("OAUTH_CLIENT_ID")
OAUTH_CLIENT_SECRET = os.environ.get("OAUTH_CLIENT_SECRET")
OAUTH_SCOPES = os.environ.get("OAUTH_SCOPES")
OPENID_PROVIDER_URL = os.environ.get("OPENID_PROVIDER_URL")


def attach_oauth(app: fastapi.FastAPI):
    if os.environ.get("USER_TOKEN") is not None:
        return
    _add_oauth_routes(app)
    # Session Middleware requires a secret key to sign the cookies. Let's use a hash
    # of the OAuth secret key to make it unique to the Space + updated in case OAuth
    # config gets updated.
    session_secret = OAUTH_CLIENT_SECRET + "-competitions-v1"
    # ^ if we change the session cookie format in the future, we can bump the version of the session secret to make
    # sure cookies are invalidated. Otherwise some users with an old cookie format might get a HTTP 500 error.
    app.add_middleware(
        SessionMiddleware,
        secret_key=hashlib.sha256(session_secret.encode()).hexdigest(),
        https_only=True,
        same_site="none",
    )


def _add_oauth_routes(app: fastapi.FastAPI) -> None:
    """Add OAuth routes to the FastAPI app (login, callback handler and logout)."""
    # Check environment variables
    msg = (
        "OAuth is required but {} environment variable is not set. Make sure you've enabled OAuth in your Space by"
        " setting `hf_oauth: true` in the Space metadata."
    )
    if OAUTH_CLIENT_ID is None:
        raise ValueError(msg.format("OAUTH_CLIENT_ID"))
    if OAUTH_CLIENT_SECRET is None:
        raise ValueError(msg.format("OAUTH_CLIENT_SECRET"))
    if OAUTH_SCOPES is None:
        raise ValueError(msg.format("OAUTH_SCOPES"))
    if OPENID_PROVIDER_URL is None:
        raise ValueError(msg.format("OPENID_PROVIDER_URL"))

    # Register OAuth server
    oauth = OAuth()
    oauth.register(
        name="huggingface",
        client_id=OAUTH_CLIENT_ID,
        client_secret=OAUTH_CLIENT_SECRET,
        client_kwargs={"scope": OAUTH_SCOPES},
        server_metadata_url=OPENID_PROVIDER_URL + "/.well-known/openid-configuration",
    )

    # Define OAuth routes
    @app.get("/login/huggingface")
    async def oauth_login(request: fastapi.Request):
        """Endpoint that redirects to HF OAuth page."""
        redirect_uri = request.url_for("auth")
        redirect_uri_as_str = str(redirect_uri)
        if redirect_uri.netloc.endswith(".hf.space"):
            redirect_uri_as_str = redirect_uri_as_str.replace("http://", "https://")
        return await oauth.huggingface.authorize_redirect(request, redirect_uri_as_str)  # type: ignore

    @app.get("/auth")
    async def auth(request: fastapi.Request) -> RedirectResponse:
        """Endpoint that handles the OAuth callback."""
        try:
            oauth_info = await oauth.huggingface.authorize_access_token(request)  # type: ignore
        except MismatchingStateError:
            # If the state mismatch, it is very likely that the cookie is corrupted.
            # There is a bug reported in authlib that causes the token to grow indefinitely if the user tries to login
            # repeatedly. Since cookies cannot get bigger than 4kb, the token will be truncated at some point - hence
            # losing the state. A workaround is to delete the cookie and redirect the user to the login page again.
            # See https://github.com/lepture/authlib/issues/622 for more details.
            login_uri = "/login/huggingface"
            if "_target_url" in request.query_params:
                login_uri += "?" + urllib.parse.urlencode(  # Keep same _target_url as before
                    {"_target_url": request.query_params["_target_url"]}
                )
            for key in list(request.session.keys()):
                # Delete all keys that are related to the OAuth state
                if key.startswith("_state_huggingface"):
                    request.session.pop(key)
            return RedirectResponse(login_uri)

        request.session["oauth_info"] = oauth_info
        return _redirect_to_target(request)


def _generate_redirect_uri(request: fastapi.Request) -> str:
    if "_target_url" in request.query_params:
        # if `_target_url` already in query params => respect it
        target = request.query_params["_target_url"]
    else:
        # otherwise => keep query params
        target = "/?" + urllib.parse.urlencode(request.query_params)

    redirect_uri = request.url_for("oauth_redirect_callback").include_query_params(_target_url=target)
    redirect_uri_as_str = str(redirect_uri)
    if redirect_uri.netloc.endswith(".hf.space"):
        # In Space, FastAPI redirect as http but we want https
        redirect_uri_as_str = redirect_uri_as_str.replace("http://", "https://")
    return redirect_uri_as_str


def _redirect_to_target(request: fastapi.Request, default_target: str = "/") -> RedirectResponse:
    target = request.query_params.get("_target_url", default_target)
    return RedirectResponse(target)
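A minimal sketch of wiring this helper into a FastAPI app (the package's own app module does the real wiring; this only shows the intended call):

import fastapi

from competitions.oauth import attach_oauth

app = fastapi.FastAPI()
# registers /login/huggingface and /auth and adds the signed session middleware;
# it is a no-op when the USER_TOKEN environment variable is set
attach_oauth(app)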
competitions/params.py
ADDED
@@ -0,0 +1,34 @@
import os
from typing import List

from pydantic import BaseModel


class EvalParams(BaseModel):
    competition_id: str
    competition_type: str
    metric: str
    token: str
    team_id: str
    submission_id: str
    submission_id_col: str
    submission_cols: List[str]
    submission_rows: int
    output_path: str
    submission_repo: str
    time_limit: int
    dataset: str
    submission_filenames: List[str]

    class Config:
        protected_namespaces = ()

    def save(self, output_dir):
        """
        Save parameters to a json file.
        """
        os.makedirs(output_dir, exist_ok=True)
        path = os.path.join(output_dir, "params.json")
        # save formatted json
        with open(path, "w", encoding="utf-8") as f:
            f.write(self.model_dump_json(indent=4))
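A sketch of building and persisting an `EvalParams` (all values below are placeholders); `save()` writes the `params.json` that `evaluate.py` later loads via `--config`:

from competitions.params import EvalParams

params = EvalParams(
    competition_id="my-org/my-competition",
    competition_type="generic",
    metric="accuracy",
    token="hf_xxx",
    team_id="team-uuid",
    submission_id="submission-uuid",
    submission_id_col="id",
    submission_cols=["id", "pred"],
    submission_rows=100,
    output_path="/tmp/model",
    submission_repo="",
    time_limit=3600,
    dataset="",
    submission_filenames=["submission.csv"],
)
params.save("/tmp")  # writes /tmp/params.json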
competitions/runner.py
ADDED
@@ -0,0 +1,230 @@
import glob
import io
import json
import os
import time
from dataclasses import dataclass

import pandas as pd
from huggingface_hub import HfApi, hf_hub_download, snapshot_download
from loguru import logger

from competitions.enums import SubmissionStatus
from competitions.info import CompetitionInfo
from competitions.utils import run_evaluation


_DOCKERFILE = """
FROM huggingface/competitions:latest

CMD uvicorn competitions.api:api --port 7860 --host 0.0.0.0
"""

# format _DOCKERFILE
_DOCKERFILE = _DOCKERFILE.replace("\n", " ").replace("  ", "\n").strip()


@dataclass
class JobRunner:
    competition_id: str
    token: str
    output_path: str

    def __post_init__(self):
        self.competition_info = CompetitionInfo(competition_id=self.competition_id, autotrain_token=self.token)
        self.competition_id = self.competition_info.competition_id
        self.competition_type = self.competition_info.competition_type
        self.metric = self.competition_info.metric
        self.submission_id_col = self.competition_info.submission_id_col
        self.submission_cols = self.competition_info.submission_cols
        self.submission_rows = self.competition_info.submission_rows
        self.time_limit = self.competition_info.time_limit
        self.dataset = self.competition_info.dataset
        self.submission_filenames = self.competition_info.submission_filenames

    def get_pending_subs(self):
        submission_jsons = snapshot_download(
            repo_id=self.competition_id,
            allow_patterns="submission_info/*.json",
            token=self.token,
            repo_type="dataset",
        )
        submission_jsons = glob.glob(os.path.join(submission_jsons, "submission_info/*.json"))
        pending_submissions = []
        for _json in submission_jsons:
            _json = json.load(open(_json, "r", encoding="utf-8"))
            team_id = _json["id"]
            for sub in _json["submissions"]:
                if sub["status"] == SubmissionStatus.PENDING.value:
                    pending_submissions.append(
                        {
                            "team_id": team_id,
                            "submission_id": sub["submission_id"],
                            "datetime": sub["datetime"],
                            "submission_repo": sub["submission_repo"],
                            "space_id": sub["space_id"],
                        }
                    )
        if len(pending_submissions) == 0:
            return None
        logger.info(f"Found {len(pending_submissions)} pending submissions.")
        pending_submissions = pd.DataFrame(pending_submissions)
        pending_submissions["datetime"] = pd.to_datetime(pending_submissions["datetime"])
        pending_submissions = pending_submissions.sort_values("datetime")
        pending_submissions = pending_submissions.reset_index(drop=True)
        return pending_submissions

    def _queue_submission(self, team_id, submission_id):
        team_fname = hf_hub_download(
            repo_id=self.competition_id,
            filename=f"submission_info/{team_id}.json",
            token=self.token,
            repo_type="dataset",
        )
        with open(team_fname, "r", encoding="utf-8") as f:
            team_submission_info = json.load(f)

        for submission in team_submission_info["submissions"]:
            if submission["submission_id"] == submission_id:
                submission["status"] = SubmissionStatus.QUEUED.value
                break

        team_submission_info_json = json.dumps(team_submission_info, indent=4)
        team_submission_info_json_bytes = team_submission_info_json.encode("utf-8")
        team_submission_info_json_buffer = io.BytesIO(team_submission_info_json_bytes)
        api = HfApi(token=self.token)
        api.upload_file(
            path_or_fileobj=team_submission_info_json_buffer,
            path_in_repo=f"submission_info/{team_id}.json",
            repo_id=self.competition_id,
            repo_type="dataset",
        )

    def mark_submission_failed(self, team_id, submission_id):
        team_fname = hf_hub_download(
            repo_id=self.competition_id,
            filename=f"submission_info/{team_id}.json",
            token=self.token,
            repo_type="dataset",
        )
        with open(team_fname, "r", encoding="utf-8") as f:
            team_submission_info = json.load(f)

        for submission in team_submission_info["submissions"]:
            if submission["submission_id"] == submission_id:
                submission["status"] = SubmissionStatus.FAILED.value

        team_submission_info_json = json.dumps(team_submission_info, indent=4)
        team_submission_info_json_bytes = team_submission_info_json.encode("utf-8")
        team_submission_info_json_buffer = io.BytesIO(team_submission_info_json_bytes)

        api = HfApi(token=self.token)
        api.upload_file(
            path_or_fileobj=team_submission_info_json_buffer,
            path_in_repo=f"submission_info/{team_id}.json",
            repo_id=self.competition_id,
            repo_type="dataset",
        )

    def run_local(self, team_id, submission_id, submission_repo):
        self._queue_submission(team_id, submission_id)
        eval_params = {
            "competition_id": self.competition_id,
            "competition_type": self.competition_type,
            "metric": self.metric,
            "token": self.token,
            "team_id": team_id,
            "submission_id": submission_id,
            "submission_id_col": self.submission_id_col,
            "submission_cols": self.submission_cols,
            "submission_rows": self.submission_rows,
            "output_path": self.output_path,
            "submission_repo": submission_repo,
            "time_limit": self.time_limit,
            "dataset": self.dataset,
            "submission_filenames": self.submission_filenames,
        }
        eval_params = json.dumps(eval_params)
        eval_pid = run_evaluation(eval_params, local=True, wait=True)
        logger.info(f"New evaluation process started with pid {eval_pid}.")

    def _create_readme(self, project_name):
        _readme = "---\n"
        _readme += f"title: {project_name}\n"
        _readme += "emoji: 🚀\n"
        _readme += "colorFrom: green\n"
        _readme += "colorTo: indigo\n"
        _readme += "sdk: docker\n"
        _readme += "pinned: false\n"
        _readme += "duplicated_from: autotrain-projects/autotrain-advanced\n"
        _readme += "---\n"
        _readme = io.BytesIO(_readme.encode())
        return _readme

    def create_space(self, team_id, submission_id, submission_repo, space_id):
        api = HfApi(token=self.token)
        params = {
            "competition_id": self.competition_id,
            "competition_type": self.competition_type,
            "metric": self.metric,
            "token": self.token,
            "team_id": team_id,
            "submission_id": submission_id,
            "submission_id_col": self.submission_id_col,
            "submission_cols": self.submission_cols,
            "submission_rows": self.submission_rows,
            "output_path": self.output_path,
            "submission_repo": submission_repo,
            "time_limit": self.time_limit,
            "dataset": self.dataset,
            "submission_filenames": self.submission_filenames,
        }

        api.add_space_secret(repo_id=space_id, key="PARAMS", value=json.dumps(params))

        readme = self._create_readme(space_id.split("/")[-1])
        api.upload_file(
            path_or_fileobj=readme,
            path_in_repo="README.md",
            repo_id=space_id,
            repo_type="space",
        )

        _dockerfile = io.BytesIO(_DOCKERFILE.encode())
        api.upload_file(
            path_or_fileobj=_dockerfile,
            path_in_repo="Dockerfile",
            repo_id=space_id,
            repo_type="space",
        )
        self._queue_submission(team_id, submission_id)

    def run(self):
        while True:
            pending_submissions = self.get_pending_subs()
            if pending_submissions is None:
                time.sleep(5)
                continue
            if self.competition_type == "generic":
                for _, row in pending_submissions.iterrows():
                    team_id = row["team_id"]
                    submission_id = row["submission_id"]
                    submission_repo = row["submission_repo"]
                    self.run_local(team_id, submission_id, submission_repo)
            elif self.competition_type == "script":
                for _, row in pending_submissions.iterrows():
                    team_id = row["team_id"]
                    submission_id = row["submission_id"]
                    submission_repo = row["submission_repo"]
                    space_id = row["space_id"]
                    try:
                        self.create_space(team_id, submission_id, submission_repo, space_id)
                    except Exception as e:
                        logger.error(
                            f"Failed to create space for {team_id} {submission_id} {submission_repo} {space_id}: {e}"
                        )
                        # mark submission as failed
                        self.mark_submission_failed(team_id, submission_id)
                        logger.error(f"Marked submission {submission_id} as failed.")
                        continue
            time.sleep(5)
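Usage sketch (placeholder values): the runner polls the competition dataset repo for pending submissions and dispatches them according to the competition type.

from competitions.runner import JobRunner

runner = JobRunner(
    competition_id="my-org/my-competition",
    token="hf_xxx",
    output_path="/tmp/model",
)
# blocks forever: "generic" submissions are evaluated locally,
# "script" submissions get a dedicated Docker Space created for them
runner.run()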
competitions/static/.keep
ADDED
File without changes
competitions/submissions.py
ADDED
@@ -0,0 +1,336 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import io
|
2 |
+
import json
|
3 |
+
import uuid
|
4 |
+
from dataclasses import dataclass
|
5 |
+
from datetime import datetime
|
6 |
+
|
7 |
+
import pandas as pd
|
8 |
+
from huggingface_hub import HfApi, hf_hub_download
|
9 |
+
|
10 |
+
from competitions.enums import SubmissionStatus
|
11 |
+
from competitions.errors import AuthenticationError, PastDeadlineError, SubmissionError, SubmissionLimitError
|
12 |
+
from competitions.utils import token_information
|
13 |
+
|
14 |
+
|
15 |
+
@dataclass
|
16 |
+
class Submissions:
|
17 |
+
competition_id: str
|
18 |
+
competition_type: str
|
19 |
+
submission_limit: str
|
20 |
+
hardware: str
|
21 |
+
end_date: datetime
|
22 |
+
token: str
|
23 |
+
|
24 |
+
def _verify_submission(self, bytes_data):
|
25 |
+
return True
|
26 |
+
|
27 |
+
def _num_subs_today(self, todays_date, team_submission_info):
|
28 |
+
todays_submissions = 0
|
29 |
+
for sub in team_submission_info["submissions"]:
|
30 |
+
submission_datetime = sub["datetime"]
|
31 |
+
submission_date = submission_datetime.split(" ")[0]
|
32 |
+
if submission_date == todays_date:
|
33 |
+
todays_submissions += 1
|
34 |
+
return todays_submissions
|
35 |
+
|
36 |
+
def _is_submission_allowed(self, team_id):
|
37 |
+
todays_date = datetime.now()
|
38 |
+
if todays_date > self.end_date:
|
39 |
+
raise PastDeadlineError("Competition has ended.")
|
40 |
+
|
41 |
+
todays_date = todays_date.strftime("%Y-%m-%d")
|
42 |
+
team_submission_info = self._download_team_submissions(team_id)
|
43 |
+
|
44 |
+
if len(team_submission_info["submissions"]) == 0:
|
45 |
+
team_submission_info["submissions"] = []
|
46 |
+
|
47 |
+
todays_submissions = self._num_subs_today(todays_date, team_submission_info)
|
48 |
+
if todays_submissions >= self.submission_limit:
|
49 |
+
return False
|
50 |
+
return True
|
51 |
+
|
52 |
+
def _increment_submissions(
|
53 |
+
self,
|
54 |
+
team_id,
|
55 |
+
user_id,
|
56 |
+
submission_id,
|
57 |
+
submission_comment,
|
58 |
+
submission_repo=None,
|
59 |
+
space_id=None,
|
60 |
+
):
|
61 |
+
if submission_repo is None:
|
62 |
+
submission_repo = ""
|
63 |
+
if space_id is None:
|
64 |
+
space_id = ""
|
65 |
+
team_fname = hf_hub_download(
|
66 |
+
repo_id=self.competition_id,
|
67 |
+
filename=f"submission_info/{team_id}.json",
|
68 |
+
token=self.token,
|
69 |
+
repo_type="dataset",
|
70 |
+
)
|
71 |
+
with open(team_fname, "r", encoding="utf-8") as f:
|
72 |
+
team_submission_info = json.load(f)
|
73 |
+
datetime_now = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
|
74 |
+
|
75 |
+
# here goes all the default stuff for submission
|
76 |
+
team_submission_info["submissions"].append(
|
77 |
+
{
|
78 |
+
"datetime": datetime_now,
|
79 |
+
"submission_id": submission_id,
|
80 |
+
"submission_comment": submission_comment,
|
81 |
+
"submission_repo": submission_repo,
|
82 |
+
"space_id": space_id,
|
83 |
+
"submitted_by": user_id,
|
84 |
+
"status": SubmissionStatus.PENDING.value,
|
85 |
+
"selected": False,
|
86 |
+
"public_score": {},
|
87 |
+
"private_score": {},
|
88 |
+
}
|
89 |
+
)
|
90 |
+
# count the number of times user has submitted today
|
91 |
+
todays_date = datetime.now().strftime("%Y-%m-%d")
|
92 |
+
todays_submissions = self._num_subs_today(todays_date, team_submission_info)
|
93 |
+
self._upload_team_submissions(team_id, team_submission_info)
|
94 |
+
return todays_submissions
|
95 |
+
|
96 |
+
def _upload_team_submissions(self, team_id, team_submission_info):
|
97 |
+
team_submission_info_json = json.dumps(team_submission_info, indent=4)
|
98 |
+
team_submission_info_json_bytes = team_submission_info_json.encode("utf-8")
|
99 |
+
team_submission_info_json_buffer = io.BytesIO(team_submission_info_json_bytes)
|
100 |
+
api = HfApi(token=self.token)
|
101 |
+
api.upload_file(
|
102 |
+
path_or_fileobj=team_submission_info_json_buffer,
|
103 |
+
path_in_repo=f"submission_info/{team_id}.json",
|
104 |
+
repo_id=self.competition_id,
|
105 |
+
repo_type="dataset",
|
106 |
+
)
|
107 |
+
|
108 |
+
def _download_team_submissions(self, team_id):
|
109 |
+
team_fname = hf_hub_download(
|
110 |
+
repo_id=self.competition_id,
|
111 |
+
filename=f"submission_info/{team_id}.json",
|
112 |
+
token=self.token,
|
113 |
+
repo_type="dataset",
|
114 |
+
)
|
115 |
+
with open(team_fname, "r", encoding="utf-8") as f:
|
116 |
+
team_submission_info = json.load(f)
|
117 |
+
return team_submission_info
|
118 |
+
|
119 |
+
def update_selected_submissions(self, user_token, selected_submission_ids):
|
120 |
+
current_datetime = datetime.now()
|
121 |
+
if current_datetime > self.end_date:
|
122 |
+
raise PastDeadlineError("Competition has ended.")
|
123 |
+
|
124 |
+
user_info = self._get_user_info(user_token)
|
125 |
+
team_id = self._get_team_id(user_info, create_team=False)
|
126 |
+
team_submission_info = self._download_team_submissions(team_id)
|
127 |
+
|
128 |
+
for sub in team_submission_info["submissions"]:
|
129 |
+
if sub["submission_id"] in selected_submission_ids:
|
130 |
+
sub["selected"] = True
|
131 |
+
else:
|
132 |
+
sub["selected"] = False
|
133 |
+
|
134 |
+
self._upload_team_submissions(team_id, team_submission_info)
|
135 |
+
|
136 |
+
def _get_team_subs(self, team_id, private=False):
|
137 |
+
team_submissions_info = self._download_team_submissions(team_id)
|
138 |
+
submissions_df = pd.DataFrame(team_submissions_info["submissions"])
|
139 |
+
|
140 |
+
if len(submissions_df) == 0:
|
141 |
+
return pd.DataFrame(), pd.DataFrame()
|
142 |
+
|
143 |
+
if not private:
|
144 |
+
submissions_df = submissions_df.drop(columns=["private_score"])
|
145 |
+
|
146 |
+
submissions_df = submissions_df.sort_values(by="datetime", ascending=False)
|
147 |
+
submissions_df = submissions_df.reset_index(drop=True)
|
148 |
+
|
149 |
+
# stringify public_score column
|
150 |
+
submissions_df["public_score"] = submissions_df["public_score"].apply(json.dumps)
|
151 |
+
|
152 |
+
if private:
|
153 |
+
submissions_df["private_score"] = submissions_df["private_score"].apply(json.dumps)
|
154 |
+
|
155 |
+
submissions_df["status"] = submissions_df["status"].apply(lambda x: SubmissionStatus(x).name)
|
156 |
+
|
157 |
+
return submissions_df
|
158 |
+
|
159 |
+
def _get_user_info(self, user_token):
|
160 |
+
user_info = token_information(token=user_token)
|
161 |
+
if "error" in user_info:
|
162 |
+
raise AuthenticationError("Invalid token")
|
163 |
+
|
164 |
+
# if user_info["emailVerified"] is False:
|
165 |
+
# raise AuthenticationError("Please verify your email on Hugging Face Hub")
|
166 |
+
return user_info
|
167 |
+
|
168 |
+
def my_submissions(self, user_token):
|
169 |
+
user_info = self._get_user_info(user_token)
|
170 |
+
current_date_time = datetime.now()
|
171 |
+
private = False
|
172 |
+
if current_date_time >= self.end_date:
|
173 |
+
private = True
|
174 |
+
team_id = self._get_team_id(user_info, create_team=False)
|
175 |
+
if not team_id:
|
176 |
+
return pd.DataFrame()
|
177 |
+
return self._get_team_subs(team_id, private=private)
|
178 |
+
|
179 |
+
def _create_team(self, user_team, user_id, user_name):
|
180 |
+
team_metadata = hf_hub_download(
|
181 |
+
repo_id=self.competition_id,
|
182 |
+
filename="teams.json",
|
183 |
+
token=self.token,
|
184 |
+
repo_type="dataset",
|
185 |
+
)
|
186 |
+
|
187 |
+
with open(team_metadata, "r", encoding="utf-8") as f:
|
188 |
+
team_metadata = json.load(f)
|
189 |
+
|
190 |
+
# create a new team, if user is not in any team
|
191 |
+
team_id = str(uuid.uuid4())
|
192 |
+
user_team[user_id] = team_id
|
193 |
+
|
194 |
+
team_metadata[team_id] = {
|
195 |
+
"id": team_id,
|
196 |
+
"name": user_name,
|
197 |
+
"members": [user_id],
|
198 |
+
"leader": user_id,
|
199 |
+
}
|
200 |
+
|
201 |
+
user_team_json = json.dumps(user_team, indent=4)
|
202 |
+
user_team_json_bytes = user_team_json.encode("utf-8")
|
203 |
+
user_team_json_buffer = io.BytesIO(user_team_json_bytes)
|
204 |
+
|
205 |
+
team_metadata_json = json.dumps(team_metadata, indent=4)
|
206 |
+
team_metadata_json_bytes = team_metadata_json.encode("utf-8")
|
207 |
+
team_metadata_json_buffer = io.BytesIO(team_metadata_json_bytes)
|
208 |
+
|
209 |
+
team_submission_info = {}
|
210 |
+
team_submission_info["id"] = team_id
|
211 |
+
        team_submission_info["submissions"] = []
        team_submission_info_json = json.dumps(team_submission_info, indent=4)
        team_submission_info_json_bytes = team_submission_info_json.encode("utf-8")
        team_submission_info_json_buffer = io.BytesIO(team_submission_info_json_bytes)

        api = HfApi(token=self.token)
        api.upload_file(
            path_or_fileobj=user_team_json_buffer,
            path_in_repo="user_team.json",
            repo_id=self.competition_id,
            repo_type="dataset",
        )
        api.upload_file(
            path_or_fileobj=team_metadata_json_buffer,
            path_in_repo="teams.json",
            repo_id=self.competition_id,
            repo_type="dataset",
        )
        api.upload_file(
            path_or_fileobj=team_submission_info_json_buffer,
            path_in_repo=f"submission_info/{team_id}.json",
            repo_id=self.competition_id,
            repo_type="dataset",
        )
        return team_id

    def _get_team_id(self, user_info, create_team):
        user_id = user_info["id"]
        user_name = user_info["name"]
        user_team = hf_hub_download(
            repo_id=self.competition_id,
            filename="user_team.json",
            token=self.token,
            repo_type="dataset",
        )
        with open(user_team, "r", encoding="utf-8") as f:
            user_team = json.load(f)

        if user_id in user_team:
            return user_team[user_id]

        if create_team is False:
            return None

        # if user_id is not there in user_team, create a new team
        team_id = self._create_team(user_team, user_id, user_name)
        return team_id

    def new_submission(self, user_token, uploaded_file, submission_comment):
        # verify token
        user_info = self._get_user_info(user_token)
        submission_id = str(uuid.uuid4())
        user_id = user_info["id"]
        team_id = self._get_team_id(user_info, create_team=True)

        # check if team can submit to the competition
        if self._is_submission_allowed(team_id) is False:
            raise SubmissionLimitError("Submission limit reached")

        if self.competition_type == "generic":
            bytes_data = uploaded_file.file.read()
            # verify file is valid
            if not self._verify_submission(bytes_data):
                raise SubmissionError("Invalid submission file")

            file_extension = uploaded_file.filename.split(".")[-1]
            # upload file to hf hub
            api = HfApi(token=self.token)
            api.upload_file(
                path_or_fileobj=bytes_data,
                path_in_repo=f"submissions/{team_id}-{submission_id}.{file_extension}",
                repo_id=self.competition_id,
                repo_type="dataset",
            )
            submissions_made = self._increment_submissions(
                team_id=team_id,
                user_id=user_id,
                submission_id=submission_id,
                submission_comment=submission_comment,
            )
        else:
            # Download the submission repo and upload it to the competition repo
            # submission_repo = snapshot_download(
            #     repo_id=uploaded_file,
            #     local_dir=submission_id,
            #     token=user_token,
            #     repo_type="model",
            # )
            # api = HfApi(token=self.token)
            # competition_user = self.competition_id.split("/")[0]
            # api.create_repo(
            #     repo_id=f"{competition_user}/{submission_id}",
            #     repo_type="model",
            #     private=True,
            # )
            # api.upload_folder(
            #     folder_path=submission_repo,
            #     repo_id=f"{competition_user}/{submission_id}",
            #     repo_type="model",
            # )
            # create barebones submission runner space
            user_api = HfApi(token=user_token)
            # submission_id is the sha of the submitted model repo + "__" + submission_id
            submission_id = user_api.model_info(repo_id=uploaded_file).sha + "__" + submission_id
            competition_organizer = self.competition_id.split("/")[0]
            space_id = f"{competition_organizer}/comp-{submission_id}"
            api = HfApi(token=self.token)
            api.create_repo(
                repo_id=space_id,
                repo_type="space",
                space_sdk="docker",
                space_hardware=self.hardware,
                private=True,
            )

            api.add_space_secret(repo_id=space_id, key="USER_TOKEN", value=user_token)
            submissions_made = self._increment_submissions(
                team_id=team_id,
                user_id=user_id,
                submission_id=submission_id,
                submission_comment=submission_comment,
                submission_repo=uploaded_file,
                space_id=space_id,
            )
        remaining_submissions = self.submission_limit - submissions_made
        return remaining_submissions
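The `_create_team` path above keeps all competition state as JSON files inside the competition dataset repo: `user_team.json` maps a Hugging Face user id to a team id (this is what `_get_team_id` reads back), `teams.json` holds team metadata, and `submission_info/<team_id>.json` starts with an empty `submissions` list. Below is a minimal, self-contained sketch of the same serialize-to-buffer-and-upload idiom; the repo id, token, and ids in the payload are placeholders, not values from this commit.

import io
import json

from huggingface_hub import HfApi

# Placeholders -- substitute a real competition dataset repo and a write token.
COMPETITION_ID = "my-org/my-competition"
TOKEN = "hf_xxx"

# user_team.json: user id -> team id, the same mapping _get_team_id consumes.
user_team = {"user-id-123": "team-id-abc"}

payload = json.dumps(user_team, indent=4).encode("utf-8")

api = HfApi(token=TOKEN)
api.upload_file(
    path_or_fileobj=io.BytesIO(payload),  # upload straight from memory, no temp file
    path_in_repo="user_team.json",
    repo_id=COMPETITION_ID,
    repo_type="dataset",
)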
competitions/templates/index.html
ADDED
@@ -0,0 +1,951 @@
1 |
+
<!doctype html>
|
2 |
+
<html>
|
3 |
+
|
4 |
+
<head>
|
5 |
+
<meta charset="UTF-8">
|
6 |
+
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
7 |
+
<script src="https://cdn.jsdelivr.net/npm/marked/marked.min.js"></script>
|
8 |
+
<script src="https://cdn.tailwindcss.com?plugins=forms,typography,aspect-ratio,line-clamp"></script>
|
9 |
+
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/5.15.3/css/all.min.css">
|
10 |
+
<link href="https://cdnjs.cloudflare.com/ajax/libs/flowbite/2.2.1/flowbite.min.css" rel="stylesheet" />
|
11 |
+
<script>
|
12 |
+
document.addEventListener('DOMContentLoaded', function () {
|
13 |
+
function showSubmissionModal() {
|
14 |
+
const modal = document.getElementById('submission-modal');
|
15 |
+
modal.classList.add('flex');
|
16 |
+
modal.classList.remove('hidden');
|
17 |
+
}
|
18 |
+
|
19 |
+
function hideSubmissionModal() {
|
20 |
+
const modal = document.getElementById('submission-modal');
|
21 |
+
modal.classList.remove('flex');
|
22 |
+
modal.classList.add('hidden');
|
23 |
+
}
|
24 |
+
|
25 |
+
function addTargetBlankToLinks() {
|
26 |
+
const content = document.getElementById('content');
|
27 |
+
const links = content.getElementsByTagName('a');
|
28 |
+
|
29 |
+
for (let i = 0; i < links.length; i++) {
|
30 |
+
if (!links[i].hasAttribute('target')) {
|
31 |
+
links[i].setAttribute('target', '_blank');
|
32 |
+
}
|
33 |
+
}
|
34 |
+
}
|
35 |
+
|
36 |
+
function fetchAndDisplayCompetitionInfo() {
|
37 |
+
const articleLoadingSpinner = document.getElementById('articleLoadingSpinner');
|
38 |
+
articleLoadingSpinner.classList.remove('hidden');
|
39 |
+
fetch('/competition_info')
|
40 |
+
.then(response => {
|
41 |
+
if (!response.ok) {
|
42 |
+
throw new Error('Network response was not ok');
|
43 |
+
}
|
44 |
+
return response.json(); // Parse the JSON response
|
45 |
+
})
|
46 |
+
.then(data => {
|
47 |
+
// Populate the 'content' div with the HTML from the response
|
48 |
+
const contentDiv = document.getElementById('content');
|
49 |
+
contentDiv.style.display = 'block';
|
50 |
+
contentDiv.innerHTML = marked.parse(data.response);
|
51 |
+
addTargetBlankToLinks();
|
52 |
+
articleLoadingSpinner.classList.add('hidden');
|
53 |
+
})
|
54 |
+
.catch(error => {
|
55 |
+
console.error('There has been a problem with your fetch operation:', error);
|
56 |
+
articleLoadingSpinner.classList.add('hidden');
|
57 |
+
});
|
58 |
+
}
|
59 |
+
|
60 |
+
function fetchAndDisplayDatasetInfo() {
|
61 |
+
const articleLoadingSpinner = document.getElementById('articleLoadingSpinner');
|
62 |
+
articleLoadingSpinner.classList.remove('hidden');
|
63 |
+
fetch('/dataset_info')
|
64 |
+
.then(response => {
|
65 |
+
if (!response.ok) {
|
66 |
+
throw new Error('Network response was not ok');
|
67 |
+
}
|
68 |
+
return response.json(); // Parse the JSON response
|
69 |
+
})
|
70 |
+
.then(data => {
|
71 |
+
// Populate the 'content' div with the HTML from the response
|
72 |
+
const contentDiv = document.getElementById('content');
|
73 |
+
contentDiv.innerHTML = marked.parse(data.response);
|
74 |
+
addTargetBlankToLinks();
|
75 |
+
articleLoadingSpinner.classList.add('hidden');
|
76 |
+
})
|
77 |
+
.catch(error => {
|
78 |
+
console.error('There has been a problem with your fetch operation:', error);
|
79 |
+
articleLoadingSpinner.classList.add('hidden');
|
80 |
+
});
|
81 |
+
}
|
82 |
+
function fetchAndDisplayLeaderboard(leaderboardType) {
|
83 |
+
const articleLoadingSpinner = document.getElementById('articleLoadingSpinner');
|
84 |
+
articleLoadingSpinner.classList.remove('hidden');
|
85 |
+
|
86 |
+
const payload = {
|
87 |
+
lb: leaderboardType,
|
88 |
+
};
|
89 |
+
|
90 |
+
fetch('/leaderboard', {
|
91 |
+
method: 'POST',
|
92 |
+
headers: {
|
93 |
+
'Content-Type': 'application/json'
|
94 |
+
},
|
95 |
+
body: JSON.stringify(payload)
|
96 |
+
})
|
97 |
+
.then(response => {
|
98 |
+
if (!response.ok) {
|
99 |
+
throw new Error('Network response was not ok');
|
100 |
+
}
|
101 |
+
return response.json();
|
102 |
+
})
|
103 |
+
.then(data => {
|
104 |
+
const contentDiv = document.getElementById('content');
|
105 |
+
contentDiv.innerHTML = marked.parse(data.response);
|
106 |
+
articleLoadingSpinner.classList.add('hidden');
|
107 |
+
})
|
108 |
+
.catch(error => {
|
109 |
+
console.error('There has been a problem with your fetch operation:', error);
|
110 |
+
articleLoadingSpinner.classList.add('hidden');
|
111 |
+
});
|
112 |
+
}
|
113 |
+
|
114 |
+
function fetchAndDisplayPublicLeaderboard() {
|
115 |
+
fetchAndDisplayLeaderboard('public');
|
116 |
+
}
|
117 |
+
|
118 |
+
function fetchAndDisplayPrivateLeaderboard() {
|
119 |
+
fetchAndDisplayLeaderboard('private');
|
120 |
+
}
|
121 |
+
|
122 |
+
function fetchAndDisplaySubmissions() {
|
123 |
+
const apiEndpoint = '/my_submissions';
|
124 |
+
const articleLoadingSpinner = document.getElementById('articleLoadingSpinner');
|
125 |
+
articleLoadingSpinner.classList.remove('hidden');
|
126 |
+
|
127 |
+
const requestOptions = {
|
128 |
+
method: 'POST',
|
129 |
+
headers: {
|
130 |
+
'Content-Type': 'application/json',
|
131 |
+
}
|
132 |
+
};
|
133 |
+
|
134 |
+
fetch(apiEndpoint, requestOptions)
|
135 |
+
.then(response => {
|
136 |
+
if (!response.ok) {
|
137 |
+
throw new Error('Network response was not ok');
|
138 |
+
}
|
139 |
+
return response.json();
|
140 |
+
})
|
141 |
+
.then(data => {
|
142 |
+
const contentDiv = document.getElementById('content');
|
143 |
+
// console.log(data.response.submissions);
|
144 |
+
// contentDiv.innerHTML = marked.parse(data.response.submission_text) + data.response.submissions;
|
145 |
+
if (data.response.submissions && data.response.submissions.length > 0 && data.response.error.length == 0) {
|
146 |
+
// Start building the table HTML
|
147 |
+
let tableHTML = `
|
148 |
+
<div class="flex items-center">
|
149 |
+
<input type="text" name="team_name" id="team_name" class="mt-1 mb-1 block me-2" value="${data.response.team_name}">
|
150 |
+
<button id="updateTeamNameButton" type="button" class="confirm text-white bg-green-600 hover:bg-green-800 focus:ring-4 focus:outline-none focus:ring-green-300 font-medium rounded-lg text-sm inline-flex items-center px-5 py-2.5 text-center me-2">Update Team Name</button>
|
151 |
+
</div>`;
|
152 |
+
tableHTML += '<table border="1"><tr><th>Datetime</th><th>Submission ID</th><th>Public Score</th><th>Submission Comment</th><th>Selected</th><th>Status</th></tr>';
|
153 |
+
|
154 |
+
// Iterate over each submission and add it to the table
|
155 |
+
data.response.submissions.forEach(submission => {
|
156 |
+
tableHTML += `<tr>
|
157 |
+
<td>${submission.datetime}</td>
|
158 |
+
<td>${submission.submission_id}</td>
|
159 |
+
<td>${submission.public_score}</td>
|
160 |
+
<td>${submission.submission_comment}</td>
|
161 |
+
<td><input type="checkbox" name="selectedSubmissions" value="${submission.submission_id}" ${submission.selected ? 'checked' : ''}></td>
|
162 |
+
<td>${submission.status}</td>
|
163 |
+
</tr>`;
|
164 |
+
});
|
165 |
+
|
166 |
+
// Close the table HTML and set it as the content
|
167 |
+
tableHTML += '</table>';
|
168 |
+
tableHTML += '<button id="updateSelectedSubmissionsButton" type="button" class="confirm text-white bg-green-600 hover:bg-green-800 focus:ring-4 focus:outline-none focus:ring-green-300 font-medium rounded-lg text-sm inline-flex items-center px-5 py-2.5 text-center me-2">Update Selected Submissions</button>';
|
169 |
+
// add a text field which displays team name and a button to update team name
|
170 |
+
contentDiv.innerHTML = marked.parse(data.response.submission_text) + tableHTML;
|
171 |
+
document.getElementById('updateSelectedSubmissionsButton').addEventListener('click', function () {
|
172 |
+
updateSelectedSubmissions();
|
173 |
+
});
|
174 |
+
document.getElementById('updateTeamNameButton').addEventListener('click', function () {
|
175 |
+
updateTeamName();
|
176 |
+
});
|
177 |
+
} else {
|
178 |
+
// Display message if there are no submissions
|
179 |
+
contentDiv.innerHTML = marked.parse(data.response.submission_text) + marked.parse(data.response.error);
|
180 |
+
}
|
181 |
+
articleLoadingSpinner.classList.add('hidden');
|
182 |
+
})
|
183 |
+
.catch(error => {
|
184 |
+
console.error('There was a problem with the fetch operation:', error);
|
185 |
+
articleLoadingSpinner.classList.add('hidden');
|
186 |
+
});
|
187 |
+
}
|
188 |
+
|
189 |
+
function fetchAndDisplaySubmissionInfo() {
|
190 |
+
const articleLoadingSpinner = document.getElementById('articleLoadingSpinner');
|
191 |
+
articleLoadingSpinner.classList.remove('hidden');
|
192 |
+
fetch('/submission_info')
|
193 |
+
.then(response => {
|
194 |
+
if (!response.ok) {
|
195 |
+
throw new Error('Network response was not ok');
|
196 |
+
}
|
197 |
+
return response.json(); // Parse the JSON response
|
198 |
+
})
|
199 |
+
.then(data => {
|
200 |
+
// Populate the 'content' div with the HTML from the response
|
201 |
+
const contentDiv = document.getElementById('content');
|
202 |
+
contentDiv.innerHTML = marked.parse(data.response);
|
203 |
+
addTargetBlankToLinks();
|
204 |
+
articleLoadingSpinner.classList.add('hidden');
|
205 |
+
})
|
206 |
+
.catch(error => {
|
207 |
+
console.error('There has been a problem with your fetch operation:', error);
|
208 |
+
articleLoadingSpinner.classList.add('hidden');
|
209 |
+
});
|
210 |
+
}
|
211 |
+
|
212 |
+
function fetchAndDisplayTeamInfo() {
|
213 |
+
const apiEndpoint = '/team_info';
|
214 |
+
const articleLoadingSpinner = document.getElementById('articleLoadingSpinner');
|
215 |
+
articleLoadingSpinner.classList.remove('hidden');
|
216 |
+
|
217 |
+
const requestOptions = {
|
218 |
+
method: 'POST',
|
219 |
+
headers: {
|
220 |
+
'Content-Type': 'application/json',
|
221 |
+
}
|
222 |
+
};
|
223 |
+
fetch(apiEndpoint, requestOptions)
|
224 |
+
.then(response => {
|
225 |
+
if (!response.ok) {
|
226 |
+
throw new Error('Network response was not ok');
|
227 |
+
}
|
228 |
+
return response.json(); // Parse the JSON response
|
229 |
+
})
|
230 |
+
.then(data => {
|
231 |
+
// Populate the 'content' div with the HTML from the response
|
232 |
+
const contentDiv = document.getElementById('content');
|
233 |
+
if (data.team_exists) {
|
234 |
+
contentHTML = "<h2>Team</h2>";
|
235 |
+
contentHTML += "<p>" + data.team_name + "</p>";
|
236 |
+
contentDiv.innerHTML = marked.parse(contentHTML);
|
237 |
+
} else {
|
238 |
+
contentDiv.innerHTML = marked.parse(data.response);
|
239 |
+
}
|
240 |
+
contentDiv.innerHTML = marked.parse(data.response);
|
241 |
+
articleLoadingSpinner.classList.add('hidden');
|
242 |
+
})
|
243 |
+
.catch(error => {
|
244 |
+
console.error('There has been a problem with your fetch operation:', error);
|
245 |
+
articleLoadingSpinner.classList.add('hidden');
|
246 |
+
});
|
247 |
+
}
|
248 |
+
|
249 |
+
function fetchAndDisplayRules() {
|
250 |
+
const articleLoadingSpinner = document.getElementById('articleLoadingSpinner');
|
251 |
+
articleLoadingSpinner.classList.remove('hidden');
|
252 |
+
fetch('/rules')
|
253 |
+
.then(response => {
|
254 |
+
if (!response.ok) {
|
255 |
+
throw new Error('Network response was not ok');
|
256 |
+
}
|
257 |
+
return response.json(); // Parse the JSON response
|
258 |
+
})
|
259 |
+
.then(data => {
|
260 |
+
// Populate the 'content' div with the HTML from the response
|
261 |
+
const contentDiv = document.getElementById('content');
|
262 |
+
contentDiv.innerHTML = marked.parse(data.response);
|
263 |
+
addTargetBlankToLinks();
|
264 |
+
articleLoadingSpinner.classList.add('hidden');
|
265 |
+
})
|
266 |
+
.catch(error => {
|
267 |
+
console.error('There has been a problem with your fetch operation:', error);
|
268 |
+
articleLoadingSpinner.classList.add('hidden');
|
269 |
+
});
|
270 |
+
}
|
271 |
+
|
272 |
+
const homeLink = document.getElementById('home');
|
273 |
+
const datasetLink = document.getElementById('dataset');
|
274 |
+
const publicLBLink = document.getElementById('public_lb');
|
275 |
+
const privateLBLink = document.getElementById('private_lb');
|
276 |
+
const newSubmission = document.getElementById('new_submission');
|
277 |
+
const mySubmissions = document.getElementById('my_submissions');
|
278 |
+
const submissionInfo = document.getElementById('submission_info');
|
279 |
+
const rulesLink = document.getElementById('rules');
|
280 |
+
|
281 |
+
// Add a click event listener to the 'Home' link
|
282 |
+
homeLink.addEventListener('click', function (event) {
|
283 |
+
event.preventDefault(); // Prevent the default link behavior
|
284 |
+
fetchAndDisplayCompetitionInfo(); // Fetch and display info on click
|
285 |
+
});
|
286 |
+
|
287 |
+
datasetLink.addEventListener('click', function (event) {
|
288 |
+
event.preventDefault(); // Prevent the default link behavior
|
289 |
+
fetchAndDisplayDatasetInfo(); // Fetch and display info on click
|
290 |
+
});
|
291 |
+
|
292 |
+
publicLBLink.addEventListener('click', function (event) {
|
293 |
+
event.preventDefault(); // Prevent the default link behavior
|
294 |
+
fetchAndDisplayPublicLeaderboard(); // Fetch and display info on click
|
295 |
+
});
|
296 |
+
|
297 |
+
privateLBLink.addEventListener('click', function (event) {
|
298 |
+
event.preventDefault(); // Prevent the default link behavior
|
299 |
+
fetchAndDisplayPrivateLeaderboard(); // Fetch and display info on click
|
300 |
+
});
|
301 |
+
|
302 |
+
newSubmission.addEventListener('click', function (event) {
|
303 |
+
event.preventDefault(); // Prevent the default link behavior
|
304 |
+
showSubmissionModal(); // Fetch and display info on click
|
305 |
+
});
|
306 |
+
mySubmissions.addEventListener('click', function (event) {
|
307 |
+
event.preventDefault(); // Prevent the default link behavior
|
308 |
+
fetchAndDisplaySubmissions(); // Fetch and display info on click
|
309 |
+
});
|
310 |
+
submissionInfo.addEventListener('click', function (event) {
|
311 |
+
event.preventDefault(); // Prevent the default link behavior
|
312 |
+
fetchAndDisplaySubmissionInfo(); // Fetch and display info on click
|
313 |
+
});
|
314 |
+
rulesLink.addEventListener('click', function (event) {
|
315 |
+
event.preventDefault(); // Prevent the default link behavior
|
316 |
+
fetchAndDisplayRules(); // Fetch and display info on click
|
317 |
+
});
|
318 |
+
|
319 |
+
|
320 |
+
// Fetch and display info when the page loads
|
321 |
+
fetchAndDisplayCompetitionInfo();
|
322 |
+
|
323 |
+
document.querySelector('#submission-modal .cancel').addEventListener('click', function () {
|
324 |
+
hideSubmissionModal();
|
325 |
+
});
|
326 |
+
});
|
327 |
+
|
328 |
+
</script>
|
329 |
+
|
330 |
+
<script>
|
331 |
+
function makeApiRequest(url, callback) {
|
332 |
+
var xhr = new XMLHttpRequest();
|
333 |
+
xhr.open("GET", url, true);
|
334 |
+
xhr.onreadystatechange = function () {
|
335 |
+
if (xhr.readyState === 4 && xhr.status === 200) {
|
336 |
+
var response = JSON.parse(xhr.responseText);
|
337 |
+
callback(response.response);
|
338 |
+
}
|
339 |
+
};
|
340 |
+
xhr.send();
|
341 |
+
}
|
342 |
+
|
343 |
+
function checkOAuth() {
|
344 |
+
var url = "/login_status";
|
345 |
+
makeApiRequest(url, function (response) {
|
346 |
+
if (response === 1) {
|
347 |
+
document.getElementById("loginButton").style.display = "block";
|
348 |
+
document.getElementById("logoutButton").style.display = "none";
|
349 |
+
} else if (response === 2) {
|
350 |
+
document.getElementById("loginButton").style.display = "none";
|
351 |
+
document.getElementById("logoutButton").style.display = "block";
|
352 |
+
}
|
353 |
+
});
|
354 |
+
}
|
355 |
+
window.onload = checkOAuth;
|
356 |
+
</script>
|
357 |
+
</head>
|
358 |
+
|
359 |
+
<body class="flex h-screen">
|
360 |
+
<!-- Sidebar -->
|
361 |
+
<aside id="sidebar-multi-level-sidebar"
|
362 |
+
class="fixed top-0 left-0 z-40 w-64 h-screen transition-transform -translate-x-full sm:translate-x-0"
|
363 |
+
aria-label="Sidebar">
|
364 |
+
<div class="h-full px-3 py-4 overflow-y-auto">
|
365 |
+
<ul class="space-y-2 font-medium">
|
366 |
+
<li>
|
367 |
+
<a href="#" id="home"
|
368 |
+
class="flex items-center p-2 text-gray-900 rounded-lg hover:bg-gray-100 group">
|
369 |
+
<svg class="w-5 h-5 text-gray-500 transition duration-75 group-hover:text-gray-900"
|
370 |
+
viewBox="0 0 22 21" xmlns="http://www.w3.org/2000/svg" fill="currentColor">
|
371 |
+
<path d="M1,10 L11,1 L21,10 L21,20 L1,20 Z" /> <!-- House structure -->
|
372 |
+
<path d="M6,20 L6,14 L16,14 L16,20" /> <!-- Door -->
|
373 |
+
</svg>
|
374 |
+
|
375 |
+
<span class="ms-3">Home</span>
|
376 |
+
</a>
|
377 |
+
</li>
|
378 |
+
<li>
|
379 |
+
<a href="#" id="dataset"
|
380 |
+
class="flex items-center p-2 text-gray-900 rounded-lg hover:bg-gray-100 group">
|
381 |
+
<svg class="flex-shrink-0 w-5 h-5 text-gray-500 transition duration-75 group-hover:text-gray-900"
|
382 |
+
aria-hidden="true" xmlns="http://www.w3.org/2000/svg" fill="currentColor"
|
383 |
+
viewBox="0 0 18 18">
|
384 |
+
<path
|
385 |
+
d="M6.143 0H1.857A1.857 1.857 0 0 0 0 1.857v4.286C0 7.169.831 8 1.857 8h4.286A1.857 1.857 0 0 0 8 6.143V1.857A1.857 1.857 0 0 0 6.143 0Zm10 0h-4.286A1.857 1.857 0 0 0 10 1.857v4.286C10 7.169 10.831 8 11.857 8h4.286A1.857 1.857 0 0 0 18 6.143V1.857A1.857 1.857 0 0 0 16.143 0Zm-10 10H1.857A1.857 1.857 0 0 0 0 11.857v4.286C0 17.169.831 18 1.857 18h4.286A1.857 1.857 0 0 0 8 16.143v-4.286A1.857 1.857 0 0 0 6.143 10Zm10 0h-4.286A1.857 1.857 0 0 0 10 11.857v4.286c0 1.026.831 1.857 1.857 1.857h4.286A1.857 1.857 0 0 0 18 16.143v-4.286A1.857 1.857 0 0 0 16.143 10Z" />
|
386 |
+
</svg>
|
387 |
+
<span class="flex-1 ms-3 whitespace-nowrap">Dataset</span>
|
388 |
+
</a>
|
389 |
+
</li>
|
390 |
+
{% if rules_available %}
|
391 |
+
<li>
|
392 |
+
<a href="#" id="rules"
|
393 |
+
class="flex items-center p-2 text-gray-900 rounded-lg hover:bg-gray-100 group">
|
394 |
+
<svg class="flex-shrink-0 w-5 h-5 text-gray-500 transition duration-75 group-hover:text-gray-900"
|
395 |
+
aria-hidden="true" xmlns="http://www.w3.org/2000/svg" fill="currentColor"
|
396 |
+
viewBox="0 0 24 24">
|
397 |
+
<path
|
398 |
+
d="M3 0h18v2H3V0zm0 4h18v2H3V4zm0 4h18v2H3V8zm0 4h18v2H3v-2zm0 4h18v2H3v-2zm0 4h18v2H3v-2z" />
|
399 |
+
</svg>
|
400 |
+
<span class="flex-1 ms-3 whitespace-nowrap">Rules</span>
|
401 |
+
</a>
|
402 |
+
</li>
|
403 |
+
{% else %}
|
404 |
+
<span id="rules"></span>
|
405 |
+
{% endif %}
|
406 |
+
<li>
|
407 |
+
<button type="button"
|
408 |
+
class="flex items-center w-full p-2 text-base text-gray-900 transition duration-75 rounded-lg group hover:bg-gray-100"
|
409 |
+
aria-controls="lb-dropdown" data-collapse-toggle="lb-dropdown">
|
410 |
+
<svg class="flex-shrink-0 w-5 h-5 text-gray-500 transition duration-75 group-hover:text-gray-900"
|
411 |
+
aria-hidden="true" xmlns="http://www.w3.org/2000/svg" fill="currentColor"
|
412 |
+
viewBox="0 0 18 21">
|
413 |
+
<path d="M2,4 L20,4 L20,16 L2,16 Z" />
|
414 |
+
<path d="M6,17 L16,17 L16,18 L6,18 Z" />
|
415 |
+
</svg>
|
416 |
+
<span class="flex-1 ms-3 text-left rtl:text-right whitespace-nowrap">Leaderboard</span>
|
417 |
+
<svg class="w-3 h-3" aria-hidden="true" xmlns="http://www.w3.org/2000/svg" fill="none"
|
418 |
+
viewBox="0 0 10 6">
|
419 |
+
<path stroke="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="2"
|
420 |
+
d="m1 1 4 4 4-4" />
|
421 |
+
</svg>
|
422 |
+
</button>
|
423 |
+
<ul id="lb-dropdown" class="py-2 space-y-2">
|
424 |
+
<li>
|
425 |
+
<a href="#" id="public_lb"
|
426 |
+
class="flex items-center w-full p-2 text-gray-900 transition duration-75 rounded-lg pl-11 group hover:bg-gray-100">Public</a>
|
427 |
+
</li>
|
428 |
+
<li>
|
429 |
+
<a href="#" id="private_lb"
|
430 |
+
class="flex items-center w-full p-2 text-gray-900 transition duration-75 rounded-lg pl-11 group hover:bg-gray-100">Private</a>
|
431 |
+
</li>
|
432 |
+
</ul>
|
433 |
+
</li>
|
434 |
+
<li>
|
435 |
+
<button type="button"
|
436 |
+
class="flex items-center w-full p-2 text-base text-gray-900 transition duration-75 rounded-lg group hover:bg-gray-100"
|
437 |
+
aria-controls="submissions-dropdown" data-collapse-toggle="submissions-dropdown">
|
438 |
+
<svg class="flex-shrink-0 w-5 h-5 text-gray-500 transition duration-75 group-hover:text-gray-900"
|
439 |
+
aria-hidden="true" xmlns="http://www.w3.org/2000/svg" fill="currentColor"
|
440 |
+
viewBox="0 0 20 20">
|
441 |
+
<path d="M5 5V.13a2.96 2.96 0 0 0-1.293.749L.879 3.707A2.96 2.96 0 0 0 .13 5H5Z" />
|
442 |
+
<path
|
443 |
+
d="M6.737 11.061a2.961 2.961 0 0 1 .81-1.515l6.117-6.116A4.839 4.839 0 0 1 16 2.141V2a1.97 1.97 0 0 0-1.933-2H7v5a2 2 0 0 1-2 2H0v11a1.969 1.969 0 0 0 1.933 2h12.134A1.97 1.97 0 0 0 16 18v-3.093l-1.546 1.546c-.413.413-.94.695-1.513.81l-3.4.679a2.947 2.947 0 0 1-1.85-.227 2.96 2.96 0 0 1-1.635-3.257l.681-3.397Z" />
|
444 |
+
<path
|
445 |
+
d="M8.961 16a.93.93 0 0 0 .189-.019l3.4-.679a.961.961 0 0 0 .49-.263l6.118-6.117a2.884 2.884 0 0 0-4.079-4.078l-6.117 6.117a.96.96 0 0 0-.263.491l-.679 3.4A.961.961 0 0 0 8.961 16Zm7.477-9.8a.958.958 0 0 1 .68-.281.961.961 0 0 1 .682 1.644l-.315.315-1.36-1.36.313-.318Zm-5.911 5.911 4.236-4.236 1.359 1.359-4.236 4.237-1.7.339.341-1.699Z" />
|
446 |
+
</svg>
|
447 |
+
<span class="flex-1 ms-3 text-left rtl:text-right whitespace-nowrap">Submissions</span>
|
448 |
+
<svg class="w-3 h-3" aria-hidden="true" xmlns="http://www.w3.org/2000/svg" fill="none"
|
449 |
+
viewBox="0 0 10 6">
|
450 |
+
<path stroke="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="2"
|
451 |
+
d="m1 1 4 4 4-4" />
|
452 |
+
</svg>
|
453 |
+
</button>
|
454 |
+
<ul id="submissions-dropdown" class="py-2 space-y-2">
|
455 |
+
<li>
|
456 |
+
<a href="#" id="submission_info"
|
457 |
+
class="flex items-center w-full p-2 text-gray-900 transition duration-75 rounded-lg pl-11 group hover:bg-gray-100">Submission
|
458 |
+
information</a>
|
459 |
+
</li>
|
460 |
+
<li>
|
461 |
+
<a href="#" id="my_submissions"
|
462 |
+
class="flex items-center w-full p-2 text-gray-900 transition duration-75 rounded-lg pl-11 group hover:bg-gray-100">My
|
463 |
+
submissions</a>
|
464 |
+
</li>
|
465 |
+
<li>
|
466 |
+
<a href="#" id="new_submission"
|
467 |
+
class="flex items-center w-full p-2 text-gray-900 transition duration-75 rounded-lg pl-11 group hover:bg-gray-100">New
|
468 |
+
submission</a>
|
469 |
+
</li>
|
470 |
+
</ul>
|
471 |
+
</li>
|
472 |
+
<li>
|
473 |
+
<a href="#" id="admin"
|
474 |
+
class="flex items-center p-2 text-gray-900 rounded-lg hover:bg-gray-100 group">
|
475 |
+
<svg class="flex-shrink-0 w-5 h-5 text-gray-500 transition duration-75 group-hover:text-gray-900"
|
476 |
+
aria-hidden="true" xmlns="http://www.w3.org/2000/svg" fill="currentColor"
|
477 |
+
viewBox="0 0 24 24">
|
478 |
+
<path
|
479 |
+
d="M12 15.5c-1.93 0-3.5-1.57-3.5-3.5s1.57-3.5 3.5-3.5 3.5 1.57 3.5 3.5-1.57 3.5-3.5 3.5zm7.43-3.5c.04-.33.07-.66.07-1s-.03-.67-.07-1l2.11-1.65c.19-.15.23-.42.12-.63l-2-3.46c-.11-.21-.35-.3-.57-.24l-2.49 1c-.52-.4-1.08-.73-1.69-.98l-.38-2.65C14.57 2.18 14.3 2 14 2h-4c-.3 0-.57.18-.64.45L8.98 5.1c-.61.25-1.17.58-1.69.98l-2.49-1c-.22-.06-.46.03-.57.24l-2 3.46c-.11.21-.07.48.12.63l2.11 1.65c-.04.33-.07.66-.07 1s.03.67.07 1L2.46 14.1c-.19.15-.23.42-.12.63l2 3.46c.11.21.35.3.57.24l2.49-1c.52.4 1.08.73 1.69.98l.38 2.65c.07.27.34.45.64.45h4c.3 0 .57-.18.64-.45l.38-2.65c.61-.25 1.17-.58 1.69-.98l2.49 1c.22.06.46-.03.57-.24l2-3.46c.11-.21.07-.48-.12-.63l-2.11-1.65zM12 17c-2.76 0-5-2.24-5-5s2.24-5 5-5 5 2.24 5 5-2.24 5-5 5z" />
|
480 |
+
</svg>
|
481 |
+
<span class="flex-1 ms-3 whitespace-nowrap">Admin</span>
|
482 |
+
</a>
|
483 |
+
</li>
|
484 |
+
<li id="loginButton" style="display: none;">
|
485 |
+
<a href="/login/huggingface"
|
486 |
+
class="flex justify-center items-center bg-blue-400 hover:bg-blue-600 text-white text-center font-bold py-2 px-4 rounded transition duration-200 ease-in-out">
|
487 |
+
Login with Hugging Face
|
488 |
+
</a>
|
489 |
+
</li>
|
490 |
+
<li id="logoutButton" style="display: none;">
|
491 |
+
<a href="/logout"
|
492 |
+
class="flex justify-center items-center bg-red-400 hover:bg-red-600 text-white text-center font-bold py-2 px-4 rounded transition duration-200 ease-in-out">
|
493 |
+
Logout
|
494 |
+
</a>
|
495 |
+
</li>
|
496 |
+
</ul>
|
497 |
+
|
498 |
+
<footer>
|
499 |
+
<div class="w-full mx-auto max-w-screen-xl p-4 md:flex md:items-center md:justify-between">
|
500 |
+
<span class="text-sm text-gray-500 sm:text-center">Powered by <a
|
501 |
+
href="https://github.com/huggingface/competitions" target="_blank"
|
502 |
+
class="hover:underline">Hugging Face
|
503 |
+
Competitions</a>
|
504 |
+
</span>
|
505 |
+
</div>
|
506 |
+
<div class="text-center">
|
507 |
+
<span class="text-xs text-gray-400">{{version}}
|
508 |
+
</span>
|
509 |
+
</div>
|
510 |
+
</footer>
|
511 |
+
</div>
|
512 |
+
</aside>
|
513 |
+
<div class="p-1 sm:ml-64">
|
514 |
+
<img src={{logo}} alt="Competition logo">
|
515 |
+
<hr class="mt-3 mb-2">
|
516 |
+
<div id="articleLoadingSpinner" role="status"
|
517 |
+
class="hidden absolute -translate-x-1/2 -translate-y-1/2 top-2/4 left-1/2">
|
518 |
+
<div class="animate-spin rounded-full h-32 w-32 border-b-2 border-gray-900"></div>
|
519 |
+
<span class="sr-only">Loading...</span>
|
520 |
+
</div>
|
521 |
+
<article class="prose w-full mx-auto max-w-screen-xl p-4 md:flex md:items-center md:justify-between"
|
522 |
+
id="content">
|
523 |
+
</article>
|
524 |
+
</div>
|
525 |
+
<div id="submission-modal" tabindex="-1"
|
526 |
+
class="hidden overflow-y-auto overflow-x-hidden fixed top-0 right-0 left-0 z-50 justify-center items-center w-full md:inset-0 h-[calc(100%-1rem)] max-h-full">
|
527 |
+
<div id="loadingSpinner" role="status"
|
528 |
+
class="hidden absolute -translate-x-1/2 -translate-y-1/2 top-2/4 left-1/2">
|
529 |
+
<div class="animate-spin rounded-full h-32 w-32 border-b-2 border-gray-900"></div>
|
530 |
+
<span class="sr-only">Loading...</span>
|
531 |
+
</div>
|
532 |
+
<div class="form-container max-w-5xl mx-auto mt-3 p-6 shadow-2xl bg-white">
|
533 |
+
<p class="text-lg font-medium text-gray-900">New Submission</p>
|
534 |
+
<form action="#" method="post" class="gap-2" enctype="multipart/form-data">
|
535 |
+
{% if competition_type == 'generic' %}
|
536 |
+
<div class="form-group">
|
537 |
+
<label class="block mb-2 text-sm font-medium text-gray-900" for="submission_file">Upload
|
538 |
+
file</label>
|
539 |
+
<input
|
540 |
+
class="block w-full text-sm text-gray-900 border border-gray-300 rounded-lg cursor-pointer bg-gray-50 focus:outline-none "
|
541 |
+
id="submission_file" type="file" name="submission_file">
|
542 |
+
</div>
|
543 |
+
{% endif %}
|
544 |
+
{% if competition_type == 'script' %}
|
545 |
+
<div class="form-group">
|
546 |
+
<label for="hub_model" class="text-sm font-medium text-gray-700">Hub model
|
547 |
+
</label>
|
548 |
+
<input type="text" name="hub_model" id="hub_model"
|
549 |
+
class="mt-1 block w-full border border-gray-300 px-3 py-1.5 bg-white rounded-md shadow-sm focus:outline-none focus:ring-indigo-500 focus:border-indigo-500"
|
550 |
+
placeholder="username/my-model">
|
551 |
+
</div>
|
552 |
+
{% endif %}
|
553 |
+
<div class="form-group mt-2">
|
554 |
+
<label for="submission_comment" class="text-sm font-medium text-gray-700">Submission description
|
555 |
+
(optional)
|
556 |
+
</label>
|
557 |
+
<textarea id="submission_comment" name="submission_comment" rows="5"
|
558 |
+
class="p-2.5 w-full text-sm text-gray-900" placeholder=" "></textarea>
|
559 |
+
</div>
|
560 |
+
<div class="form-actions mt-6">
|
561 |
+
<button data-modal-hide="submission-modal" type="button"
|
562 |
+
class="confirm text-white bg-green-600 hover:bg-green-800 focus:ring-4 focus:outline-none focus:ring-green-300font-medium rounded-lg text-sm inline-flex items-center px-5 py-2.5 text-center me-2">
|
563 |
+
Submit
|
564 |
+
</button>
|
565 |
+
<button data-modal-hide="submission-modal" type="button"
|
566 |
+
class="cancel text-white bg-red-600 hover:bg-red-100 focus:ring-4 focus:outline-none focus:ring-red-200 rounded-lg border border-red-200 text-sm font-medium px-5 py-2.5 hover:text-red-900 focus:z-10">Cancel</button>
|
567 |
+
</div>
|
568 |
+
</form>
|
569 |
+
<hr class="mt-3">
|
570 |
+
<div id="error-message" style="color: red;"></div>
|
571 |
+
<div id="success-message" style="color: green;"></div>
|
572 |
+
</div>
|
573 |
+
</div>
|
574 |
+
<div id="admin-modal" tabindex="-1"
|
575 |
+
class="hidden fixed inset-0 z-40 flex items-center justify-center w-full h-full bg-black bg-opacity-50">
|
576 |
+
<div id="adminLoadingSpinner" role="status"
|
577 |
+
class="hidden fixed inset-0 z-50 flex items-center justify-center bg-black bg-opacity-50">
|
578 |
+
<div class="animate-spin rounded-full h-32 w-32 border-b-2 border-gray-900"></div>
|
579 |
+
<span class="sr-only">Loading...</span>
|
580 |
+
</div>
|
581 |
+
<div class="relative w-full max-w-5xl p-4">
|
582 |
+
<div class="relative bg-white rounded-lg shadow-2xl">
|
583 |
+
<button type="button"
|
584 |
+
class="absolute top-3 right-3 text-gray-400 bg-transparent hover:bg-gray-200 hover:text-gray-900 rounded-lg text-sm w-8 h-8 inline-flex justify-center items-center"
|
585 |
+
data-modal-hide="admin-modal">
|
586 |
+
<svg class="w-4 h-4" aria-hidden="true" xmlns="http://www.w3.org/2000/svg" fill="none"
|
587 |
+
viewBox="0 0 14 14">
|
588 |
+
<path stroke="currentColor" stroke-linecap="round" stroke-linejoin="round" stroke-width="2"
|
589 |
+
d="m1 1 6 6m0 0 6 6M7 7l6-6M7 7l-6 6" />
|
590 |
+
</svg>
|
591 |
+
<span class="sr-only">Close</span>
|
592 |
+
</button>
|
593 |
+
<div class="p-6 md:p-8 text-center">
|
594 |
+
<h3 class="mb-5 text-lg font-medium text-gray-900">Admin</h3>
|
595 |
+
<div class="tabs">
|
596 |
+
<ul class="flex border-b">
|
597 |
+
<li class="mr-1">
|
598 |
+
<a class="tab bg-white inline-block py-2 px-4 text-blue-500 hover:text-blue-800 font-semibold"
|
599 |
+
href="#config">Config</a>
|
600 |
+
</li>
|
601 |
+
<li class="mr-1">
|
602 |
+
<a class="tab bg-white inline-block py-2 px-4 text-blue-500 hover:text-blue-800 font-semibold"
|
603 |
+
href="#competition-desc">Competition Desc</a>
|
604 |
+
</li>
|
605 |
+
<li class="mr-1">
|
606 |
+
<a class="tab bg-white inline-block py-2 px-4 text-blue-500 hover:text-blue-800 font-semibold"
|
607 |
+
href="#dataset-desc">Dataset Desc</a>
|
608 |
+
</li>
|
609 |
+
<li class="mr-1">
|
610 |
+
<a class="tab bg-white inline-block py-2 px-4 text-blue-500 hover:text-blue-800 font-semibold"
|
611 |
+
href="#submission-desc">Submission Desc</a>
|
612 |
+
</li>
|
613 |
+
<li class="mr-1">
|
614 |
+
<a class="tab bg-white inline-block py-2 px-4 text-blue-500 hover:text-blue-800 font-semibold"
|
615 |
+
href="#rules-desc">Rules</a>
|
616 |
+
</li>
|
617 |
+
</ul>
|
618 |
+
</div>
|
619 |
+
<div id="tab-contents"
|
620 |
+
class="text-xs font-normal text-left overflow-y-auto max-h-[calc(100vh-400px)] border-t border-gray-200 pt-4">
|
621 |
+
<div id="config">
|
622 |
+
<textarea id="config-textarea" class="w-full h-64 p-2 border rounded">Loading..</textarea>
|
623 |
+
<p class="text-xs text-gray-500">Note: The config should be a valid JSON object. To learn
|
624 |
+
details about entries, click <a
|
625 |
+
href="https://huggingface.co/docs/competitions/competition_repo#confjson"
|
626 |
+
target="_blank">here</a>.
|
627 |
+
</p>
|
628 |
+
</div>
|
629 |
+
<div id="competition-desc" class="hidden">
|
630 |
+
<textarea id="competition-desc-textarea"
|
631 |
+
class="w-full h-64 p-2 border rounded">Loading..</textarea>
|
632 |
+
</div>
|
633 |
+
<div id="dataset-desc" class="hidden">
|
634 |
+
<textarea id="dataset-desc-textarea"
|
635 |
+
class="w-full h-64 p-2 border rounded">Loading..</textarea>
|
636 |
+
</div>
|
637 |
+
<div id="submission-desc" class="hidden">
|
638 |
+
<textarea id="submission-desc-textarea"
|
639 |
+
class="w-full h-64 p-2 border rounded">Loading..</textarea>
|
640 |
+
</div>
|
641 |
+
<div id="rules-desc" class="hidden">
|
642 |
+
<textarea id="rules-desc-textarea"
|
643 |
+
class="w-full h-64 p-2 border rounded">Loading..</textarea>
|
644 |
+
</div>
|
645 |
+
</div>
|
646 |
+
<button id="save-button" class="mt-4 px-4 py-2 bg-blue-500 text-white rounded hover:bg-blue-700">
|
647 |
+
Save
|
648 |
+
</button>
|
649 |
+
</div>
|
650 |
+
</div>
|
651 |
+
</div>
|
652 |
+
</div>
|
653 |
+
<script src="https://cdnjs.cloudflare.com/ajax/libs/flowbite/2.2.1/flowbite.min.js"></script>
|
654 |
+
<script>
|
655 |
+
document.addEventListener("DOMContentLoaded", function () {
|
656 |
+
const content = document.getElementById('content');
|
657 |
+
const links = content.getElementsByTagName('a');
|
658 |
+
|
659 |
+
for (let i = 0; i < links.length; i++) {
|
660 |
+
if (!links[i].hasAttribute('target')) {
|
661 |
+
links[i].setAttribute('target', '_blank');
|
662 |
+
}
|
663 |
+
}
|
664 |
+
});
|
665 |
+
</script>
|
666 |
+
</body>
|
667 |
+
|
668 |
+
<script>
|
669 |
+
document.addEventListener("DOMContentLoaded", function () {
|
670 |
+
document.querySelectorAll('.tabs a').forEach(tab => {
|
671 |
+
tab.addEventListener('click', event => {
|
672 |
+
event.preventDefault();
|
673 |
+
document.querySelectorAll('.tabs a').forEach(t => t.classList.remove('active'));
|
674 |
+
tab.classList.add('active');
|
675 |
+
|
676 |
+
document.querySelectorAll('#tab-contents > div').forEach(content => content.classList.add('hidden'));
|
677 |
+
const selectedTab = document.querySelector(tab.getAttribute('href'));
|
678 |
+
selectedTab.classList.remove('hidden');
|
679 |
+
});
|
680 |
+
});
|
681 |
+
|
682 |
+
async function fetchAdminCompInfo() {
|
683 |
+
const adminLoadingSpinner = document.getElementById('adminLoadingSpinner');
|
684 |
+
adminLoadingSpinner.classList.remove('hidden');
|
685 |
+
try {
|
686 |
+
const response = await fetch("/admin/comp_info", {
|
687 |
+
method: "POST",
|
688 |
+
headers: {
|
689 |
+
"Content-Type": "application/json"
|
690 |
+
}
|
691 |
+
});
|
692 |
+
const data = await response.json();
|
693 |
+
if (response.ok) {
|
694 |
+
populateAdminModal(data.response);
|
695 |
+
} else {
|
696 |
+
alert(data.response || "Failed to fetch competition info");
|
697 |
+
}
|
698 |
+
} catch (error) {
|
699 |
+
console.error("Error fetching admin competition info:", error);
|
700 |
+
alert("An error occurred while fetching competition info.");
|
701 |
+
} finally {
|
702 |
+
adminLoadingSpinner.classList.add('hidden');
|
703 |
+
}
|
704 |
+
}
|
705 |
+
|
706 |
+
function populateAdminModal(data) {
|
707 |
+
document.getElementById("config-textarea").value = JSON.stringify(data.config, null, 2);
|
708 |
+
document.getElementById("competition-desc-textarea").value = data.markdowns["competition_desc"] || "";
|
709 |
+
document.getElementById("dataset-desc-textarea").value = data.markdowns["dataset_desc"] || "";
|
710 |
+
document.getElementById("submission-desc-textarea").value = data.markdowns["submission_desc"] || "";
|
711 |
+
document.getElementById("rules-desc-textarea").value = data.markdowns["rules"] || "No rules available.";
|
712 |
+
}
|
713 |
+
|
714 |
+
document.querySelectorAll(".tab").forEach(tab => {
|
715 |
+
tab.addEventListener("click", function (event) {
|
716 |
+
event.preventDefault();
|
717 |
+
const targetId = this.getAttribute("href").substring(1);
|
718 |
+
|
719 |
+
document.querySelectorAll("#tab-contents > div").forEach(content => {
|
720 |
+
content.classList.add("hidden");
|
721 |
+
});
|
722 |
+
document.getElementById(targetId).classList.remove("hidden");
|
723 |
+
|
724 |
+
document.querySelectorAll(".tab").forEach(t => {
|
725 |
+
t.classList.remove("text-blue-800");
|
726 |
+
t.classList.add("text-blue-500");
|
727 |
+
});
|
728 |
+
this.classList.remove("text-blue-500");
|
729 |
+
this.classList.add("text-blue-800");
|
730 |
+
});
|
731 |
+
});
|
732 |
+
|
733 |
+
document.getElementById("admin").addEventListener("click", function () {
|
734 |
+
document.getElementById("admin-modal").classList.remove("hidden");
|
735 |
+
fetchAdminCompInfo();
|
736 |
+
});
|
737 |
+
|
738 |
+
document.querySelector("[data-modal-hide='admin-modal']").addEventListener("click", function () {
|
739 |
+
document.getElementById("admin-modal").classList.add("hidden");
|
740 |
+
});
|
741 |
+
|
742 |
+
document.getElementById("save-button").addEventListener("click", async function () {
|
743 |
+
const adminLoadingSpinner = document.getElementById('adminLoadingSpinner');
|
744 |
+
adminLoadingSpinner.classList.remove('hidden');
|
745 |
+
|
746 |
+
const config = document.getElementById("config-textarea").value;
|
747 |
+
const competitionDesc = document.getElementById("competition-desc-textarea").value;
|
748 |
+
const datasetDesc = document.getElementById("dataset-desc-textarea").value;
|
749 |
+
const submissionDesc = document.getElementById("submission-desc-textarea").value;
|
750 |
+
const rulesDesc = document.getElementById("rules-desc-textarea").value;
|
751 |
+
|
752 |
+
const data = {
|
753 |
+
config: JSON.parse(config),
|
754 |
+
markdowns: {
|
755 |
+
competition_desc: competitionDesc,
|
756 |
+
dataset_desc: datasetDesc,
|
757 |
+
submission_desc: submissionDesc,
|
758 |
+
rules: rulesDesc
|
759 |
+
}
|
760 |
+
};
|
761 |
+
|
762 |
+
try {
|
763 |
+
const response = await fetch("/admin/update_comp_info", {
|
764 |
+
method: "POST",
|
765 |
+
headers: {
|
766 |
+
"Content-Type": "application/json"
|
767 |
+
},
|
768 |
+
body: JSON.stringify(data)
|
769 |
+
});
|
770 |
+
const result = await response.json();
|
771 |
+
if (response.ok) {
|
772 |
+
alert(result.response || "Successfully updated competition info");
|
773 |
+
} else {
|
774 |
+
alert(result.response || "Failed to update competition info");
|
775 |
+
}
|
776 |
+
} catch (error) {
|
777 |
+
console.error("Error updating competition info:", error);
|
778 |
+
alert("An error occurred while updating competition info.");
|
779 |
+
} finally {
|
780 |
+
adminLoadingSpinner.classList.add('hidden');
|
781 |
+
}
|
782 |
+
});
|
783 |
+
|
784 |
+
});
|
785 |
+
</script>
|
786 |
+
|
787 |
+
<script>
|
788 |
+
document.addEventListener('DOMContentLoaded', function () {
|
789 |
+
document.querySelector('.confirm').addEventListener('click', function (event) {
|
790 |
+
event.preventDefault();
|
791 |
+
document.getElementById('error-message').textContent = '';
|
792 |
+
document.getElementById('success-message').textContent = '';
|
793 |
+
const loadingSpinner = document.getElementById('loadingSpinner');
|
794 |
+
loadingSpinner.classList.remove('hidden');
|
795 |
+
|
796 |
+
var formData = new FormData();
|
797 |
+
var competitionType = '{{ competition_type }}';
|
798 |
+
|
799 |
+
if (competitionType === 'generic') {
|
800 |
+
var submissionFile = document.getElementById('submission_file').files[0];
|
801 |
+
formData.append('submission_file', submissionFile);
|
802 |
+
formData.append('hub_model', 'None');
|
803 |
+
} else if (competitionType === 'script') {
|
804 |
+
var hubModel = document.getElementById('hub_model').value;
|
805 |
+
if (!hubModel) {
|
806 |
+
alert('Hub model is required.');
|
807 |
+
return;
|
808 |
+
}
|
809 |
+
formData.append('hub_model', hubModel);
|
810 |
+
} else {
|
811 |
+
alert('Invalid competition type.');
|
812 |
+
return;
|
813 |
+
}
|
814 |
+
|
815 |
+
var submissionComment = document.getElementById('submission_comment').value;
|
816 |
+
formData.append('submission_comment', submissionComment);
|
817 |
+
|
818 |
+
fetch('/new_submission', {
|
819 |
+
method: 'POST',
|
820 |
+
body: formData
|
821 |
+
})
|
822 |
+
.then(response => response.json())
|
823 |
+
.then(data => {
|
824 |
+
loadingSpinner.classList.add('hidden');
|
825 |
+
document.getElementById('success-message').textContent = data.response;
|
826 |
+
|
827 |
+
})
|
828 |
+
.catch((error) => {
|
829 |
+
console.error('Error:', error);
|
830 |
+
loadingSpinner.classList.add('hidden');
|
831 |
+
document.getElementById('error-message').textContent = error;
|
832 |
+
});
|
833 |
+
});
|
834 |
+
});
|
835 |
+
</script>
|
836 |
+
|
837 |
+
<script>
|
838 |
+
function updateSelectedSubmissions() {
|
839 |
+
const selectedSubmissions = document.querySelectorAll('input[name="selectedSubmissions"]:checked');
|
840 |
+
const articleLoadingSpinner = document.getElementById('articleLoadingSpinner');
|
841 |
+
articleLoadingSpinner.classList.remove('hidden');
|
842 |
+
let selectedSubmissionIds = [];
|
843 |
+
selectedSubmissions.forEach((submission) => {
|
844 |
+
selectedSubmissionIds.push(submission.value);
|
845 |
+
});
|
846 |
+
|
847 |
+
const updateEndpoint = '/update_selected_submissions';
|
848 |
+
const requestOptions = {
|
849 |
+
method: 'POST',
|
850 |
+
headers: {
|
851 |
+
'Content-Type': 'application/json',
|
852 |
+
},
|
853 |
+
body: JSON.stringify({
|
854 |
+
"submission_ids": selectedSubmissionIds.join(',')
|
855 |
+
})
|
856 |
+
};
|
857 |
+
|
858 |
+
fetch(updateEndpoint, requestOptions)
|
859 |
+
.then(response => {
|
860 |
+
if (!response.ok) {
|
861 |
+
throw new Error('Network response was not ok');
|
862 |
+
}
|
863 |
+
return response.json();
|
864 |
+
})
|
865 |
+
.then(data => {
|
866 |
+
if (data.success) {
|
867 |
+
// Optionally, display a success message or handle accordingly
|
868 |
+
console.log('Update successful');
|
869 |
+
articleLoadingSpinner.classList.add('hidden');
|
870 |
+
} else {
|
871 |
+
// Handle failure case
|
872 |
+
console.log('Update failed');
|
873 |
+
articleLoadingSpinner.classList.add('hidden');
|
874 |
+
alert(data.error);
|
875 |
+
}
|
876 |
+
// Refresh submissions display
|
877 |
+
fetchAndDisplaySubmissions();
|
878 |
+
})
|
879 |
+
.catch(error => {
|
880 |
+
console.error('There was a problem with the fetch operation for updating:', error);
|
881 |
+
});
|
882 |
+
}
|
883 |
+
</script>
|
884 |
+
|
885 |
+
<script>
|
886 |
+
function updateTeamName() {
|
887 |
+
const teamName = document.getElementById('team_name').value;
|
888 |
+
const articleLoadingSpinner = document.getElementById('articleLoadingSpinner');
|
889 |
+
articleLoadingSpinner.classList.remove('hidden');
|
890 |
+
|
891 |
+
const updateEndpoint = '/update_team_name';
|
892 |
+
const requestOptions = {
|
893 |
+
method: 'POST',
|
894 |
+
headers: {
|
895 |
+
'Content-Type': 'application/json',
|
896 |
+
},
|
897 |
+
body: JSON.stringify({
|
898 |
+
"new_team_name": teamName
|
899 |
+
})
|
900 |
+
};
|
901 |
+
|
902 |
+
fetch(updateEndpoint, requestOptions)
|
903 |
+
.then(response => {
|
904 |
+
if (!response.ok) {
|
905 |
+
throw new Error('Network response was not ok');
|
906 |
+
}
|
907 |
+
return response.json();
|
908 |
+
})
|
909 |
+
.then(data => {
|
910 |
+
if (data.success) {
|
911 |
+
// Optionally, display a success message or handle accordingly
|
912 |
+
console.log('Update successful');
|
913 |
+
articleLoadingSpinner.classList.add('hidden');
|
914 |
+
} else {
|
915 |
+
// Handle failure case
|
916 |
+
console.log('Update failed');
|
917 |
+
articleLoadingSpinner.classList.add('hidden');
|
918 |
+
alert(data.error);
|
919 |
+
}
|
920 |
+
// Refresh submissions display
|
921 |
+
fetchAndDisplaySubmissions();
|
922 |
+
})
|
923 |
+
.catch(error => {
|
924 |
+
console.error('There was a problem with the fetch operation for updating:', error);
|
925 |
+
});
|
926 |
+
}
|
927 |
+
</script>
|
928 |
+
|
929 |
+
<script>
|
930 |
+
function showAdminModal() {
|
931 |
+
const modal = document.getElementById('admin-modal');
|
932 |
+
modal.classList.add('flex');
|
933 |
+
modal.classList.remove('hidden');
|
934 |
+
}
|
935 |
+
|
936 |
+
function hideAdminModal() {
|
937 |
+
const modal = document.getElementById('admin-modal');
|
938 |
+
modal.classList.remove('flex');
|
939 |
+
modal.classList.add('hidden');
|
940 |
+
}
|
941 |
+
|
942 |
+
document.querySelector('#admin').addEventListener('click', function () {
|
943 |
+
showAdminModal();
|
944 |
+
});
|
945 |
+
|
946 |
+
document.querySelector('[data-modal-hide="admin-modal"]').addEventListener('click', function () {
|
947 |
+
hideAdminModal();
|
948 |
+
});
|
949 |
+
</script>
|
950 |
+
|
951 |
+
</html>
|
competitions/tests/test_dummy.py
ADDED
@@ -0,0 +1,2 @@
def test_dummy():
    assert 1 + 1 == 2
competitions/text.py
ADDED
@@ -0,0 +1,17 @@
SUBMISSION_TEXT = """### My Submissions\n\nYou can make up to {} submissions per day.
The test data has been divided into public and private splits.
Your score on the public split will be shown on the leaderboard.
Your final score will be based on your private split performance.
The final rankings will be based on the private split performance.
"""

SUBMISSION_ERROR = """Submission is not in a proper format.
Please check evaluation instructions for more details."""

SUBMISSION_SELECTION_TEXT = """\n\nYou can select up to {} submissions for the private leaderboard."""

NO_SUBMISSIONS = """You have not made any submissions yet."""

SUBMISSION_SUCCESS = """Submission successful! You have {} submissions remaining today."""

SUBMISSION_LIMIT_REACHED = """You have reached your submission limit for today. Please try again tomorrow."""
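These constants are plain Python format strings used by the app's submission pages. A minimal usage sketch, assuming the daily limit, selection limit, and remaining count come from the competition config (the numbers here are made up):

from competitions.text import SUBMISSION_SELECTION_TEXT, SUBMISSION_SUCCESS, SUBMISSION_TEXT

daily_limit = 5       # assumed per-day submission limit
selection_limit = 2   # assumed number of selectable submissions
remaining = 3         # assumed submissions left today

# SUBMISSION_TEXT and SUBMISSION_SELECTION_TEXT each take a single positional field.
header = SUBMISSION_TEXT.format(daily_limit) + SUBMISSION_SELECTION_TEXT.format(selection_limit)
print(header)
print(SUBMISSION_SUCCESS.format(remaining))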
competitions/utils.py
ADDED
@@ -0,0 +1,386 @@
1 |
+
import io
|
2 |
+
import json
|
3 |
+
import os
|
4 |
+
import shlex
|
5 |
+
import subprocess
|
6 |
+
import traceback
|
7 |
+
|
8 |
+
import requests
|
9 |
+
from fastapi import Request
|
10 |
+
from huggingface_hub import HfApi, hf_hub_download
|
11 |
+
from loguru import logger
|
12 |
+
|
13 |
+
from competitions.enums import SubmissionStatus
|
14 |
+
from competitions.params import EvalParams
|
15 |
+
|
16 |
+
from . import HF_URL
|
17 |
+
|
18 |
+
|
19 |
+
USER_TOKEN = os.environ.get("USER_TOKEN")
|
20 |
+
|
21 |
+
|
22 |
+
def token_information(token):
|
23 |
+
if token.startswith("hf_oauth"):
|
24 |
+
_api_url = HF_URL + "/oauth/userinfo"
|
25 |
+
else:
|
26 |
+
_api_url = HF_URL + "/api/whoami-v2"
|
27 |
+
headers = {}
|
28 |
+
cookies = {}
|
29 |
+
if token.startswith("hf_"):
|
30 |
+
headers["Authorization"] = f"Bearer {token}"
|
31 |
+
else:
|
32 |
+
cookies = {"token": token}
|
33 |
+
try:
|
34 |
+
response = requests.get(
|
35 |
+
_api_url,
|
36 |
+
headers=headers,
|
37 |
+
cookies=cookies,
|
38 |
+
timeout=3,
|
39 |
+
)
|
40 |
+
except (requests.Timeout, ConnectionError) as err:
|
41 |
+
logger.error(f"Failed to request whoami-v2 - {repr(err)}")
|
42 |
+
raise Exception("Hugging Face Hub is unreachable, please try again later.")
|
43 |
+
|
44 |
+
if response.status_code != 200:
|
45 |
+
logger.error(f"Failed to request whoami-v2 - {response.status_code}")
|
46 |
+
raise Exception("Invalid token.")
|
47 |
+
|
48 |
+
resp = response.json()
|
49 |
+
user_info = {}
|
50 |
+
|
51 |
+
if token.startswith("hf_oauth"):
|
52 |
+
user_info["id"] = resp["sub"]
|
53 |
+
user_info["name"] = resp["preferred_username"]
|
54 |
+
user_info["orgs"] = [resp["orgs"][k]["preferred_username"] for k in range(len(resp["orgs"]))]
|
55 |
+
else:
|
56 |
+
user_info["id"] = resp["id"]
|
57 |
+
user_info["name"] = resp["name"]
|
58 |
+
user_info["orgs"] = [resp["orgs"][k]["name"] for k in range(len(resp["orgs"]))]
|
59 |
+
return user_info
|
60 |
+
|
61 |
+
|
62 |
+
def user_authentication(request: Request):
|
63 |
+
auth_header = request.headers.get("Authorization")
|
64 |
+
bearer_token = None
|
65 |
+
|
66 |
+
if auth_header and auth_header.startswith("Bearer "):
|
67 |
+
bearer_token = auth_header.split(" ")[1]
|
68 |
+
|
69 |
+
if bearer_token:
|
70 |
+
try:
|
71 |
+
_ = token_information(token=bearer_token)
|
72 |
+
return bearer_token
|
73 |
+
except Exception as e:
|
74 |
+
logger.error(f"Failed to verify token: {e}")
|
75 |
+
return None
|
76 |
+
|
77 |
+
if USER_TOKEN is not None:
|
78 |
+
try:
|
79 |
+
_ = token_information(token=USER_TOKEN)
|
80 |
+
return USER_TOKEN
|
81 |
+
except Exception as e:
|
82 |
+
logger.error(f"Failed to verify token: {e}")
|
83 |
+
return None
|
84 |
+
|
85 |
+
if "oauth_info" in request.session:
|
86 |
+
try:
|
87 |
+
_ = token_information(token=request.session["oauth_info"]["access_token"])
|
88 |
+
return request.session["oauth_info"]["access_token"]
|
89 |
+
except Exception as e:
|
90 |
+
request.session.pop("oauth_info", None)
|
91 |
+
logger.error(f"Failed to verify token: {e}")
|
92 |
+
return None
|
93 |
+
|
94 |
+
return None
|
95 |
+
|
96 |
+
|
97 |
+
def user_authentication_dep(token, return_raw=False):
|
98 |
+
if token.startswith("hf_oauth"):
|
99 |
+
_api_url = HF_URL + "/oauth/userinfo"
|
100 |
+
else:
|
101 |
+
_api_url = HF_URL + "/api/whoami-v2"
|
102 |
+
headers = {}
|
103 |
+
cookies = {}
|
104 |
+
if token.startswith("hf_"):
|
105 |
+
headers["Authorization"] = f"Bearer {token}"
|
106 |
+
else:
|
107 |
+
cookies = {"token": token}
|
108 |
+
try:
|
109 |
+
response = requests.get(
|
110 |
+
_api_url,
|
111 |
+
headers=headers,
|
112 |
+
cookies=cookies,
|
113 |
+
timeout=3,
|
114 |
+
)
|
115 |
+
except (requests.Timeout, ConnectionError) as err:
|
116 |
+
logger.error(f"Failed to request whoami-v2 - {repr(err)}")
|
117 |
+
raise Exception("Hugging Face Hub is unreachable, please try again later.")
|
118 |
+
|
119 |
+
resp = response.json()
|
120 |
+
if return_raw:
|
121 |
+
return resp
|
122 |
+
|
123 |
+
user_info = {}
|
124 |
+
if "error" in resp:
|
125 |
+
return resp
|
126 |
+
if token.startswith("hf_oauth"):
|
127 |
+
user_info["id"] = resp["sub"]
|
128 |
+
user_info["name"] = resp["preferred_username"]
|
129 |
+
user_info["orgs"] = [resp["orgs"][k]["preferred_username"] for k in range(len(resp["orgs"]))]
|
130 |
+
else:
|
131 |
+
|
132 |
+
user_info["id"] = resp["id"]
|
133 |
+
user_info["name"] = resp["name"]
|
134 |
+
user_info["orgs"] = [resp["orgs"][k]["name"] for k in range(len(resp["orgs"]))]
|
135 |
+
return user_info
|


def make_clickable_user(user_id):
    link = "https://huggingface.co/" + user_id
    return f'<a target="_blank" href="{link}">{user_id}</a>'


def run_evaluation(params, local=False, wait=False):
    params = json.loads(params)
    if isinstance(params, str):
        params = json.loads(params)
    params = EvalParams(**params)
    if not local:
        params.output_path = "/tmp/model"
    params.save(output_dir=params.output_path)
    cmd = [
        "python",
        "-m",
        "competitions.evaluate",
        "--config",
        os.path.join(params.output_path, "params.json"),
    ]

    cmd = [str(c) for c in cmd]
    logger.info(cmd)
    env = os.environ.copy()
    cmd = shlex.split(" ".join(cmd))
    process = subprocess.Popen(cmd, env=env)
    if wait:
        process.wait()
    return process.pid
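# run_evaluation takes the evaluation parameters as a JSON string and tolerates
# payloads that were JSON-encoded twice, which is why json.loads is applied a
# second time when the first decode still yields a string. Hypothetical call
# sketch (the payload must contain the fields EvalParams expects):
#
#   payload = json.dumps(eval_params_dict)
#   pid = run_evaluation(payload, local=True, wait=False)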


def pause_space(params):
    if "SPACE_ID" in os.environ:
        if os.environ["SPACE_ID"].split("/")[-1].startswith("comp-"):
            logger.info("Pausing space...")
            api = HfApi(token=params.token)
            api.pause_space(repo_id=os.environ["SPACE_ID"])


def delete_space(params):
    if "SPACE_ID" in os.environ:
        if os.environ["SPACE_ID"].split("/")[-1].startswith("comp-"):
            logger.info("Deleting space...")
            api = HfApi(token=params.token)
            api.delete_repo(repo_id=os.environ["SPACE_ID"], repo_type="space")


def download_submission_info(params):
    user_fname = hf_hub_download(
        repo_id=params.competition_id,
        filename=f"submission_info/{params.team_id}.json",
        token=params.token,
        repo_type="dataset",
    )
    with open(user_fname, "r", encoding="utf-8") as f:
        user_submission_info = json.load(f)

    return user_submission_info


def upload_submission_info(params, user_submission_info):
    user_submission_info_json = json.dumps(user_submission_info, indent=4)
    user_submission_info_json_bytes = user_submission_info_json.encode("utf-8")
    user_submission_info_json_buffer = io.BytesIO(user_submission_info_json_bytes)
    api = HfApi(token=params.token)
    api.upload_file(
        path_or_fileobj=user_submission_info_json_buffer,
        path_in_repo=f"submission_info/{params.team_id}.json",
        repo_id=params.competition_id,
        repo_type="dataset",
    )


def update_submission_status(params, status):
    user_submission_info = download_submission_info(params)
    for submission in user_submission_info["submissions"]:
        if submission["submission_id"] == params.submission_id:
            submission["status"] = status
            break
    upload_submission_info(params, user_submission_info)


def update_submission_score(params, public_score, private_score):
    user_submission_info = download_submission_info(params)
    for submission in user_submission_info["submissions"]:
        if submission["submission_id"] == params.submission_id:
            submission["public_score"] = public_score
            submission["private_score"] = private_score
            submission["status"] = "done"
            break
    upload_submission_info(params, user_submission_info)


def monitor(func):
    def wrapper(*args, **kwargs):
        params = kwargs.get("params", None)
        if params is None and len(args) > 0:
            params = args[0]

        try:
            return func(*args, **kwargs)
        except Exception as e:
            error_message = f"""{func.__name__} has failed due to an exception: {traceback.format_exc()}"""
            logger.error(error_message)
            logger.error(str(e))
            update_submission_status(params, SubmissionStatus.FAILED.value)
            pause_space(params)

    return wrapper
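# Hypothetical usage sketch for the monitor decorator: it is meant to wrap
# functions that receive the evaluation params as their first positional
# argument or as a `params=` keyword, so that any crash marks the submission
# as FAILED and pauses the Space.
#
#   @monitor
#   def evaluate_submission(params):
#       ...  # scoring logic; exceptions are caught and logged by the wrapper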


def uninstall_requirements(requirements_fname):
    if os.path.exists(requirements_fname):
        # read the requirements.txt
        uninstall_list = []
        with open(requirements_fname, "r", encoding="utf-8") as f:
            for line in f:
                if line.startswith("-"):
                    uninstall_list.append(line[1:])

        # create an uninstall.txt
        with open("uninstall.txt", "w", encoding="utf-8") as f:
            for line in uninstall_list:
                f.write(line)

        pipe = subprocess.Popen(
            [
                "pip",
                "uninstall",
                "-r",
                "uninstall.txt",
                "-y",
            ],
        )
        pipe.wait()
        logger.info("Requirements uninstalled.")
        return


def install_requirements(requirements_fname):
    # check if params.project_name has a requirements.txt
    if os.path.exists(requirements_fname):
        # install the requirements using subprocess, wait for it to finish
        install_list = []

        with open(requirements_fname, "r", encoding="utf-8") as f:
            for line in f:
                # skip lines that start with "-" (packages to uninstall),
                # but keep lines that start with "--" (pip options)
                if line.startswith("-"):
                    if not line.startswith("--"):
                        continue
                install_list.append(line)

        with open("install.txt", "w", encoding="utf-8") as f:
            for line in install_list:
                f.write(line)

        pipe = subprocess.Popen(
            [
                "pip",
                "install",
                "-r",
                "install.txt",
            ],
        )
        pipe.wait()
        logger.info("Requirements installed.")
        return
    logger.info("No requirements.txt found. Skipping requirements installation.")
    return
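# The two helpers above encode a convention for the competition requirements
# file: lines beginning with "-" are collected into uninstall.txt (packages to
# remove), lines beginning with "--" are kept for the install step as pip
# options, and all other lines are installed normally. A hypothetical
# requirements.txt following that convention:
#
#   --extra-index-url https://download.pytorch.org/whl/cpu
#   -flash-attn
#   numpy==1.26.4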


def is_user_admin(user_token, competition_organization):
    user_info = token_information(token=user_token)
    user_orgs = user_info.get("orgs", [])
    for org in user_orgs:
        if org == competition_organization:
            return True
    return False


def get_team_name(user_token, competition_id, hf_token):
    user_info = token_information(token=user_token)
    user_id = user_info["id"]
    user_team = hf_hub_download(
        repo_id=competition_id,
        filename="user_team.json",
        token=hf_token,
        repo_type="dataset",
    )
    with open(user_team, "r", encoding="utf-8") as f:
        user_team = json.load(f)

    if user_id not in user_team:
        return None

    team_id = user_team[user_id]

    team_metadata = hf_hub_download(
        repo_id=competition_id,
        filename="teams.json",
        token=hf_token,
        repo_type="dataset",
    )
    with open(team_metadata, "r", encoding="utf-8") as f:
        team_metadata = json.load(f)

    team_name = team_metadata[team_id]["name"]
    return team_name


def update_team_name(user_token, new_team_name, competition_id, hf_token):
    user_info = token_information(token=user_token)
    user_id = user_info["id"]
    user_team = hf_hub_download(
        repo_id=competition_id,
        filename="user_team.json",
        token=hf_token,
        repo_type="dataset",
    )
    with open(user_team, "r", encoding="utf-8") as f:
        user_team = json.load(f)

    if user_id not in user_team:
        raise Exception("User is not part of a team")

    team_id = user_team[user_id]

    team_metadata = hf_hub_download(
        repo_id=competition_id,
        filename="teams.json",
        token=hf_token,
        repo_type="dataset",
    )
    with open(team_metadata, "r", encoding="utf-8") as f:
        team_metadata = json.load(f)

    team_metadata[team_id]["name"] = new_team_name
    team_metadata_json = json.dumps(team_metadata, indent=4)
    team_metadata_json_bytes = team_metadata_json.encode("utf-8")
    team_metadata_json_buffer = io.BytesIO(team_metadata_json_bytes)
    api = HfApi(token=hf_token)
    api.upload_file(
        path_or_fileobj=team_metadata_json_buffer,
        path_in_repo="teams.json",
        repo_id=competition_id,
        repo_type="dataset",
    )
    return new_team_name
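# Illustrative shapes of the two metadata files read by get_team_name and
# update_team_name (keys and values are placeholders):
#
#   user_team.json : {"<user_id>": "<team_id>", ...}
#   teams.json     : {"<team_id>": {"name": "<team name>", ...}, ...}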