Upload app.py with huggingface_hub
Browse files
app.py
CHANGED
|
@@ -104,17 +104,42 @@ def load_dataset_df():
|
|
| 104 |
|
| 105 |
|
| 106 |
def load_request_df():
|
| 107 |
-
|
| 108 |
-
|
| 109 |
-
|
| 110 |
-
|
| 111 |
-
|
| 112 |
-
|
| 113 |
-
|
| 114 |
-
|
| 115 |
-
|
| 116 |
-
|
| 117 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 118 |
|
| 119 |
|
| 120 |
def compute_normalized_overall(df):
|
|
@@ -191,10 +216,12 @@ def save_csv_to_dataset(df, repo_id, file_name, commit_message):
|
|
| 191 |
return f"Submit failed while uploading {repo_id}/{file_name}: {str(e)[:300]}"
|
| 192 |
|
| 193 |
|
| 194 |
-
def save_request_artifact(payload):
|
| 195 |
if not HF_TOKEN:
|
| 196 |
return "Submit failed: HF_TOKEN secret is missing."
|
| 197 |
|
|
|
|
|
|
|
| 198 |
try:
|
| 199 |
api = HfApi(token=HF_TOKEN)
|
| 200 |
with tempfile.TemporaryDirectory() as tmpdir:
|
|
@@ -207,7 +234,7 @@ def save_request_artifact(payload):
|
|
| 207 |
api.upload_file(
|
| 208 |
path_or_fileobj=raw_path,
|
| 209 |
path_in_repo=raw_name,
|
| 210 |
-
repo_id=
|
| 211 |
repo_type="dataset",
|
| 212 |
commit_message=f"New request: {payload['Model']} ({payload['Split']})",
|
| 213 |
)
|
|
@@ -260,16 +287,14 @@ def submit_entry(model_name, split_name, team, contact, model_link, av, at, vt,
|
|
| 260 |
req_df = pd.concat([req_df, pd.DataFrame([row])], ignore_index=True)
|
| 261 |
req_df = ensure_request_schema(req_df)
|
| 262 |
|
| 263 |
-
save_msg =
|
| 264 |
req_df,
|
| 265 |
-
REQUEST_REPO,
|
| 266 |
-
REQUEST_FILE,
|
| 267 |
f"Add request: {row['Model']} ({row['Split']})",
|
| 268 |
)
|
| 269 |
if save_msg == "ok":
|
| 270 |
-
save_msg = save_request_artifact(row)
|
| 271 |
if save_msg == "ok":
|
| 272 |
-
save_msg = f"Submit succeeded. Request queued: {request_id}. Awaiting review."
|
| 273 |
|
| 274 |
latest_df, load_msg = load_dataset_df()
|
| 275 |
return save_msg, view_table(latest_df, split_selector, sort_selector), load_msg
|
|
@@ -308,10 +333,8 @@ def review_request(request_id, decision, review_note, admin_token, split_selecto
|
|
| 308 |
req_df.loc[idx, "ReviewedAt"] = now
|
| 309 |
req_df.loc[idx, "ReviewNote"] = (review_note or "").strip()
|
| 310 |
|
| 311 |
-
save_msg =
|
| 312 |
req_df,
|
| 313 |
-
REQUEST_REPO,
|
| 314 |
-
REQUEST_FILE,
|
| 315 |
f"{decision} request: {request_id}",
|
| 316 |
)
|
| 317 |
if save_msg != "ok":
|
|
|
|
| 104 |
|
| 105 |
|
| 106 |
def load_request_df():
    """Load the pending-requests table from the Hub.

    Tries the dedicated request repo first, then falls back to the results
    repo when the request repo is unavailable.

    Returns:
        tuple[pd.DataFrame, str]: a DataFrame conforming to REQUEST_COLUMNS
        (empty when no repo had the file) and a human-readable status message.
    """
    # Prefer dedicated request repo; fallback to results repo when request repo is unavailable.
    repos_to_try = [REQUEST_REPO]
    if DATASET_REPO not in repos_to_try:
        repos_to_try.append(DATASET_REPO)

    failures = []
    for candidate in repos_to_try:
        try:
            csv_path = hf_hub_download(
                repo_id=candidate,
                repo_type="dataset",
                filename=REQUEST_FILE,
                token=HF_TOKEN,
            )
            frame = pd.read_csv(csv_path)
        except Exception as exc:
            # Download/parse failed for this repo — remember why and try the next one.
            failures.append(f"{candidate}: {str(exc)[:80]}")
            continue
        return ensure_request_schema(frame), f"Loaded requests: {candidate}/{REQUEST_FILE}"

    return pd.DataFrame(columns=REQUEST_COLUMNS), f"Requests file not found yet. Tried: {' | '.join(failures)}"
def save_requests_csv(df, commit_message):
    """Save requests CSV with repo fallback for robustness.

    Attempts REQUEST_REPO first, then DATASET_REPO, stopping at the first
    repo that accepts the upload.

    Returns:
        tuple[str, str | None]: ("ok", repo_used) on success, or an error
        message summarizing every failed attempt paired with None.
    """
    targets = [REQUEST_REPO]
    if DATASET_REPO not in targets:
        targets.append(DATASET_REPO)

    failure_notes = []
    for target in targets:
        outcome = save_csv_to_dataset(df, target, REQUEST_FILE, commit_message)
        if outcome == "ok":
            return "ok", target
        failure_notes.append(f"{target}: {outcome}")

    return f"Submit failed. Tried repos -> {' | '.join(failure_notes)}", None
| 143 |
|
| 144 |
|
| 145 |
def compute_normalized_overall(df):
|
|
|
|
| 216 |
return f"Submit failed while uploading {repo_id}/{file_name}: {str(e)[:300]}"
|
| 217 |
|
| 218 |
|
| 219 |
+
def save_request_artifact(payload, repo_id=None):
|
| 220 |
if not HF_TOKEN:
|
| 221 |
return "Submit failed: HF_TOKEN secret is missing."
|
| 222 |
|
| 223 |
+
target_repo = repo_id or REQUEST_REPO
|
| 224 |
+
|
| 225 |
try:
|
| 226 |
api = HfApi(token=HF_TOKEN)
|
| 227 |
with tempfile.TemporaryDirectory() as tmpdir:
|
|
|
|
| 234 |
api.upload_file(
|
| 235 |
path_or_fileobj=raw_path,
|
| 236 |
path_in_repo=raw_name,
|
| 237 |
+
repo_id=target_repo,
|
| 238 |
repo_type="dataset",
|
| 239 |
commit_message=f"New request: {payload['Model']} ({payload['Split']})",
|
| 240 |
)
|
|
|
|
| 287 |
req_df = pd.concat([req_df, pd.DataFrame([row])], ignore_index=True)
|
| 288 |
req_df = ensure_request_schema(req_df)
|
| 289 |
|
| 290 |
+
save_msg, request_repo_used = save_requests_csv(
|
| 291 |
req_df,
|
|
|
|
|
|
|
| 292 |
f"Add request: {row['Model']} ({row['Split']})",
|
| 293 |
)
|
| 294 |
if save_msg == "ok":
|
| 295 |
+
save_msg = save_request_artifact(row, repo_id=request_repo_used)
|
| 296 |
if save_msg == "ok":
|
| 297 |
+
save_msg = f"Submit succeeded. Request queued: {request_id}. Stored in {request_repo_used}. Awaiting review."
|
| 298 |
|
| 299 |
latest_df, load_msg = load_dataset_df()
|
| 300 |
return save_msg, view_table(latest_df, split_selector, sort_selector), load_msg
|
|
|
|
| 333 |
req_df.loc[idx, "ReviewedAt"] = now
|
| 334 |
req_df.loc[idx, "ReviewNote"] = (review_note or "").strip()
|
| 335 |
|
| 336 |
+
save_msg, _ = save_requests_csv(
|
| 337 |
req_df,
|
|
|
|
|
|
|
| 338 |
f"{decision} request: {request_id}",
|
| 339 |
)
|
| 340 |
if save_msg != "ok":
|