sql_data_analyst / upload_hf.py
YashashMathur's picture
SQL Data Analyst OpenEnv - Initial commit
d103a0f verified
raw
history blame
1.42 kB
from huggingface_hub import HfApi
import os
import shutil
# Stage a clean copy of the project and upload it to a Hugging Face Space.
# The staging step exists so that caches, VCS data, and build artifacts never
# reach the Space repository.
api = HfApi()

# Directory names excluded wherever they appear in the tree.  Matching by
# exact name (rather than the original substring test on the joined path)
# is portable across OSes and cannot accidentally skip a directory that
# merely *contains* one of these strings.  Pruning by name also subsumes the
# original's Windows-specific entries like "env\\__pycache__".
SKIP_DIRS = {
    ".git",
    "__pycache__",
    "hf_space",
    ".cache",
    "openenvhackathon",
    ".pytest_cache",
    "temp_upload",
}

temp_dir = "./temp_upload"

# Start from an empty staging area every run.
if os.path.exists(temp_dir):
    shutil.rmtree(temp_dir)
os.makedirs(temp_dir)

for root, dirs, files in os.walk("."):
    # Prune excluded directories in place so os.walk never descends into
    # them (the original `continue` still walked their whole subtree).
    dirs[:] = [d for d in dirs if d not in SKIP_DIRS]

    # Mirror the directory layout under temp_dir.  os.path.relpath/join is
    # separator-agnostic; the original `root.replace(".\\", ...)` only
    # worked on Windows and left temp_root == root on POSIX.
    rel = os.path.relpath(root, ".")
    temp_root = temp_dir if rel == "." else os.path.join(temp_dir, rel)
    os.makedirs(temp_root, exist_ok=True)

    for name in files:
        # Skip compiled, lock, and metadata artifacts plus hidden files.
        if name.startswith(".") or name.endswith((".pyc", ".lock", ".metadata")):
            continue
        src = os.path.join(root, name)
        dst = os.path.join(temp_root, name)
        try:
            shutil.copy2(src, dst)
        except OSError as e:
            # Best-effort copy: report the failure and keep going.
            print(f"Skipped {src}: {e}")

print("Prepared files")

api.upload_folder(
    folder_path=temp_dir,
    repo_id="YashashMathur/sql_data_analyst",
    repo_type="space",
    commit_message="SQL Data Analyst OpenEnv - Initial commit",
)
print("SUCCESS!")
print("https://huggingface.co/spaces/YashashMathur/sql_data_analyst")

# Remove the staging area once the upload has succeeded.
shutil.rmtree(temp_dir)