# SignBridge — example environment file (.env.example)
# Copy to .env and fill in real values. Never commit .env.
# AMD Developer Cloud — OpenAI-compatible endpoint (vLLM serving on MI300X)
AMD_DEV_CLOUD_BASE_URL=
AMD_DEV_CLOUD_API_KEY=
# Hugging Face — for HF Hub artifact pushes + Inference API fallback
HF_TOKEN=
# OpenAI — local-dev fallback only. NOT used in submission.
# Lets the Gradio UI work locally while AMD Dev Cloud is being provisioned.
OPENAI_API_KEY=
# Active provider for the inference clients. One of: amd | hf | openai
SIGNBRIDGE_PROVIDER=amd
# Model IDs (overridable for experimentation)
SIGNBRIDGE_COMPOSER_MODEL=meta-llama/Llama-3.1-8B-Instruct
SIGNBRIDGE_TTS_MODEL=tts_models/multilingual/multi-dataset/xtts_v2
SIGNBRIDGE_STT_MODEL=openai/whisper-large-v3
# Sign classifier — local artifact (after training) or HF Hub repo
SIGNBRIDGE_CLASSIFIER_PATH=models/classifier.pt
SIGNBRIDGE_CLASSIFIER_HF_REPO=lucas-loo/signbridge-classifier
# Webcam frame sampling rate (Hz)
SIGNBRIDGE_FRAME_RATE=5
# Backend URL when the Space talks to a separate FastAPI server (leave blank for in-process)
SIGNBRIDGE_BACKEND_URL=