| """ |
| 6์ฃผ์ฐจ ๋ชจ๋ธ ์ค์ โ HuggingFace Inference API |
| =========================================== |
| ํ ํฐ์ .env ์ HF_TOKEN ๋๋ HUGGINGFACEHUB_API_TOKEN ์์ ์ฝ๋๋ค. |
| HF Space์ ๋ฐฐํฌํ ๋๋ Settings > Secrets ์ HF_TOKEN ์ ๋ฑ๋กํ๋ค. |
| """ |
|
|
| from __future__ import annotations |
|
|
| import os |
|
|
| from huggingface_hub import InferenceClient |
|
|
| |
# Hugging Face Hub model id used with the InferenceClient; the name suggests an
# image-classification model for food photos — confirm against the caller.
VISION_MODEL: str = "nateraw/food"


# Hub model id of an instruction-tuned chat LLM (presumably for text generation).
LLM_MODEL: str = "meta-llama/Meta-Llama-3-8B-Instruct"
|
|
|
|
def get_token() -> str:
    """Resolve the Hugging Face API token from the environment.

    Looks up ``HF_TOKEN`` first and falls back to
    ``HUGGINGFACEHUB_API_TOKEN``; an empty string counts as unset.

    Returns:
        The first non-empty token value found.

    Raises:
        SystemExit: when neither environment variable holds a value,
            with setup instructions in the message.
    """
    for env_name in ("HF_TOKEN", "HUGGINGFACEHUB_API_TOKEN"):
        candidate = os.getenv(env_name)
        if candidate:
            return candidate
    raise SystemExit(
        "HF_TOKEN(또는 HUGGINGFACEHUB_API_TOKEN) 환경변수가 비어 있습니다.\n"
        " 1) https://huggingface.co/settings/tokens 에서 Read 토큰 발급\n"
        " 2) .env 에 HF_TOKEN=hf_xxx 추가 (로컬)\n"
        " 3) HF Space: Settings > Secrets 에 HF_TOKEN 등록"
    )
|
|
|
|
def get_client() -> InferenceClient:
    """Build an ``InferenceClient`` authenticated via :func:`get_token`.

    Returns:
        A client configured with the resolved HF token (used for image
        classification per the original module comment).

    Raises:
        SystemExit: propagated from :func:`get_token` when no token is set.
    """
    hf_token = get_token()
    return InferenceClient(token=hf_token)
|
|