Spaces:
Sleeping
Sleeping
Commit ·
af9bb6b
1
Parent(s): ba5b759
fix gemini model
Browse files
- codes/1_planning.py +1 -1
- codes/llm_provider.py +2 -2
codes/1_planning.py
CHANGED
|
@@ -14,7 +14,7 @@ parser.add_argument('--paper_name', type=str, help='Name of the paper (deprecate
|
|
| 14 |
parser.add_argument('--blog_name', type=str, help='Name of the blog')
|
| 15 |
parser.add_argument('--content_name', type=str, help='Name of the content (paper or blog)')
|
| 16 |
parser.add_argument('--gpt_version', type=str, help='Model version (deprecated, use --model)')
|
| 17 |
-
parser.add_argument('--model', type=str, help='Model name (e.g., gpt-4o-mini, gemini-
|
| 18 |
parser.add_argument('--provider', type=str, default='gemini', choices=['openai', 'gemini', 'gemma'], help='LLM provider to use')
|
| 19 |
parser.add_argument('--paper_format', type=str, default="JSON", choices=["JSON", "LaTeX"], help='Format for papers')
|
| 20 |
parser.add_argument('--blog_format', type=str, default="JSON", choices=["JSON", "Markdown", "HTML"], help='Format for blogs')
|
|
|
|
| 14 |
parser.add_argument('--blog_name', type=str, help='Name of the blog')
|
| 15 |
parser.add_argument('--content_name', type=str, help='Name of the content (paper or blog)')
|
| 16 |
parser.add_argument('--gpt_version', type=str, help='Model version (deprecated, use --model)')
|
| 17 |
+
parser.add_argument('--model', type=str, help='Model name (e.g., gpt-4o-mini, gemini-2.0-flash)')
|
| 18 |
parser.add_argument('--provider', type=str, default='gemini', choices=['openai', 'gemini', 'gemma'], help='LLM provider to use')
|
| 19 |
parser.add_argument('--paper_format', type=str, default="JSON", choices=["JSON", "LaTeX"], help='Format for papers')
|
| 20 |
parser.add_argument('--blog_format', type=str, default="JSON", choices=["JSON", "Markdown", "HTML"], help='Format for blogs')
|
codes/llm_provider.py
CHANGED
|
@@ -79,7 +79,7 @@ class OpenAIProvider(LLMProvider):
|
|
| 79 |
|
| 80 |
|
| 81 |
class GeminiProvider(LLMProvider):
|
| 82 |
-
"""Google Gemini API
|
| 83 |
|
| 84 |
def __init__(self, api_key: Optional[str] = None):
|
| 85 |
try:
|
|
@@ -262,7 +262,7 @@ def get_default_model(provider_name: str) -> str:
|
|
| 262 |
"""Get default model for a provider"""
|
| 263 |
defaults = {
|
| 264 |
'openai': 'gpt-4o-mini',
|
| 265 |
-
'gemini': 'gemini-
|
| 266 |
'gemma': 'google/gemma-3-27b-it',
|
| 267 |
}
|
| 268 |
return defaults.get(provider_name, 'gpt-4o-mini')
|
|
|
|
| 79 |
|
| 80 |
|
| 81 |
class GeminiProvider(LLMProvider):
|
| 82 |
+
"""Google Gemini API implementation"""
|
| 83 |
|
| 84 |
def __init__(self, api_key: Optional[str] = None):
|
| 85 |
try:
|
|
|
|
| 262 |
"""Get default model for a provider"""
|
| 263 |
defaults = {
|
| 264 |
'openai': 'gpt-4o-mini',
|
| 265 |
+
'gemini': 'gemini-2.0-flash', # valid, free tier, no models/ prefix needed
|
| 266 |
'gemma': 'google/gemma-3-27b-it',
|
| 267 |
}
|
| 268 |
return defaults.get(provider_name, 'gpt-4o-mini')
|