Álvaro Valenzuela Valdes committed on
Commit
2be65c0
·
1 Parent(s): 6314c8a

Update Gemini SDK to google-genai v2.0.0 (Copilot/VS Code updates)

Browse files
backend/app/services/llm.py CHANGED
@@ -1,18 +1,15 @@
1
  import hashlib
2
  import json
3
  import httpx
4
- import google.generativeai as genai
5
  from app.config import settings
6
  from app.schemas.analysis import AnalysisResult, RiskItem, ActionItem, CompanyProfile, Tender
7
  from app.services.report import generate_markdown_report
8
 
9
- # Configure Gemini
10
- genai.configure(api_key=settings.gemini_api_key)
11
-
12
  async def call_gemini(prompt: str, is_json: bool = False) -> str:
13
  if not settings.gemini_api_key:
14
  return ""
15
-
16
  try:
17
  generation_config = {
18
  "temperature": 0.2,
@@ -20,17 +17,18 @@ async def call_gemini(prompt: str, is_json: bool = False) -> str:
20
  "top_k": 40,
21
  "max_output_tokens": 8192,
22
  }
23
-
24
  if is_json:
25
  generation_config["response_mime_type"] = "application/json"
26
-
27
- model = genai.GenerativeModel(
28
- model_name="gemini-2.0-flash",
29
- generation_config=generation_config,
30
- )
31
-
32
- response = await model.generate_content_async(prompt)
33
- return response.text
 
34
  except Exception as e:
35
  print(f"Error calling Gemini (is_json={is_json}): {e}, trying fallback...")
36
  if settings.groq_api_key:
 
1
  import hashlib
2
  import json
3
  import httpx
4
+ from google import genai
5
  from app.config import settings
6
  from app.schemas.analysis import AnalysisResult, RiskItem, ActionItem, CompanyProfile, Tender
7
  from app.services.report import generate_markdown_report
8
 
 
 
 
9
  async def call_gemini(prompt: str, is_json: bool = False) -> str:
10
  if not settings.gemini_api_key:
11
  return ""
12
+
13
  try:
14
  generation_config = {
15
  "temperature": 0.2,
 
17
  "top_k": 40,
18
  "max_output_tokens": 8192,
19
  }
20
+
21
  if is_json:
22
  generation_config["response_mime_type"] = "application/json"
23
+
24
+ async with genai.Client(api_key=settings.gemini_api_key).aio as client:
25
+ response = await client.models.generate_content(
26
+ model="gemini-2.0-flash",
27
+ contents=prompt,
28
+ config=generation_config,
29
+ )
30
+
31
+ return getattr(response, "text", "") or ""
32
  except Exception as e:
33
  print(f"Error calling Gemini (is_json={is_json}): {e}, trying fallback...")
34
  if settings.groq_api_key:
backend/requirements.txt CHANGED
@@ -3,7 +3,7 @@ uvicorn[standard]==0.23.2
3
  httpx==0.27.0
4
  pydantic==2.8.0
5
  pydantic-settings==2.4.0
6
- google-generativeai>=0.8.3
7
  pypdf==4.2.0
8
  python-multipart==0.0.9
9
  sqlalchemy==2.0.25
 
3
  httpx==0.27.0
4
  pydantic==2.8.0
5
  pydantic-settings==2.4.0
6
+ google-genai>=2.0.0
7
  pypdf==4.2.0
8
  python-multipart==0.0.9
9
  sqlalchemy==2.0.25