sanjaystarc committed on
Commit
7033b72
·
verified ·
1 Parent(s): 3e39206

Update core_agent.py

Browse files
Files changed (1) hide show
  1. core_agent.py +4 -5
core_agent.py CHANGED
@@ -21,9 +21,8 @@ from plotly.subplots import make_subplots
21
  from dotenv import load_dotenv
22
 
23
  from langchain_google_genai import ChatGoogleGenerativeAI
24
- from langchain.prompts import PromptTemplate
25
- from langchain.chains import LLMChain
26
- from langchain.schema import HumanMessage, SystemMessage
27
 
28
  warnings.filterwarnings("ignore")
29
  load_dotenv()
@@ -37,7 +36,7 @@ CARD_BG = "#1A1A2E"
37
  # ─── LLM Setup ───────────────────────────────────────────────────────────────
38
  def get_llm(api_key: str):
39
  return ChatGoogleGenerativeAI(
40
- model="gemini-2.5-flash",
41
  google_api_key=api_key,
42
  temperature=0.3,
43
  convert_system_message_to_human=True,
@@ -315,4 +314,4 @@ Respond ONLY in valid JSON like:
315
  text = text[4:]
316
  return json.loads(text.strip())
317
  except Exception:
318
- return {"chart_type": "distribution_plots", "x_col": None, "y_col": None, "reason": "Default chart"}
 
21
  from dotenv import load_dotenv
22
 
23
  from langchain_google_genai import ChatGoogleGenerativeAI
24
+ from langchain_core.prompts import PromptTemplate
25
+ from langchain_core.messages import HumanMessage, SystemMessage
 
26
 
27
  warnings.filterwarnings("ignore")
28
  load_dotenv()
 
36
  # ─── LLM Setup ───────────────────────────────────────────────────────────────
37
  def get_llm(api_key: str):
38
  return ChatGoogleGenerativeAI(
39
+ model="gemini-1.5-flash",
40
  google_api_key=api_key,
41
  temperature=0.3,
42
  convert_system_message_to_human=True,
 
314
  text = text[4:]
315
  return json.loads(text.strip())
316
  except Exception:
317
+ return {"chart_type": "distribution_plots", "x_col": None, "y_col": None, "reason": "Default chart"}