prashantmatlani commited on
Commit
5683339
·
1 Parent(s): 6819726

Modified agent_llm, agent, and inference to use the base API URL

Browse files
Files changed (3) hide show
  1. agent.py +36 -12
  2. agent_llm.py +55 -21
  3. requirements.txt +0 -1
agent.py CHANGED
@@ -9,12 +9,18 @@ import time
9
  import json
10
  import random
11
 
12
- from dotenv import load_dotenv
13
  from openai import OpenAI
14
- from groq import Groq
15
 
16
  from app.env import CustomerSupportEnv
17
 
 
 
 
 
 
 
 
18
  # load_dotenv()
19
 
20
  # client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
@@ -22,10 +28,16 @@ from app.env import CustomerSupportEnv
22
  # BASE_URL = "http://127.0.0.1:8001"
23
  #load_dotenv("/home/pb/projects/openenv-customer-support/.env")
24
 
25
- BASE_DIR = os.path.dirname(os.path.abspath(__file__))
26
- ENV_PATH = os.path.join(BASE_DIR, ".env")
 
 
 
 
 
 
 
27
 
28
- load_dotenv(ENV_PATH)
29
  print(f"\nCWD: {os.getcwd()}")
30
 
31
  #client = Groq(api_key=os.getenv("GROQ_API_KEY"))
@@ -46,13 +58,25 @@ print(f"\nCWD: {os.getcwd()}")
46
  # CONFIG (NEW - VENDOR NEUTRAL)
47
  # =========================
48
  def get_llm_client():
49
- return OpenAI(
50
- base_url=os.getenv(
51
- "API_BASE_URL",
52
- "https://router.huggingface.co/v1"
53
- ),
54
- api_key=os.getenv("API_KEY") or os.getenv("GROQ_API_KEY")
55
- )
 
 
 
 
 
 
 
 
 
 
 
 
56
 
57
  client = get_llm_client()
58
 
 
9
  import json
10
  import random
11
 
 
12
  from openai import OpenAI
13
+ #from groq import Groq
14
 
15
  from app.env import CustomerSupportEnv
16
 
17
+ BASE_DIR = os.path.dirname(os.path.abspath(__file__))
18
+
19
+ #from dotenv import load_dotenv
20
+ #ENV_PATH = os.path.join(BASE_DIR, ".env")
21
+ #load_dotenv(ENV_PATH)
22
+
23
+
24
  # load_dotenv()
25
 
26
  # client = OpenAI(api_key=os.getenv("OPENAI_API_KEY"))
 
28
  # BASE_URL = "http://127.0.0.1:8001"
29
  #load_dotenv("/home/pb/projects/openenv-customer-support/.env")
30
 
31
+ try:
32
+ from dotenv import load_dotenv
33
+
34
+ ENV_PATH = os.path.join(BASE_DIR, ".env")
35
+ load_dotenv(ENV_PATH)
36
+
37
+ except ImportError:
38
+ # dotenv not available in validator environment
39
+ pass
40
 
 
41
  print(f"\nCWD: {os.getcwd()}")
42
 
43
  #client = Groq(api_key=os.getenv("GROQ_API_KEY"))
 
58
  # CONFIG (NEW - VENDOR NEUTRAL)
59
  # =========================
60
  def get_llm_client():
61
+ if OpenAI is None:
62
+ return None
63
+
64
+ api_key = os.getenv("API_KEY") or os.getenv("GROQ_API_KEY")
65
+
66
+ if not api_key:
67
+ return None # 🔥 critical
68
+
69
+ try:
70
+ return OpenAI(
71
+ base_url=os.getenv(
72
+ "API_BASE_URL",
73
+ "https://router.huggingface.co/v1"
74
+ ),
75
+ api_key=api_key
76
+ )
77
+ except Exception:
78
+ return None
79
+
80
 
81
  client = get_llm_client()
82
 
agent_llm.py CHANGED
@@ -13,27 +13,54 @@
13
  import os
14
  import json
15
  import time
16
- from dotenv import load_dotenv
17
- from groq import Groq
18
- from openai import OpenAI
19
 
20
  from app.env import CustomerSupportEnv
21
 
22
- load_dotenv()
 
 
23
 
24
  #client = Groq(api_key=os.getenv("GROQ_API_KEY"))
25
 
26
  # =========================
27
- # CONFIG (NEW)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
28
  # =========================
29
  def get_llm_client():
30
- return OpenAI(
31
- base_url=os.getenv(
32
- "API_BASE_URL",
33
- "https://router.huggingface.co/v1"
34
- ),
35
- api_key=os.getenv("API_KEY") or os.getenv("GROQ_API_KEY")
36
- )
 
 
 
 
 
 
 
 
 
 
 
 
37
 
38
  client = get_llm_client()
39
 
@@ -76,17 +103,24 @@ FORMAT:
76
 
77
 
78
  # =========================
79
- # LLM CALL
80
  # =========================
81
  def call_llm(prompt):
82
- completion = client.chat.completions.create(
83
- model=os.getenv("MODEL_NAME"),
84
- #model="llama-3.1-8b-instant",
85
- messages=[{"role": "user", "content": prompt}],
86
- temperature=0.2,
87
- response_format={"type": "json_object"}
88
- )
89
- return completion.choices[0].message.content.strip()
 
 
 
 
 
 
 
90
 
91
 
92
  # =========================
 
13
  import os
14
  import json
15
  import time
16
+ #from groq import Groq
17
+ #from openai import OpenAI
 
18
 
19
  from app.env import CustomerSupportEnv
20
 
21
+ #from dotenv import load_dotenv
22
+ #load_dotenv()
23
+
24
 
25
  #client = Groq(api_key=os.getenv("GROQ_API_KEY"))
26
 
27
  # =========================
28
+ # OPTIONAL IMPORTS (SAFE)
29
+ # =========================
30
+ try:
31
+ from openai import OpenAI
32
+ except ImportError:
33
+ OpenAI = None
34
+
35
+ try:
36
+ from dotenv import load_dotenv
37
+ load_dotenv()
38
+ except ImportError:
39
+ pass
40
+
41
+ # =========================
42
+ # CONFIG - CLIENT-SAFE
43
  # =========================
44
  def get_llm_client():
45
+ if OpenAI is None:
46
+ return None
47
+
48
+ api_key = os.getenv("API_KEY") or os.getenv("GROQ_API_KEY")
49
+
50
+ if not api_key:
51
+ return None # 🔥 critical
52
+
53
+ try:
54
+ return OpenAI(
55
+ base_url=os.getenv(
56
+ "API_BASE_URL",
57
+ "https://router.huggingface.co/v1"
58
+ ),
59
+ api_key=api_key
60
+ )
61
+ except Exception:
62
+ return None
63
+
64
 
65
  client = get_llm_client()
66
 
 
103
 
104
 
105
  # =========================
106
+ # LLM CALL (SAFE)
107
  # =========================
108
  def call_llm(prompt):
109
+ if client is None:
110
+ return None # 🔥 triggers fallback
111
+
112
+ try:
113
+ completion = client.chat.completions.create(
114
+ model=os.getenv("MODEL_NAME", "unknown-model"),
115
+ messages=[{"role": "user", "content": prompt}],
116
+ temperature=0.2,
117
+ response_format={"type": "json_object"}
118
+ )
119
+
120
+ return completion.choices[0].message.content.strip()
121
+
122
+ except Exception:
123
+ return None # 🔥 triggers fallback
124
 
125
 
126
  # =========================
requirements.txt CHANGED
@@ -1,7 +1,6 @@
1
  fastapi
2
  uvicorn
3
  pydantic
4
- openai
5
  groq
6
  python-dotenv
7
  pyyaml
 
1
  fastapi
2
  uvicorn
3
  pydantic
 
4
  groq
5
  python-dotenv
6
  pyyaml