Asish Karthikeya Gogineni committed on
Commit
6bdaaac
·
1 Parent(s): 4ba30a0

Deploy Sentinel AI 2026-04-06_16:26:49

Browse files
Files changed (5) hide show
  1. alphavantage_mcp.py +15 -1
  2. mcp_gateway.py +22 -7
  3. private_mcp.py +43 -18
  4. requirements.txt +3 -2
  5. tavily_mcp.py +9 -1
alphavantage_mcp.py CHANGED
@@ -35,7 +35,12 @@ else:
35
 
36
  # --- FastAPI App & Alpha Vantage Client ---
37
  app = FastAPI(title="Aegis Alpha Vantage MCP Server")
38
- ts = TimeSeries(key=ALPHA_VANTAGE_API_KEY, output_format='json')
 
 
 
 
 
39
 
40
  @app.post("/market_data")
41
  async def get_market_data(payload: dict):
@@ -58,6 +63,9 @@ async def get_market_data(payload: dict):
58
  logger.info(f"Received market data request for symbol: {symbol}, time_range: {time_range}")
59
 
60
  try:
 
 
 
61
  # Route to appropriate API based on time range
62
  if time_range == "INTRADAY":
63
  # Intraday data (last 4-6 hours, 5-min intervals)
@@ -220,6 +228,9 @@ async def get_company_overview(payload: dict):
220
  logger.info(f"Fetching company overview for {symbol}")
221
 
222
  try:
 
 
 
223
  url = "https://www.alphavantage.co/query"
224
  params = {
225
  "function": "OVERVIEW",
@@ -349,6 +360,9 @@ async def get_global_quote(payload: dict):
349
  logger.info(f"Fetching global quote for {symbol}")
350
 
351
  try:
 
 
 
352
  url = "https://www.alphavantage.co/query"
353
  params = {
354
  "function": "GLOBAL_QUOTE",
 
35
 
36
  # --- FastAPI App & Alpha Vantage Client ---
37
  app = FastAPI(title="Aegis Alpha Vantage MCP Server")
38
+ ts = None
39
+ if ALPHA_VANTAGE_API_KEY:
40
+ try:
41
+ ts = TimeSeries(key=ALPHA_VANTAGE_API_KEY, output_format='json')
42
+ except Exception as e:
43
+ logger.error(f"Failed to initialize Alpha Vantage TimeSeries: {e}")
44
 
45
  @app.post("/market_data")
46
  async def get_market_data(payload: dict):
 
63
  logger.info(f"Received market data request for symbol: {symbol}, time_range: {time_range}")
64
 
65
  try:
66
+ if not ts:
67
+ raise ValueError("TimeSeries not initialized (missing API key).")
68
+
69
  # Route to appropriate API based on time range
70
  if time_range == "INTRADAY":
71
  # Intraday data (last 4-6 hours, 5-min intervals)
 
228
  logger.info(f"Fetching company overview for {symbol}")
229
 
230
  try:
231
+ if not ALPHA_VANTAGE_API_KEY:
232
+ raise ValueError("Alpha Vantage API Key is missing.")
233
+
234
  url = "https://www.alphavantage.co/query"
235
  params = {
236
  "function": "OVERVIEW",
 
360
  logger.info(f"Fetching global quote for {symbol}")
361
 
362
  try:
363
+ if not ALPHA_VANTAGE_API_KEY:
364
+ raise ValueError("Alpha Vantage API Key is missing.")
365
+
366
  url = "https://www.alphavantage.co/query"
367
  params = {
368
  "function": "GLOBAL_QUOTE",
mcp_gateway.py CHANGED
@@ -17,14 +17,26 @@ logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(level
17
  logger = logging.getLogger("MCP_Gateway")
18
 
19
  # --- Import Microservices for Consolidation ---
 
20
  try:
21
  from tavily_mcp import app as tavily_app
 
 
 
 
 
 
22
  from alphavantage_mcp import app as alphavantage_app
 
 
 
 
 
 
23
  from private_mcp import app as private_app
24
- logger.info("Successfully imported microservices for consolidation.")
25
- except ImportError as e:
26
- logger.critical(f"Failed to import microservices: {e}")
27
- raise
28
 
29
  # --- Configuration (Updated for Monolithic Mode) ---
30
  # Default to internal mounted paths on the same port (8000)
@@ -45,9 +57,12 @@ app.add_middleware(
45
  )
46
 
47
  # --- Mount Microservices ---
48
- app.mount("/tavily", tavily_app)
49
- app.mount("/alphavantage", alphavantage_app)
50
- app.mount("/private", private_app)
 
 
 
51
 
52
  client = httpx.AsyncClient()
53
 
 
17
  logger = logging.getLogger("MCP_Gateway")
18
 
19
  # --- Import Microservices for Consolidation ---
20
+ tavily_app = None
21
  try:
22
  from tavily_mcp import app as tavily_app
23
+ logger.info("Successfully imported Tavily microservice.")
24
+ except Exception as e:
25
+ logger.error(f"Failed to import Tavily microservice: {e}")
26
+
27
+ alphavantage_app = None
28
+ try:
29
  from alphavantage_mcp import app as alphavantage_app
30
+ logger.info("Successfully imported AlphaVantage microservice.")
31
+ except Exception as e:
32
+ logger.error(f"Failed to import AlphaVantage microservice: {e}")
33
+
34
+ private_app = None
35
+ try:
36
  from private_mcp import app as private_app
37
+ logger.info("Successfully imported Private microservice.")
38
+ except Exception as e:
39
+ logger.error(f"Failed to import Private microservice: {e}")
 
40
 
41
  # --- Configuration (Updated for Monolithic Mode) ---
42
  # Default to internal mounted paths on the same port (8000)
 
57
  )
58
 
59
  # --- Mount Microservices ---
60
+ if tavily_app:
61
+ app.mount("/tavily", tavily_app)
62
+ if alphavantage_app:
63
+ app.mount("/alphavantage", alphavantage_app)
64
+ if private_app:
65
+ app.mount("/private", private_app)
66
 
67
  client = httpx.AsyncClient()
68
 
private_mcp.py CHANGED
@@ -3,9 +3,11 @@ from fastapi import FastAPI, HTTPException
3
  import uvicorn
4
  import sqlite3
5
  import logging
6
- from langchain_core.prompts import ChatPromptTemplate
7
- from langchain_core.output_parsers import StrOutputParser
8
- from langchain_ollama import ChatOllama
 
 
9
 
10
  # --- Logging Setup ---
11
  logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
@@ -14,26 +16,47 @@ logger = logging.getLogger("Private_MCP_Server")
14
  # --- Database Configuration ---
15
  DB_FILE = "portfolio.db"
16
 
17
- # --- LLM Configuration (Local Llama 3) ---
18
- # This connects to the Ollama application running on your machine.
19
- # Make sure Ollama and the llama3 model are running.
20
- llm = ChatOllama(model="llama3", temperature=0)
21
-
22
- # --- Text-to-SQL Prompt Engineering ---
23
- # This prompt is carefully designed to make Llama 3 generate ONLY safe SQL queries.
24
- text_to_sql_prompt = ChatPromptTemplate.from_messages([
25
- ("system",
26
- """You are a Text-to-SQL assistant. Convert the question to a read-only SQLite query for the 'holdings' table.
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
27
  Schema: symbol (TEXT), shares (INTEGER), average_cost (REAL).
28
  RULES:
29
  1. SELECT only. No INSERT/UPDATE/DELETE.
30
  2. Output ONLY the SQL query. No markdown.
31
  """),
32
- ("human", "Question: {question}")
33
- ])
 
 
 
 
 
34
 
35
- # Create the LangChain chain for Text-to-SQL
36
- sql_generation_chain = text_to_sql_prompt | llm | StrOutputParser()
37
 
38
  # --- FastAPI App ---
39
  app = FastAPI(title="Aegis Private MCP Server")
@@ -127,8 +150,10 @@ async def get_portfolio_data(payload: dict):
127
  try:
128
  # Step 1: Generate the SQL query using the local LLM
129
  try:
 
 
130
  generated_sql = await sql_generation_chain.ainvoke({"question": question})
131
- logger.info(f"Llama 3 generated SQL: {generated_sql}")
132
  except Exception as llm_error:
133
  logger.warning(f"LLM generation failed (likely Ollama offline): {llm_error}. Using fallback logic.")
134
  # Fallback Logic: Dynamic symbol extraction
 
3
  import uvicorn
4
  import sqlite3
5
  import logging
6
+ import os
7
+ from dotenv import load_dotenv
8
+
9
+ # --- Configuration ---
10
+ load_dotenv()
11
 
12
  # --- Logging Setup ---
13
  logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
 
16
  # --- Database Configuration ---
17
  DB_FILE = "portfolio.db"
18
 
19
+ # --- LLM Configuration (Google Gemini) ---
20
+ # For cloud deployment on HF Spaces, we use Gemini instead of local Llama.
21
+ sql_generation_chain = None
22
+ try:
23
+ from langchain_google_genai import ChatGoogleGenerativeAI
24
+
25
+ GOOGLE_API_KEY = os.getenv("GOOGLE_API_KEY")
26
+ if not GOOGLE_API_KEY:
27
+ try:
28
+ import toml
29
+ secrets_path = os.path.join(os.path.dirname(__file__), ".streamlit", "secrets.toml")
30
+ if os.path.exists(secrets_path):
31
+ secrets = toml.load(secrets_path)
32
+ GOOGLE_API_KEY = secrets.get("GOOGLE_API_KEY")
33
+ except Exception:
34
+ pass
35
+
36
+ if GOOGLE_API_KEY:
37
+ from langchain_core.prompts import ChatPromptTemplate
38
+ from langchain_core.output_parsers import StrOutputParser
39
+
40
+ llm = ChatGoogleGenerativeAI(model="gemini-2.5-flash", temperature=0, google_api_key=GOOGLE_API_KEY)
41
+
42
+ text_to_sql_prompt = ChatPromptTemplate.from_messages([
43
+ ("system",
44
+ """You are a Text-to-SQL assistant. Convert the question to a read-only SQLite query for the 'holdings' table.
45
  Schema: symbol (TEXT), shares (INTEGER), average_cost (REAL).
46
  RULES:
47
  1. SELECT only. No INSERT/UPDATE/DELETE.
48
  2. Output ONLY the SQL query. No markdown.
49
  """),
50
+ ("human", "Question: {question}")
51
+ ])
52
+
53
+ sql_generation_chain = text_to_sql_prompt | llm | StrOutputParser()
54
+ logger.info("Successfully initialized Gemini LLM for Text-to-SQL.")
55
+ else:
56
+ logger.warning("GOOGLE_API_KEY not found. Private MCP will use regex fallback.")
57
 
58
+ except Exception as e:
59
+ logger.error(f"Failed to initialize Gemini logic: {e}. Will use regex fallback.")
60
 
61
  # --- FastAPI App ---
62
  app = FastAPI(title="Aegis Private MCP Server")
 
150
  try:
151
  # Step 1: Generate the SQL query using the local LLM
152
  try:
153
+ if sql_generation_chain is None:
154
+ raise ValueError("sql_generation_chain not initialized (missing API key or imports).")
155
  generated_sql = await sql_generation_chain.ainvoke({"question": question})
156
+ logger.info(f"Gemini generated SQL: {generated_sql}")
157
  except Exception as llm_error:
158
  logger.warning(f"LLM generation failed (likely Ollama offline): {llm_error}. Using fallback logic.")
159
  # Fallback Logic: Dynamic symbol extraction
requirements.txt CHANGED
@@ -10,8 +10,8 @@ httpx
10
  alpha_vantage
11
  fastapi
12
  uvicorn[standard]
13
- tavily
14
- langchain_ollama
15
  langchain-google-genai
16
  fpdf2
17
  pdfplumber
@@ -21,3 +21,4 @@ fredapi
21
  jinja2
22
  matplotlib
23
  requests
 
 
10
  alpha_vantage
11
  fastapi
12
  uvicorn[standard]
13
+ tavily-python
14
+ langchain-ollama
15
  langchain-google-genai
16
  fpdf2
17
  pdfplumber
 
21
  jinja2
22
  matplotlib
23
  requests
24
+ toml
tavily_mcp.py CHANGED
@@ -35,7 +35,12 @@ else:
35
 
36
  # --- FastAPI App & Tavily Client ---
37
  app = FastAPI(title="Aegis Tavily MCP Server")
38
- tavily = TavilyClient(api_key=TAVILY_API_KEY)
 
 
 
 
 
39
 
40
  @app.post("/research")
41
  async def perform_research(payload: dict):
@@ -59,6 +64,9 @@ async def perform_research(payload: dict):
59
  # --- THIS IS THE CORRECTED LOGIC ---
60
  all_results = []
61
  try:
 
 
 
62
  # Loop through each query and perform a search
63
  for query in queries:
64
  logger.info(f"Performing search for query: '{query}'")
 
35
 
36
  # --- FastAPI App & Tavily Client ---
37
  app = FastAPI(title="Aegis Tavily MCP Server")
38
+ tavily = None
39
+ if TAVILY_API_KEY:
40
+ try:
41
+ tavily = TavilyClient(api_key=TAVILY_API_KEY)
42
+ except Exception as e:
43
+ logger.error(f"Failed to initialize TavilyClient: {e}")
44
 
45
  @app.post("/research")
46
  async def perform_research(payload: dict):
 
64
  # --- THIS IS THE CORRECTED LOGIC ---
65
  all_results = []
66
  try:
67
+ if not tavily:
68
+ raise ValueError("TavilyClient not initialized (missing API key).")
69
+
70
  # Loop through each query and perform a search
71
  for query in queries:
72
  logger.info(f"Performing search for query: '{query}'")