vn6295337 Claude Opus 4.5 committed on
Commit
30dbe43
·
1 Parent(s): b9618e8

fix: Improve Reddit and yfinance reliability

Browse files

- Reddit: Use browser-like User-Agent to avoid 403 blocks
- Reddit: Add proper HTTP status code logging
- yfinance: Add retry logic with exponential backoff for rate limits

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>

mcp-servers/sentiment-basket/server.py CHANGED
@@ -139,7 +139,10 @@ async def fetch_reddit_posts(ticker: str, company_name: str = "") -> dict:
139
  """
140
  try:
141
  async with httpx.AsyncClient() as client:
142
- headers = {"User-Agent": "SentimentBasket/1.0"}
 
 
 
143
 
144
  subreddits = ["wallstreetbets", "stocks"]
145
  posts_list = []
@@ -160,9 +163,17 @@ async def fetch_reddit_posts(ticker: str, company_name: str = "") -> dict:
160
  try:
161
  response = await client.get(url, headers=headers, params=params, timeout=10)
162
  if response.status_code == 429:
163
- continue # Rate limited, skip this subreddit
 
 
 
 
 
 
 
164
  data = response.json()
165
- except:
 
166
  continue
167
 
168
  posts = data.get("data", {}).get("children", [])
 
139
  """
140
  try:
141
  async with httpx.AsyncClient() as client:
142
+ # Use browser-like User-Agent to avoid Reddit 403 blocks
143
+ headers = {
144
+ "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36"
145
+ }
146
 
147
  subreddits = ["wallstreetbets", "stocks"]
148
  posts_list = []
 
163
  try:
164
  response = await client.get(url, headers=headers, params=params, timeout=10)
165
  if response.status_code == 429:
166
+ logger.warning(f"Reddit rate limited (429) for r/{subreddit}")
167
+ continue
168
+ if response.status_code == 403:
169
+ logger.warning(f"Reddit access forbidden (403) for r/{subreddit}")
170
+ continue
171
+ if response.status_code != 200:
172
+ logger.warning(f"Reddit returned {response.status_code} for r/{subreddit}")
173
+ continue
174
  data = response.json()
175
+ except Exception as e:
176
+ logger.warning(f"Reddit fetch error for r/{subreddit}: {e}")
177
  continue
178
 
179
  posts = data.get("data", {}).get("children", [])
mcp-servers/valuation-basket/server.py CHANGED
@@ -58,14 +58,48 @@ executor = ThreadPoolExecutor(max_workers=2)
58
  # DATA FETCHERS (using yfinance)
59
  # ============================================================
60
 
61
- def _fetch_yfinance_sync(ticker: str) -> dict:
62
  """
63
  Synchronous yfinance fetch (runs in thread pool).
64
  Returns all valuation metrics from Yahoo Finance.
 
65
  """
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
66
  try:
67
- tk = yf.Ticker(ticker)
68
- info = tk.info
69
 
70
  if not info or info.get("regularMarketPrice") is None:
71
  return {"error": f"No data found for ticker {ticker}"}
 
58
  # DATA FETCHERS (using yfinance)
59
  # ============================================================
60
 
61
+ def _fetch_yfinance_sync(ticker: str, max_retries: int = 3) -> dict:
62
  """
63
  Synchronous yfinance fetch (runs in thread pool).
64
  Returns all valuation metrics from Yahoo Finance.
65
+ Includes retry logic for rate limiting.
66
  """
67
+ import time
68
+ last_error = None
69
+
70
+ for attempt in range(max_retries):
71
+ try:
72
+ tk = yf.Ticker(ticker)
73
+ info = tk.info
74
+
75
+ # Check if we got rate limited (empty info dict or specific error)
76
+ if not info:
77
+ raise Exception("Empty response from yfinance")
78
+ if "error" in str(info).lower() or "rate" in str(info).lower():
79
+ raise Exception(f"Possible rate limit: {info}")
80
+
81
+ break # Success, exit retry loop
82
+
83
+ except Exception as e:
84
+ last_error = e
85
+ error_str = str(e).lower()
86
+ if "rate" in error_str or "too many" in error_str or "429" in error_str:
87
+ wait_time = (2 ** attempt) + 1 # Exponential backoff: 2, 3, 5 seconds
88
+ logger.warning(f"yfinance rate limited for {ticker}, retrying in {wait_time}s (attempt {attempt + 1}/{max_retries})")
89
+ time.sleep(wait_time)
90
+ continue
91
+ else:
92
+ # Non-rate-limit error, don't retry
93
+ logger.error(f"yfinance fetch error for {ticker}: {e}")
94
+ return {"error": str(e)}
95
+
96
+ # If we exhausted retries
97
+ if last_error and not info:
98
+ logger.error(f"yfinance fetch failed after {max_retries} retries for {ticker}: {last_error}")
99
+ return {"error": str(last_error)}
100
+
101
  try:
102
+ # Proceed with parsing info
 
103
 
104
  if not info or info.get("regularMarketPrice") is None:
105
  return {"error": f"No data found for ticker {ticker}"}