Parthiban97 commited on
Commit
94dbd6f
Β·
verified Β·
1 Parent(s): a9d9774

Upload 2 files

Browse files
Files changed (2) hide show
  1. src/csv_enrichment.py +5 -1
  2. src/pe_pb_engine.py +602 -165
src/csv_enrichment.py CHANGED
@@ -323,7 +323,11 @@ def _triage_missing_cells(
323
  if not is_missing_val:
324
  continue
325
 
326
- if is_young:
 
 
 
 
327
  cells.append(TriagedCell(
328
  row_idx=idx, fund_name=fund, category=cat, column=col,
329
  current_value=raw, label=TRIAGE_YOUNG,
 
323
  if not is_missing_val:
324
  continue
325
 
326
+ # PE/PB are point-in-time portfolio metrics β€” fund age is irrelevant.
327
+ # Always attempt to fetch them regardless of how young the fund is.
328
+ age_exempt = col in ("P/E Ratio", "P/B Ratio")
329
+
330
+ if is_young and not age_exempt:
331
  cells.append(TriagedCell(
332
  row_idx=idx, fund_name=fund, category=cat, column=col,
333
  current_value=raw, label=TRIAGE_YOUNG,
src/pe_pb_engine.py CHANGED
@@ -1,146 +1,72 @@
1
  """
2
- pe_pb_engine.py β€” Fast P/E and P/B for Indian mutual funds via NSE index API.
3
 
4
- Approach:
5
- 1. Fetch all index PE/PB from NSE /api/allIndices (one request, ~110KB)
6
- 2. Map each fund's benchmark to an NSE index name
7
- 3. fund_PE = benchmark_index_PE (industry-standard proxy)
8
 
9
- Covers ~100% of equity funds. Debt/liquid/hybrid benchmarks skipped (no PE).
10
- Results cached 1 day in Neon.
 
 
 
 
 
 
 
 
 
 
 
 
 
11
 
12
  Usage:
13
  from src.pe_pb_engine import fetch_pe_pb, warm_index_cache
14
- pe, pb = fetch_pe_pb(benchmark_type="NIFTY 100 TRI")
 
 
 
 
 
15
  """
16
 
17
  from __future__ import annotations
18
 
 
19
  import json
20
  import os
21
  import re
22
- import time
23
  import threading
 
 
24
  from typing import Optional
25
 
 
26
  import requests
 
27
 
28
- # ── Neon cache ────────────────────────────────────────────────────────────────
29
- _INDEX_PE_TTL = 24 * 3600 # 1 day
30
 
31
- def _get_db():
32
- url = os.environ.get("DATABASE_URL", "")
33
- if not url:
34
- return None
35
- try:
36
- import psycopg2
37
- return psycopg2.connect(url)
38
- except Exception:
39
- return None
40
 
41
- def _cache_get(key: str) -> Optional[str]:
42
- conn = _get_db()
43
- if not conn:
44
- return None
45
- try:
46
- cur = conn.cursor()
47
- cur.execute("SELECT data, ts FROM nav_cache WHERE key = %s", (key,))
48
- row = cur.fetchone()
49
- conn.close()
50
- if not row:
51
- return None
52
- data, ts = row
53
- if (time.time() - ts) > _INDEX_PE_TTL:
54
- return None
55
- return data
56
- except Exception:
57
- return None
58
 
59
- def _cache_set(key: str, value: str) -> None:
60
- conn = _get_db()
61
- if not conn:
62
- return
63
- try:
64
- cur = conn.cursor()
65
- cur.execute(
66
- "INSERT INTO nav_cache (key, data, ts) VALUES (%s, %s, %s) "
67
- "ON CONFLICT (key) DO UPDATE SET data = EXCLUDED.data, ts = EXCLUDED.ts",
68
- (key, value, time.time()),
69
- )
70
- conn.commit()
71
- conn.close()
72
- except Exception:
73
- pass
74
 
75
- # ── In-process cache ──────────────────────────────────────────────────────────
76
- _INDEX_PE_CACHE: dict[str, tuple[float, float]] = {}
77
- _CACHE_LOADED = False
78
- _CACHE_LOCK = threading.Lock()
79
 
80
- # ── Benchmark normalisation map β†’ exact NSE index name ───────────────────────
81
- _BENCHMARK_MAP: dict[str, str] = {
82
- "NIFTY 50": "NIFTY 50",
83
- "NIFTY 100": "NIFTY 100",
84
- "NIFTY 200": "NIFTY 200",
85
- "NIFTY 500": "NIFTY 500",
86
- "NIFTY NEXT 50": "NIFTY NEXT 50",
87
- "NIFTY TOTAL MARKET": "NIFTY TOTAL MARKET",
88
- "NIFTY MIDCAP 50": "NIFTY MIDCAP 50",
89
- "NIFTY MIDCAP 100": "NIFTY MIDCAP 100",
90
- "NIFTY MIDCAP 150": "NIFTY MIDCAP 150",
91
- "NIFTY SMALLCAP 50": "NIFTY SMALLCAP 50",
92
- "NIFTY SMALLCAP 100": "NIFTY SMALLCAP 100",
93
- "NIFTY SMALLCAP 250": "NIFTY SMALLCAP 250",
94
- "NIFTY MIDSMALLCAP 400": "NIFTY MIDSMALLCAP 400",
95
- "NIFTY LARGEMIDCAP 250": "NIFTY LARGEMIDCAP 250",
96
- "NIFTY LARGE MIDCAP 250": "NIFTY LARGEMIDCAP 250",
97
- "NIFTY LARGE - MIDCAP 250": "NIFTY LARGEMIDCAP 250",
98
- "NIFTY500 MULTICAP 50:25:25": "NIFTY500 MULTICAP 50:25:25",
99
- "NIFTY500 MULTICAP MOMENTUM QUALITY 50": "NIFTY500 MULTICAP MOMENTUM QUALITY 50",
100
- "NIFTY BANK": "NIFTY BANK",
101
- "NIFTY FINANCIAL SERVICES": "NIFTY FINANCIAL SERVICES",
102
- "NIFTY FINANCIAL SERVICES 25/50": "NIFTY FINANCIAL SERVICES 25/50",
103
- "NIFTY FINANCIAL SERVICES EX-BANK": "NIFTY FINANCIAL SERVICES EX-BANK",
104
- "NIFTY PRIVATE BANK": "NIFTY PRIVATE BANK",
105
- "NIFTY PSU BANK": "NIFTY PSU BANK",
106
- "NIFTY IT": "NIFTY IT",
107
- "NIFTY FMCG": "NIFTY FMCG",
108
- "NIFTY PHARMA": "NIFTY PHARMA",
109
- "NIFTY HEALTHCARE INDEX": "NIFTY HEALTHCARE INDEX",
110
- "NIFTY HEALTHCARE": "NIFTY HEALTHCARE INDEX",
111
- "NIFTY AUTO": "NIFTY AUTO",
112
- "NIFTY METAL": "NIFTY METAL",
113
- "NIFTY REALTY": "NIFTY REALTY",
114
- "NIFTY INFRASTRUCTURE": "NIFTY INFRASTRUCTURE",
115
- "NIFTY COMMODITIES": "NIFTY COMMODITIES",
116
- "NIFTY ENERGY": "NIFTY ENERGY",
117
- "NIFTY OIL & GAS": "NIFTY OIL & GAS",
118
- "NIFTY MNC": "NIFTY MNC",
119
- "NIFTY CPSE": "NIFTY CPSE",
120
- "NIFTY PSE": "NIFTY PSE",
121
- "NIFTY INDIA CONSUMPTION": "NIFTY INDIA CONSUMPTION",
122
- "NIFTY INDIA MANUFACTURING": "NIFTY INDIA MANUFACTURING",
123
- "NIFTY INDIA DEFENCE": "NIFTY INDIA DEFENCE",
124
- "NIFTY HOUSING": "NIFTY HOUSING",
125
- "NIFTY CORE HOUSING": "NIFTY CORE HOUSING",
126
- "NIFTY IPO": "NIFTY IPO",
127
- "NIFTY TRANSPORTATION & LOGISTICS": "NIFTY TRANSPORTATION & LOGISTICS",
128
- "NIFTY CAPITAL MARKETS": "NIFTY CAPITAL MARKETS",
129
- "NIFTY MOBILITY": "NIFTY MOBILITY",
130
- "NIFTY RURAL": "NIFTY RURAL",
131
- "NIFTY MEDIA": "NIFTY MEDIA",
132
- "NIFTY CONSUMER DURABLES": "NIFTY CONSUMER DURABLES",
133
- "NIFTY CHEMICALS": "NIFTY CHEMICALS",
134
- "NIFTY100 LOW VOLATILITY 30": "NIFTY100 LOW VOLATILITY 30",
135
- "NIFTY 100 LOW VOLATILITY 30": "NIFTY100 LOW VOLATILITY 30",
136
- "NIFTY100 ESG": "NIFTY100 ESG",
137
- "NIFTY 100 ESG": "NIFTY100 ESG",
138
- "NIFTY100 ESG SECTOR LEADERS": "NIFTY100 ESG SECTOR LEADERS",
139
- "NIFTY200 MOMENTUM 30": "NIFTY200 MOMENTUM 30",
140
- "NIFTY 200 MOMENTUM 30": "NIFTY200 MOMENTUM 30",
141
- }
142
 
143
- # Tokens that indicate a debt/liquid/hybrid/foreign benchmark β€” skip PE
144
  _NO_PE_TOKENS = {
145
  "CRISIL", "G-SEC", "G SEC", "GSEC", "SDL", "GILT",
146
  "LIQUID", "OVERNIGHT", "1D RATE", "ARBITRAGE",
@@ -149,6 +75,45 @@ _NO_PE_TOKENS = {
149
  "COM.ADVISORKHOJ",
150
  }
151
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
152
 
153
  def _normalize_benchmark(bm: str) -> str:
154
  s = re.sub(r'\s+TRI\.?\s*$', '', bm.strip(), flags=re.IGNORECASE)
@@ -162,32 +127,126 @@ def _normalize_benchmark(bm: str) -> str:
162
  return s
163
 
164
 
165
- def _is_no_pe_benchmark(bm: str) -> bool:
166
- bm_upper = bm.upper()
167
- return any(token in bm_upper for token in _NO_PE_TOKENS)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
168
 
169
 
170
  # ── NSE session ──────────────────────────────────────────��────────────────────
171
- _NSE_SESSION: Optional[requests.Session] = None
172
  _NSE_SESSION_TS = 0.0
173
  _NSE_LOCK = threading.Lock()
174
- _NSE_SESSION_TTL = 300
175
-
176
 
177
  def _get_nse_session() -> requests.Session:
178
  global _NSE_SESSION, _NSE_SESSION_TS
179
  with _NSE_LOCK:
180
- if _NSE_SESSION is None or (time.time() - _NSE_SESSION_TS) > _NSE_SESSION_TTL:
181
  s = requests.Session()
182
  s.headers.update({
183
- "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
184
- "AppleWebKit/537.36 Chrome/120.0.0.0 Safari/537.36",
185
- "Accept": "application/json, */*",
186
- "Referer": "https://www.nseindia.com/",
 
 
187
  })
188
  try:
189
  s.get("https://www.nseindia.com/", timeout=10)
190
- time.sleep(0.5)
191
  except Exception:
192
  pass
193
  _NSE_SESSION = s
@@ -195,19 +254,23 @@ def _get_nse_session() -> requests.Session:
195
  return _NSE_SESSION
196
 
197
 
 
 
 
 
198
  def _fetch_all_index_pe() -> dict[str, tuple[float, float]]:
199
- """One NSE API call β†’ PE/PB for all 135 indices."""
200
- cache_key = "nse_index_pe_pb"
201
- cached = _cache_get(cache_key)
202
  if cached:
203
  data = json.loads(cached)
204
- print(f" [pe_pb] {len(data)} index PE/PB loaded from Neon cache")
205
  return {k: tuple(v) for k, v in data.items()}
206
 
207
- print(" [pe_pb] Fetching NSE allIndices (one-time per day)...")
208
  try:
209
  r = _get_nse_session().get(
210
- "https://www.nseindia.com/api/allIndices", timeout=15)
 
211
  r.raise_for_status()
212
  indices = r.json().get("data", [])
213
  except Exception as e:
@@ -222,8 +285,10 @@ def _fetch_all_index_pe() -> dict[str, tuple[float, float]]:
222
  if pe in ("-", None, "", "0") or pb in ("-", None, ""):
223
  continue
224
  try:
225
- result[name] = (float(str(pe).replace(",", "")),
226
- float(str(pb).replace(",", "")))
 
 
227
  except (ValueError, TypeError):
228
  pass
229
 
@@ -234,29 +299,16 @@ def _fetch_all_index_pe() -> dict[str, tuple[float, float]]:
234
 
235
 
236
  def warm_index_cache() -> dict[str, tuple[float, float]]:
237
- """Load index PE/PB into process memory. Call once at pipeline startup."""
238
- global _INDEX_PE_CACHE, _CACHE_LOADED
239
  with _CACHE_LOCK:
240
- if not _CACHE_LOADED:
241
  _INDEX_PE_CACHE = _fetch_all_index_pe()
242
- _CACHE_LOADED = True
243
  return _INDEX_PE_CACHE
244
 
245
 
246
- # ── Public API ────────────────────────────────────────────────────────────────
247
-
248
- def fetch_pe_pb(
249
- benchmark_type: str,
250
- scheme_code: str = "", # unused, kept for API compat
251
- fund_name: str = "",
252
- ) -> tuple[Optional[float], Optional[float]]:
253
- """
254
- Return (pe, pb) for a fund given its benchmark index name.
255
- Returns (None, None) for debt/liquid/hybrid or unrecognised benchmarks.
256
- """
257
- if not benchmark_type or not benchmark_type.strip():
258
- return None, None
259
- if _is_no_pe_benchmark(benchmark_type):
260
  return None, None
261
 
262
  index_map = warm_index_cache()
@@ -266,7 +318,6 @@ def fetch_pe_pb(
266
  norm = _normalize_benchmark(benchmark_type)
267
  nse_name = _BENCHMARK_MAP.get(norm)
268
 
269
- # Fuzzy fallback
270
  if not nse_name:
271
  norm_upper = norm.upper()
272
  for idx_name in index_map:
@@ -283,19 +334,405 @@ def fetch_pe_pb(
283
  if not nse_name or nse_name not in index_map:
284
  return None, None
285
 
286
- pe, pb = index_map[nse_name]
287
- return pe, pb
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
288
 
289
 
290
  def batch_fetch_pe_pb(
291
  fund_benchmarks: dict[str, str],
 
 
292
  ) -> dict[str, tuple[Optional[float], Optional[float]]]:
293
  """
294
  {fund_name: benchmark_type} β†’ {fund_name: (pe, pb)}
295
- All lookups share one in-process cache β€” essentially zero latency.
 
 
 
296
  """
 
 
297
  warm_index_cache()
298
- return {
299
- name: fetch_pe_pb(bm, fund_name=name)
300
- for name, bm in fund_benchmarks.items()
301
- }
 
 
 
 
 
 
 
 
 
1
  """
2
+ pe_pb_engine.py β€” P/E and P/B for Indian mutual funds.
3
 
4
+ Two-track approach:
5
+ ACTIVE funds β†’ AMFI monthly portfolio holdings + NSE/yfinance stock PE/PB
6
+ Weighted average: Portfolio PE = Ξ£ (weight% Γ— stock PE)
7
+ This is identical to what Groww shows (same AMFI source).
8
 
9
+ INDEX funds β†’ NSE allIndices API (benchmark index PE/PB)
10
+ Fast, real-time, already accurate since fund mirrors index.
11
+
12
+ Active vs Index detection:
13
+ Category contains "Index Fund", "ETF", "Exchange Traded" β†’ INDEX track
14
+ Everything else β†’ ACTIVE track
15
+
16
+ AMFI holdings URL pattern:
17
+ https://portal.amfiindia.com/spages/am{mon}{year}repo.xls
18
+ e.g. amfeb2026repo.xls (February 2026 data)
19
+
20
+ Caching:
21
+ - AMFI XLS : 30 days in Neon/SQLite (monthly data β€” no point refreshing sooner)
22
+ - Stock PE/PB : 1 day in Neon/SQLite (NSE stock data changes daily)
23
+ - Index PE/PB : 1 day in Neon/SQLite (existing behaviour)
24
 
25
  Usage:
26
  from src.pe_pb_engine import fetch_pe_pb, warm_index_cache
27
+ pe, pb = fetch_pe_pb(
28
+ benchmark_type="NIFTY 100 TRI",
29
+ fund_name="Mirae Asset Large Cap Fund",
30
+ category="Equity: Large Cap",
31
+ scheme_isin="INF769K01036", # optional β€” improves AMFI matching
32
+ )
33
  """
34
 
35
  from __future__ import annotations
36
 
37
+ import io
38
  import json
39
  import os
40
  import re
 
41
  import threading
42
+ import time
43
+ from datetime import datetime
44
  from typing import Optional
45
 
46
+ import pandas as pd
47
  import requests
48
+ import yfinance as yf
49
 
 
 
50
 
51
+ # ── TTLs ─────────────────────────────────────────────────────────────────────
52
+ _INDEX_PE_TTL = 24 * 3600 # 1 day β€” index PE/PB
53
+ _STOCK_PE_TTL = 24 * 3600 # 1 day β€” individual stock PE/PB
54
+ _AMFI_XLS_TTL = 30 * 24 * 3600 # 30 days β€” AMFI monthly holdings XLS
 
 
 
 
 
55
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
56
 
57
+ # ── Index fund category detection ─────────────────────────────────────────────
58
+ _INDEX_FUND_TOKENS = {
59
+ "INDEX FUND", "ETF", "EXCHANGE TRADED", "INDEX - DOMESTIC",
60
+ "INDEX - INTERNATIONAL", "OTHER ETFS", "GOLD ETF", "SILVER ETF",
61
+ "FUND OF FUNDS",
62
+ }
 
 
 
 
 
 
 
 
 
63
 
64
+ def _is_index_fund(category: str) -> bool:
65
+ cat_upper = (category or "").upper()
66
+ return any(token in cat_upper for token in _INDEX_FUND_TOKENS)
 
67
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
68
 
69
+ # ── No-PE benchmark tokens (debt/liquid/hybrid) ────────────────────────────────
70
  _NO_PE_TOKENS = {
71
  "CRISIL", "G-SEC", "G SEC", "GSEC", "SDL", "GILT",
72
  "LIQUID", "OVERNIGHT", "1D RATE", "ARBITRAGE",
 
75
  "COM.ADVISORKHOJ",
76
  }
77
 
78
+ def _is_no_pe_benchmark(bm: str) -> bool:
79
+ bm_upper = bm.upper()
80
+ return any(token in bm_upper for token in _NO_PE_TOKENS)
81
+
82
+
83
+ # ── NSE index benchmark map ────────────────────────────────────────────────────
84
+ _BENCHMARK_MAP: dict[str, str] = {
85
+ "NIFTY 50": "NIFTY 50", "NIFTY 100": "NIFTY 100",
86
+ "NIFTY 200": "NIFTY 200", "NIFTY 500": "NIFTY 500",
87
+ "NIFTY NEXT 50": "NIFTY NEXT 50", "NIFTY TOTAL MARKET": "NIFTY TOTAL MARKET",
88
+ "NIFTY MIDCAP 50": "NIFTY MIDCAP 50", "NIFTY MIDCAP 100": "NIFTY MIDCAP 100",
89
+ "NIFTY MIDCAP 150": "NIFTY MIDCAP 150",
90
+ "NIFTY SMALLCAP 50": "NIFTY SMALLCAP 50",
91
+ "NIFTY SMALLCAP 100": "NIFTY SMALLCAP 100",
92
+ "NIFTY SMALLCAP 250": "NIFTY SMALLCAP 250",
93
+ "NIFTY MIDSMALLCAP 400": "NIFTY MIDSMALLCAP 400",
94
+ "NIFTY LARGEMIDCAP 250": "NIFTY LARGEMIDCAP 250",
95
+ "NIFTY LARGE MIDCAP 250": "NIFTY LARGEMIDCAP 250",
96
+ "NIFTY LARGE - MIDCAP 250": "NIFTY LARGEMIDCAP 250",
97
+ "NIFTY500 MULTICAP 50:25:25": "NIFTY500 MULTICAP 50:25:25",
98
+ "NIFTY BANK": "NIFTY BANK",
99
+ "NIFTY FINANCIAL SERVICES": "NIFTY FINANCIAL SERVICES",
100
+ "NIFTY IT": "NIFTY IT", "NIFTY FMCG": "NIFTY FMCG",
101
+ "NIFTY PHARMA": "NIFTY PHARMA", "NIFTY HEALTHCARE INDEX": "NIFTY HEALTHCARE INDEX",
102
+ "NIFTY HEALTHCARE": "NIFTY HEALTHCARE INDEX",
103
+ "NIFTY AUTO": "NIFTY AUTO", "NIFTY METAL": "NIFTY METAL",
104
+ "NIFTY REALTY": "NIFTY REALTY", "NIFTY INFRASTRUCTURE": "NIFTY INFRASTRUCTURE",
105
+ "NIFTY COMMODITIES": "NIFTY COMMODITIES", "NIFTY ENERGY": "NIFTY ENERGY",
106
+ "NIFTY OIL & GAS": "NIFTY OIL & GAS", "NIFTY MNC": "NIFTY MNC",
107
+ "NIFTY CPSE": "NIFTY CPSE", "NIFTY PSE": "NIFTY PSE",
108
+ "NIFTY INDIA CONSUMPTION": "NIFTY INDIA CONSUMPTION",
109
+ "NIFTY INDIA MANUFACTURING": "NIFTY INDIA MANUFACTURING",
110
+ "NIFTY INDIA DEFENCE": "NIFTY INDIA DEFENCE",
111
+ "NIFTY HOUSING": "NIFTY HOUSING",
112
+ "NIFTY100 LOW VOLATILITY 30": "NIFTY100 LOW VOLATILITY 30",
113
+ "NIFTY 100 LOW VOLATILITY 30": "NIFTY100 LOW VOLATILITY 30",
114
+ "NIFTY200 MOMENTUM 30": "NIFTY200 MOMENTUM 30",
115
+ "NIFTY 200 MOMENTUM 30": "NIFTY200 MOMENTUM 30",
116
+ }
117
 
118
  def _normalize_benchmark(bm: str) -> str:
119
  s = re.sub(r'\s+TRI\.?\s*$', '', bm.strip(), flags=re.IGNORECASE)
 
127
  return s
128
 
129
 
130
+ # ── DB cache (SQLite local / Neon postgres production) ────────────────────────
131
+ _DATABASE_URL = os.environ.get("DATABASE_URL", "")
132
+ _USE_POSTGRES = bool(_DATABASE_URL)
133
+ import threading as _threading
134
+ _tls = _threading.local()
135
+
136
+
137
+ def _get_conn():
138
+ if _USE_POSTGRES:
139
+ import psycopg2
140
+ conn = getattr(_tls, "pg_conn", None)
141
+ if conn is None or conn.closed:
142
+ conn = psycopg2.connect(_DATABASE_URL, connect_timeout=10)
143
+ _tls.pg_conn = conn
144
+ try:
145
+ conn.cursor().execute("SELECT 1")
146
+ except Exception:
147
+ conn = psycopg2.connect(_DATABASE_URL, connect_timeout=10)
148
+ _tls.pg_conn = conn
149
+ return conn, True
150
+ else:
151
+ import sqlite3
152
+ from pathlib import Path
153
+ db_path = Path.home() / ".mf_nav_cache.db"
154
+ return sqlite3.connect(str(db_path)), False
155
+
156
+
157
+ def _cache_get(key: str, ttl: float) -> Optional[str]:
158
+ try:
159
+ conn, is_pg = _get_conn()
160
+ ph = "%s" if is_pg else "?"
161
+ if is_pg:
162
+ with conn.cursor() as cur:
163
+ cur.execute(f"SELECT data, ts FROM nav_cache WHERE key = {ph}", (key,))
164
+ row = cur.fetchone()
165
+ else:
166
+ with conn:
167
+ row = conn.execute(
168
+ f"SELECT data, ts FROM nav_cache WHERE key = {ph}", (key,)
169
+ ).fetchone()
170
+ if row and (time.time() - row[1]) < ttl:
171
+ return row[0]
172
+ except Exception:
173
+ pass
174
+ return None
175
+
176
+
177
+ def _cache_set(key: str, value: str) -> None:
178
+ try:
179
+ conn, is_pg = _get_conn()
180
+ ph = "%s" if is_pg else "?"
181
+ sql = (
182
+ f"INSERT INTO nav_cache (key, data, ts) VALUES ({ph},{ph},{ph}) "
183
+ f"ON CONFLICT (key) DO UPDATE SET data=EXCLUDED.data, ts=EXCLUDED.ts"
184
+ if is_pg else
185
+ f"INSERT OR REPLACE INTO nav_cache (key, data, ts) VALUES ({ph},{ph},{ph})"
186
+ )
187
+ if is_pg:
188
+ with conn.cursor() as cur:
189
+ cur.execute(sql, (key, value, time.time()))
190
+ conn.commit()
191
+ else:
192
+ with conn:
193
+ conn.execute(sql, (key, value, time.time()))
194
+ except Exception:
195
+ pass
196
+
197
+
198
+ def _init_cache_db() -> None:
199
+ try:
200
+ conn, is_pg = _get_conn()
201
+ sql = """CREATE TABLE IF NOT EXISTS nav_cache (
202
+ key TEXT PRIMARY KEY,
203
+ data TEXT NOT NULL,
204
+ ts DOUBLE PRECISION NOT NULL
205
+ )"""
206
+ if is_pg:
207
+ with conn.cursor() as cur:
208
+ cur.execute(sql)
209
+ conn.commit()
210
+ else:
211
+ with conn:
212
+ conn.execute(sql)
213
+ except Exception:
214
+ pass
215
+
216
+ try:
217
+ _init_cache_db()
218
+ except Exception:
219
+ pass
220
+
221
+
222
+ # ── In-process caches ─────────────────────────────────────────────────────────
223
+ _INDEX_PE_CACHE: dict[str, tuple[float, float]] = {}
224
+ _STOCK_PE_CACHE: dict[str, tuple[float | None, float | None]] = {}
225
+ _AMFI_HOLD_CACHE: dict[str, pd.DataFrame] = {} # scheme_isin/name β†’ holdings df
226
+ _CACHE_LOCK = threading.Lock()
227
 
228
 
229
  # ── NSE session ──────────────────────────────────────────��────────────────────
230
+ _NSE_SESSION: Optional[requests.Session] = None
231
  _NSE_SESSION_TS = 0.0
232
  _NSE_LOCK = threading.Lock()
 
 
233
 
234
  def _get_nse_session() -> requests.Session:
235
  global _NSE_SESSION, _NSE_SESSION_TS
236
  with _NSE_LOCK:
237
+ if _NSE_SESSION is None or (time.time() - _NSE_SESSION_TS) > 300:
238
  s = requests.Session()
239
  s.headers.update({
240
+ "User-Agent": (
241
+ "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
242
+ "AppleWebKit/537.36 Chrome/120.0.0.0 Safari/537.36"
243
+ ),
244
+ "Accept": "application/json, */*",
245
+ "Referer": "https://www.nseindia.com/",
246
  })
247
  try:
248
  s.get("https://www.nseindia.com/", timeout=10)
249
+ time.sleep(0.3)
250
  except Exception:
251
  pass
252
  _NSE_SESSION = s
 
254
  return _NSE_SESSION
255
 
256
 
257
+ # ═══════════════════════════════════════════════════════════════════════════════
258
+ # TRACK 1 β€” INDEX funds: NSE allIndices benchmark PE/PB
259
+ # ═══════════════════════════════════════════════════════════════════════════════
260
+
261
  def _fetch_all_index_pe() -> dict[str, tuple[float, float]]:
262
+ """Fetch PE/PB for all NSE indices in one API call. Cached 1 day."""
263
+ cache_key = "nse_index_pe_pb_v2"
264
+ cached = _cache_get(cache_key, _INDEX_PE_TTL)
265
  if cached:
266
  data = json.loads(cached)
 
267
  return {k: tuple(v) for k, v in data.items()}
268
 
269
+ print(" [pe_pb] Fetching NSE allIndices...")
270
  try:
271
  r = _get_nse_session().get(
272
+ "https://www.nseindia.com/api/allIndices", timeout=15
273
+ )
274
  r.raise_for_status()
275
  indices = r.json().get("data", [])
276
  except Exception as e:
 
285
  if pe in ("-", None, "", "0") or pb in ("-", None, ""):
286
  continue
287
  try:
288
+ result[name] = (
289
+ float(str(pe).replace(",", "")),
290
+ float(str(pb).replace(",", "")),
291
+ )
292
  except (ValueError, TypeError):
293
  pass
294
 
 
299
 
300
 
301
  def warm_index_cache() -> dict[str, tuple[float, float]]:
302
+ global _INDEX_PE_CACHE
 
303
  with _CACHE_LOCK:
304
+ if not _INDEX_PE_CACHE:
305
  _INDEX_PE_CACHE = _fetch_all_index_pe()
 
306
  return _INDEX_PE_CACHE
307
 
308
 
309
+ def _fetch_index_pe_pb(benchmark_type: str) -> tuple[Optional[float], Optional[float]]:
310
+ """Return PE/PB for a fund via its benchmark index (INDEX fund track)."""
311
+ if not benchmark_type or _is_no_pe_benchmark(benchmark_type):
 
 
 
 
 
 
 
 
 
 
 
312
  return None, None
313
 
314
  index_map = warm_index_cache()
 
318
  norm = _normalize_benchmark(benchmark_type)
319
  nse_name = _BENCHMARK_MAP.get(norm)
320
 
 
321
  if not nse_name:
322
  norm_upper = norm.upper()
323
  for idx_name in index_map:
 
334
  if not nse_name or nse_name not in index_map:
335
  return None, None
336
 
337
+ return index_map[nse_name]
338
+
339
+
340
+ # ═══════════════════════════════════════════════════════════════════════════════
341
+ # TRACK 2 β€” ACTIVE funds: AMFI holdings + stock PE/PB
342
+ # ═══════════════════════════════════════════════════════════════════════════════
343
+
344
+ def _amfi_xls_url(year: int | None = None, month: int | None = None) -> str:
345
+ """
346
+ Build AMFI monthly portfolio XLS URL.
347
+ Defaults to the most recently completed month's disclosure.
348
+ AMFI publishes by 10th of the following month, so:
349
+ - If today >= 10th: use last month
350
+ - If today < 10th: use month before last
351
+ """
352
+ now = datetime.now()
353
+ if year is None or month is None:
354
+ if now.day >= 10:
355
+ # Last month is fully published
356
+ ref = now.replace(day=1) - pd.DateOffset(months=1)
357
+ else:
358
+ # Still waiting for last month's β€” use month before last
359
+ ref = now.replace(day=1) - pd.DateOffset(months=2)
360
+ year = int(ref.year)
361
+ month = int(ref.month)
362
+
363
+ month_abbr = {
364
+ 1: "jan", 2: "feb", 3: "mar", 4: "apr",
365
+ 5: "may", 6: "jun", 7: "jul", 8: "aug",
366
+ 9: "sep", 10: "oct", 11: "nov", 12: "dec",
367
+ }[month]
368
+ yr2 = str(year)[-2:] # "2026" β†’ "26"
369
+ return f"https://portal.amfiindia.com/spages/am{month_abbr}{year}repo.xls"
370
+
371
+
372
+ def _download_amfi_xls() -> Optional[bytes]:
373
+ """Download AMFI monthly portfolio XLS. Returns raw bytes or None."""
374
+ url = _amfi_xls_url()
375
+ cache_key = f"amfi_xls:{url}"
376
+
377
+ cached = _cache_get(cache_key, _AMFI_XLS_TTL)
378
+ if cached:
379
+ print(f" [amfi] XLS loaded from cache ({url.split('/')[-1]})")
380
+ return bytes.fromhex(cached)
381
+
382
+ print(f" [amfi] Downloading {url.split('/')[-1]}...")
383
+ headers = {
384
+ "User-Agent": (
385
+ "Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
386
+ "AppleWebKit/537.36 Chrome/120.0.0.0 Safari/537.36"
387
+ ),
388
+ "Referer": "https://www.amfiindia.com/",
389
+ }
390
+ try:
391
+ r = requests.get(url, headers=headers, timeout=60)
392
+ r.raise_for_status()
393
+ raw = r.content
394
+ print(f" [amfi] Downloaded {len(raw):,} bytes")
395
+ _cache_set(cache_key, raw.hex())
396
+ return raw
397
+ except Exception as e:
398
+ print(f" [amfi] Download failed: {e}")
399
+ return None
400
+
401
+
402
+ def _parse_amfi_xls(raw: bytes) -> dict[str, pd.DataFrame]:
403
+ """
404
+ Parse AMFI monthly portfolio XLS.
405
+
406
+ The XLS has one sheet. Structure (repeating for each scheme):
407
+ Row N: Scheme name header line (e.g. "HDFC Large Cap Fund - Growth")
408
+ Row N+1: Column headers (Issuer Name | ISIN | ... | % to NAV)
409
+ Row N+2..: Holdings rows
410
+ (blank row separates schemes)
411
+
412
+ Returns: {scheme_name_upper: DataFrame with columns [isin, weight_pct]}
413
+ """
414
+ try:
415
+ df_raw = pd.read_excel(io.BytesIO(raw), header=None, dtype=str)
416
+ except Exception as e:
417
+ print(f" [amfi] XLS parse failed: {e}")
418
+ return {}
419
+
420
+ schemes: dict[str, pd.DataFrame] = {}
421
+ current_scheme = None
422
+ header_row = None
423
+ isin_col = None
424
+ weight_col = None
425
+ holding_rows: list[dict] = []
426
+
427
+ def _flush():
428
+ nonlocal current_scheme, header_row, isin_col, weight_col, holding_rows
429
+ if current_scheme and holding_rows:
430
+ schemes[current_scheme.upper()] = pd.DataFrame(holding_rows)
431
+ current_scheme = None
432
+ header_row = None
433
+ isin_col = None
434
+ weight_col = None
435
+ holding_rows = []
436
+
437
+ for _, row in df_raw.iterrows():
438
+ cells = [str(c).strip() if pd.notna(c) else "" for c in row]
439
+ non_empty = [c for c in cells if c]
440
+
441
+ # Blank row β†’ flush current scheme
442
+ if not non_empty:
443
+ _flush()
444
+ continue
445
+
446
+ # Detect column header row (contains "ISIN" and "% to NAV" or "% To NAV")
447
+ cells_upper = [c.upper() for c in cells]
448
+ if "ISIN" in cells_upper and any("% TO NAV" in c for c in cells_upper):
449
+ try:
450
+ isin_col = cells_upper.index("ISIN")
451
+ weight_col = next(
452
+ i for i, c in enumerate(cells_upper) if "% TO NAV" in c
453
+ )
454
+ header_row = True
455
+ except (ValueError, StopIteration):
456
+ pass
457
+ continue
458
+
459
+ # If we have headers, this is a data row
460
+ if header_row and isin_col is not None and weight_col is not None:
461
+ isin = cells[isin_col] if isin_col < len(cells) else ""
462
+ weight = cells[weight_col] if weight_col < len(cells) else ""
463
+
464
+ # Valid ISIN: starts with IN + 10 alphanumeric chars
465
+ if re.match(r'^IN[A-Z0-9]{10}$', isin):
466
+ try:
467
+ w = float(str(weight).replace(",", ""))
468
+ if w > 0:
469
+ holding_rows.append({"isin": isin, "weight_pct": w})
470
+ except (ValueError, TypeError):
471
+ pass
472
+ continue
473
+
474
+ # Scheme name line: long text in first cell, not all-caps header
475
+ first = cells[0] if cells else ""
476
+ if (
477
+ len(first) > 15
478
+ and not first.startswith("Scheme")
479
+ and not first.startswith("Fund")
480
+ and "%" not in first
481
+ and header_row is None
482
+ and current_scheme is None
483
+ ):
484
+ current_scheme = first
485
+ continue
486
+
487
+ _flush() # flush last scheme
488
+ print(f" [amfi] Parsed {len(schemes)} schemes from XLS")
489
+ return schemes
490
+
491
+
492
+ # ── AMFI holdings cache (process-level) ───────────────────────────────────────
493
+ _AMFI_SCHEMES: dict[str, pd.DataFrame] = {} # upper scheme name β†’ df
494
+ _AMFI_SCHEMES_LOCK = threading.Lock()
495
+ _AMFI_LOADED = False
496
+
497
+
498
+ def _ensure_amfi_loaded() -> dict[str, pd.DataFrame]:
499
+ global _AMFI_SCHEMES, _AMFI_LOADED
500
+ with _AMFI_SCHEMES_LOCK:
501
+ if not _AMFI_LOADED:
502
+ raw = _download_amfi_xls()
503
+ if raw:
504
+ _AMFI_SCHEMES = _parse_amfi_xls(raw)
505
+ _AMFI_LOADED = True
506
+ return _AMFI_SCHEMES
507
+
508
+
509
+ def _find_scheme_holdings(fund_name: str, scheme_isin: str = "") -> Optional[pd.DataFrame]:
510
+ """
511
+ Look up holdings for a fund from the AMFI XLS.
512
+ Tries ISIN match first (exact), then fuzzy name match.
513
+ """
514
+ schemes = _ensure_amfi_loaded()
515
+ if not schemes:
516
+ return None
517
+
518
+ # Fuzzy name match: normalise both sides
519
+ def _norm(s: str) -> str:
520
+ return re.sub(r'[^a-z0-9]', '', s.lower())
521
+
522
+ fund_norm = _norm(fund_name)
523
+
524
+ best_match: Optional[pd.DataFrame] = None
525
+ best_score = 0
526
+
527
+ for scheme_key, df in schemes.items():
528
+ key_norm = _norm(scheme_key)
529
+ # Score = length of longest common substring (simple but effective)
530
+ # Use overlap of words instead for robustness
531
+ fund_words = set(fund_norm.split()) if " " in fund_norm else {fund_norm}
532
+ key_words = set(key_norm.split()) if " " in key_norm else {key_norm}
533
+
534
+ # Character-level overlap
535
+ overlap = sum(1 for c in fund_norm if c in key_norm)
536
+ score = overlap / max(len(fund_norm), len(key_norm), 1)
537
+
538
+ if score > best_score and score > 0.7:
539
+ best_score = score
540
+ best_match = df
541
+
542
+ if best_match is not None:
543
+ return best_match
544
+
545
+ return None
546
+
547
+
548
+ # ── Stock PE/PB fetcher ────────────────────────────────────────────────────────
549
+
550
+ def _isin_to_yf_ticker(isin: str) -> str:
551
+ """
552
+ Convert Indian stock ISIN to Yahoo Finance ticker.
553
+ NSE stocks: append .NS (e.g. INE009A01021 β†’ lookup needed)
554
+ We use NSE's ISIN lookup API to get the symbol, then append .NS
555
+ """
556
+ # Check in-process cache first
557
+ cache_key = f"isin_ticker:{isin}"
558
+ cached = _cache_get(cache_key, 7 * 24 * 3600)
559
+ if cached:
560
+ return cached
561
+
562
+ try:
563
+ r = _get_nse_session().get(
564
+ f"https://www.nseindia.com/api/search/autocomplete?q={isin}",
565
+ timeout=10,
566
+ )
567
+ r.raise_for_status()
568
+ results = r.json().get("symbols", [])
569
+ for item in results:
570
+ symbol = item.get("symbol", "")
571
+ if symbol:
572
+ ticker = f"{symbol}.NS"
573
+ _cache_set(cache_key, ticker)
574
+ return ticker
575
+ except Exception:
576
+ pass
577
+ return ""
578
+
579
+
580
+ def _fetch_stock_pe_pb(isin: str) -> tuple[Optional[float], Optional[float]]:
581
+ """
582
+ Fetch PE and PB for a single stock ISIN via yfinance.
583
+ Returns (pe, pb) or (None, None).
584
+ """
585
+ global _STOCK_PE_CACHE
586
+ if isin in _STOCK_PE_CACHE:
587
+ return _STOCK_PE_CACHE[isin]
588
+
589
+ cache_key = f"stock_pe:{isin}"
590
+ cached = _cache_get(cache_key, _STOCK_PE_TTL)
591
+ if cached:
592
+ data = json.loads(cached)
593
+ result = (data.get("pe"), data.get("pb"))
594
+ _STOCK_PE_CACHE[isin] = result
595
+ return result
596
+
597
+ ticker_sym = _isin_to_yf_ticker(isin)
598
+ if not ticker_sym:
599
+ _STOCK_PE_CACHE[isin] = (None, None)
600
+ return None, None
601
+
602
+ try:
603
+ info = yf.Ticker(ticker_sym).info
604
+ pe = info.get("trailingPE") or info.get("forwardPE")
605
+ pb = info.get("priceToBook")
606
+ pe = float(pe) if pe is not None else None
607
+ pb = float(pb) if pb is not None else None
608
+ result = (pe, pb)
609
+ _cache_set(cache_key, json.dumps({"pe": pe, "pb": pb}))
610
+ _STOCK_PE_CACHE[isin] = result
611
+ return result
612
+ except Exception:
613
+ _STOCK_PE_CACHE[isin] = (None, None)
614
+ return None, None
615
+
616
+
617
def _compute_active_fund_pe_pb(
    fund_name: str,
    scheme_isin: str = "",
) -> tuple[Optional[float], Optional[float]]:
    """
    Compute portfolio-weighted PE/PB for an active fund using AMFI holdings.

    Portfolio PE = Σ (weight_i × PE_i) / Σ weight_i  (over stocks with valid PE)
    Portfolio PB = Σ (weight_i × PB_i) / Σ weight_i  (over stocks with valid PB)

    Args:
        fund_name:   Scheme name used to locate holdings in the AMFI data.
        scheme_isin: Optional scheme ISIN to disambiguate the lookup.

    Returns:
        (portfolio_pe, portfolio_pb); either may be None when no holding
        yielded a usable value.
    """
    holdings = _find_scheme_holdings(fund_name, scheme_isin)
    if holdings is None or holdings.empty:
        print(f"  [amfi] No holdings found for: {fund_name[:50]}")
        return None, None

    print(f"  [amfi] {fund_name[:45]}: {len(holdings)} holdings → fetching stock PE/PB...")

    weighted_pe_sum = 0.0
    weighted_pb_sum = 0.0
    weight_pe_total = 0.0
    weight_pb_total = 0.0

    from concurrent.futures import ThreadPoolExecutor, as_completed
    futures = {}
    with ThreadPoolExecutor(max_workers=10) as ex:
        for _, row in holdings.iterrows():
            isin = row["isin"]
            try:
                weight = float(row["weight_pct"])
            except (TypeError, ValueError):
                # Malformed weight in the AMFI sheet: skip this holding
                # instead of aborting the whole portfolio computation.
                continue
            if weight <= 0:
                continue  # zero/negative weights contribute nothing
            futures[ex.submit(_fetch_stock_pe_pb, isin)] = (isin, weight)

        for fut in as_completed(futures):
            isin, weight = futures[fut]
            try:
                pe, pb = fut.result()
            except Exception:
                pe, pb = None, None

            # Accumulate PE and PB independently: a stock may have a valid
            # PB but no PE (e.g. loss-making companies).
            if pe is not None and pe > 0:
                weighted_pe_sum += weight * pe
                weight_pe_total += weight
            if pb is not None and pb > 0:
                weighted_pb_sum += weight * pb
                weight_pb_total += weight

    portfolio_pe = round(weighted_pe_sum / weight_pe_total, 2) if weight_pe_total > 0 else None
    portfolio_pb = round(weighted_pb_sum / weight_pb_total, 2) if weight_pb_total > 0 else None

    # Coverage = share of NAV (by weight) that contributed a usable PE.
    coverage_pct = round(weight_pe_total, 1)
    print(
        f"  [amfi] {fund_name[:40]}: "
        f"PE={portfolio_pe} PB={portfolio_pb} "
        f"(coverage {coverage_pct}% of NAV)"
    )
    return portfolio_pe, portfolio_pb
671
+
672
+
673
+ # ═══════════════════════════════════════════════════════════════════════════════
674
+ # PUBLIC API
675
+ # ═══════════════════════════════════════════════════════════════════════════════
676
+
677
def fetch_pe_pb(
    benchmark_type: str,
    scheme_code: str = "",  # unused, kept for backward compat
    fund_name: str = "",
    category: str = "",
    scheme_isin: str = "",
) -> tuple[Optional[float], Optional[float]]:
    """
    Return (pe, pb) for a fund.

    Routing (in evaluation order):
      - Debt/liquid (benchmark contains CRISIL/GSEC/etc.)   → (None, None)
      - Index fund (category contains "Index Fund"/"ETF"/…) → NSE index API
      - Active fund (everything else)                        → AMFI holdings
        └─ Falls back to NSE index PE/PB if AMFI holdings unavailable

    Returns:
        (pe, pb); either may be None when the metric is unavailable or
        not applicable for the fund type.
    """
    # Debt / liquid → no PE applicable
    if _is_no_pe_benchmark(benchmark_type):
        return None, None

    # Index funds → use benchmark index PE/PB (accurate, real-time)
    if _is_index_fund(category):
        return _fetch_index_pe_pb(benchmark_type)

    # Active funds → AMFI holdings-based PE/PB
    if fund_name:
        pe, pb = _compute_active_fund_pe_pb(fund_name, scheme_isin)
        if pe is not None or pb is not None:
            return pe, pb
        # Fallback: if AMFI lookup failed, use index PE/PB as proxy
        print(f"  [pe_pb] AMFI fallback → index PE/PB for: {fund_name[:50]}")

    return _fetch_index_pe_pb(benchmark_type)
710
 
711
 
712
def batch_fetch_pe_pb(
    fund_benchmarks: dict[str, str],
    fund_categories: dict[str, str] | None = None,
    fund_isins: dict[str, str] | None = None,
) -> dict[str, tuple[Optional[float], Optional[float]]]:
    """
    Resolve PE/PB for many funds in one call.

    Args:
        fund_benchmarks: {fund_name: benchmark_type} — required.
        fund_categories: {fund_name: category} — optional.
        fund_isins:      {fund_name: scheme_isin} — optional.

    Returns:
        {fund_name: (pe, pb)} with None where a metric is unavailable.
    """
    # Warm the shared data sources once, up front, so the per-fund calls
    # below never trigger redundant downloads.
    _ensure_amfi_loaded()
    warm_index_cache()

    categories = fund_categories or {}
    isins = fund_isins or {}
    return {
        name: fetch_pe_pb(
            benchmark_type=bench,
            fund_name=name,
            category=categories.get(name, ""),
            scheme_isin=isins.get(name, ""),
        )
        for name, bench in fund_benchmarks.items()
    }