Spaces:
Running
Running
andykr1k committed on
Commit ·
739b5c0
1
Parent(s): 4ff1bb6
added scheduler, logging and optimization updates
Browse files
app.py
CHANGED
|
@@ -73,7 +73,6 @@ def load_and_preprocess_data():
|
|
| 73 |
followers = fetch_table('followers', 'id, following')
|
| 74 |
users = fetch_table('profiles', 'id')
|
| 75 |
|
| 76 |
-
# Use native Python for merging instead of pandas
|
| 77 |
follower_dict = {f['id']: f['following'] for f in followers}
|
| 78 |
user_set = {u['id'] for u in users}
|
| 79 |
merged = [
|
|
@@ -89,12 +88,8 @@ def create_graph_dataframe(merged_data):
|
|
| 89 |
G.add_edges_from(edges)
|
| 90 |
user_ids = sorted(G.nodes())
|
| 91 |
|
| 92 |
-
# Use
|
| 93 |
-
features = torch.
|
| 94 |
-
torch.arange(len(user_ids)).repeat(2, 1),
|
| 95 |
-
torch.ones(len(user_ids)),
|
| 96 |
-
(len(user_ids), len(user_ids))
|
| 97 |
-
)
|
| 98 |
logger.info(f"Created graph with {len(user_ids)} nodes")
|
| 99 |
return G, features, user_ids
|
| 100 |
|
|
@@ -164,7 +159,7 @@ def train_model(model, data, pos_edges, neg_edges, epochs=200, patience=20):
|
|
| 164 |
break
|
| 165 |
|
| 166 |
logger.info("Model training completed")
|
| 167 |
-
return model.to('cpu')
|
| 168 |
|
| 169 |
def get_recommendations(user_id, model, data, G, user_ids, top_k=10):
|
| 170 |
if user_id not in user_ids:
|
|
@@ -187,7 +182,7 @@ def get_recommendations(user_id, model, data, G, user_ids, top_k=10):
|
|
| 187 |
|
| 188 |
def rebuild_model():
|
| 189 |
global G, features, user_ids, pyg_data, trained_model
|
| 190 |
-
logger.info("Starting model rebuild at 3:30 AM")
|
| 191 |
try:
|
| 192 |
merged_data = load_and_preprocess_data()
|
| 193 |
G, features, user_ids = create_graph_dataframe(merged_data)
|
|
@@ -214,7 +209,6 @@ async def get_recommendations_handler(user_id: str = Query(...)):
|
|
| 214 |
|
| 215 |
recommendations = get_recommendations(user_id, trained_model, pyg_data, G, user_ids)
|
| 216 |
|
| 217 |
-
# Stream the response
|
| 218 |
def generate():
|
| 219 |
yield '{"status": "success", "recommendations": ['
|
| 220 |
for i, rec in enumerate(recommendations):
|
|
@@ -229,11 +223,11 @@ async def get_recommendations_handler(user_id: str = Query(...)):
|
|
| 229 |
async def health_check():
|
| 230 |
return {"status": "success", "message": "Recommendation service operational"}
|
| 231 |
|
| 232 |
-
# Scheduler setup
|
| 233 |
scheduler = BackgroundScheduler(timezone="America/Los_Angeles")
|
| 234 |
scheduler.add_job(
|
| 235 |
rebuild_model,
|
| 236 |
-
trigger=CronTrigger(hour=3, minute=30), # Run at 3:30 AM every day
|
| 237 |
id='daily_model_rebuild',
|
| 238 |
replace_existing=True
|
| 239 |
)
|
|
@@ -242,7 +236,7 @@ scheduler.add_job(
|
|
| 242 |
async def startup_event():
|
| 243 |
rebuild_model() # Initial build on startup
|
| 244 |
scheduler.start()
|
| 245 |
-
logger.info("Scheduler started, model will rebuild daily at 3:30 AM")
|
| 246 |
|
| 247 |
@app.on_event("shutdown")
|
| 248 |
async def shutdown_event():
|
|
|
|
| 73 |
followers = fetch_table('followers', 'id, following')
|
| 74 |
users = fetch_table('profiles', 'id')
|
| 75 |
|
|
|
|
| 76 |
follower_dict = {f['id']: f['following'] for f in followers}
|
| 77 |
user_set = {u['id'] for u in users}
|
| 78 |
merged = [
|
|
|
|
| 88 |
G.add_edges_from(edges)
|
| 89 |
user_ids = sorted(G.nodes())
|
| 90 |
|
| 91 |
+
# Use dense identity matrix for features (sparse not supported by SAGEConv)
|
| 92 |
+
features = torch.eye(len(user_ids))
|
|
|
|
|
|
|
|
|
|
|
|
|
| 93 |
logger.info(f"Created graph with {len(user_ids)} nodes")
|
| 94 |
return G, features, user_ids
|
| 95 |
|
|
|
|
| 159 |
break
|
| 160 |
|
| 161 |
logger.info("Model training completed")
|
| 162 |
+
return model.to('cpu')
|
| 163 |
|
| 164 |
def get_recommendations(user_id, model, data, G, user_ids, top_k=10):
|
| 165 |
if user_id not in user_ids:
|
|
|
|
| 182 |
|
| 183 |
def rebuild_model():
|
| 184 |
global G, features, user_ids, pyg_data, trained_model
|
| 185 |
+
logger.info("Starting model rebuild at 3:30 AM Pacific Time")
|
| 186 |
try:
|
| 187 |
merged_data = load_and_preprocess_data()
|
| 188 |
G, features, user_ids = create_graph_dataframe(merged_data)
|
|
|
|
| 209 |
|
| 210 |
recommendations = get_recommendations(user_id, trained_model, pyg_data, G, user_ids)
|
| 211 |
|
|
|
|
| 212 |
def generate():
|
| 213 |
yield '{"status": "success", "recommendations": ['
|
| 214 |
for i, rec in enumerate(recommendations):
|
|
|
|
| 223 |
async def health_check():
|
| 224 |
return {"status": "success", "message": "Recommendation service operational"}
|
| 225 |
|
| 226 |
+
# Scheduler setup with Pacific Time Zone
|
| 227 |
scheduler = BackgroundScheduler(timezone="America/Los_Angeles")
|
| 228 |
scheduler.add_job(
|
| 229 |
rebuild_model,
|
| 230 |
+
trigger=CronTrigger(hour=3, minute=30), # Run at 3:30 AM Pacific Time every day
|
| 231 |
id='daily_model_rebuild',
|
| 232 |
replace_existing=True
|
| 233 |
)
|
|
|
|
| 236 |
async def startup_event():
|
| 237 |
rebuild_model() # Initial build on startup
|
| 238 |
scheduler.start()
|
| 239 |
+
logger.info("Scheduler started, model will rebuild daily at 3:30 AM Pacific Time")
|
| 240 |
|
| 241 |
@app.on_event("shutdown")
|
| 242 |
async def shutdown_event():
|