Romeo David committed on
Commit
6d66f07
·
2 Parent(s): de78570434b4b7

Merge branch 'main' of https://huggingface.co/spaces/roniorque/df_ai_int into pr/7

Browse files
classes/response_executive_summary.py ADDED
@@ -0,0 +1,115 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import requests
3
+ from dotenv import load_dotenv
4
+ import os
5
+ from helper.upload_response import upload_response
6
+ from helper.upload_File import uploadFile
7
+ from pymongo import MongoClient
8
+ from helper.data_field import get_analyst_response
9
+
10
+
11
class ExecutiveSummary:
    """Aggregate every analyst's stored response into one payload, ask the
    configured model endpoint for an executive summary, and persist the
    result via ``upload_response``.

    All work happens from ``__init__`` (``initialize`` then ``row1``), so
    constructing the object performs the network/DB side effects.
    """

    def __init__(self, model_url):
        # model_url: HTTP endpoint of the summary model (Langflow-style API).
        self.uploaded_files = []
        self.file_dict = {}
        self.model_url = model_url
        self.initialize()
        self.row1()

    def initialize(self):
        """Load environment variables from .env (API key, MONGODB_URI)."""
        load_dotenv()

    def request_model(self, payload_txt, headers):
        """POST ``payload_txt`` to the model endpoint and return its text.

        Raises:
            requests.HTTPError: on a non-2xx response.
            KeyError/IndexError: if the response JSON lacks the expected
                nested Langflow shape.
        """
        response = requests.post(self.model_url, json=payload_txt, headers=headers)
        response.raise_for_status()
        output = response.json()
        # Langflow-style response: drill into the first output's text data.
        return output["outputs"][0]["outputs"][0]["results"]["text"]["data"]["text"]

    def fetch_data(self, data_field):
        """Return the most recent stored ``result`` for ``data_field``.

        Returns None when no matching document exists (previously this
        raised TypeError because the missing-document case was unhandled).
        """
        mongodb_uri = os.getenv("MONGODB_URI")
        # Context manager closes the client/socket after the query
        # (the original leaked a new MongoClient on every call).
        with MongoClient(mongodb_uri) as client:
            collection = client.get_database()["df_data"]
            doc = collection.find_one(
                {"data_field": data_field},
                sort=[("timestamp", -1)],  # newest document first
            )
        return None if doc is None else doc["result"]

    def process(self):
        """Send the aggregated payload to the model and store the summary."""
        with st.spinner('Executive Summary...', show_time=True):
            st.write('')
            headers = {
                "Content-Type": "application/json",
                # f-string preserves original behaviour even when the env var
                # is unset (the header value becomes the string "None").
                "x-api-key": f"{os.getenv('x-api-key')}",
            }
            try:
                payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
                payload_txt_model = self.request_model(payload_txt, headers)
                debug_info = {'data_field': 'Executive Summary', 'result': payload_txt_model}
                upload_response(debug_info)
            except Exception as e:
                # Surface the failure instead of silently swallowing it
                # (was `pass`, hiding all network/parsing errors).
                st.error(f"Executive Summary generation failed: {e}")
            st.session_state['analyzing'] = False

    def row1(self):
        """Collect each analyst's response, build the payload, run process()."""
        st.session_state['analyzing'] = False
        self.payload = ""

        self.website_and_tools_data = get_analyst_response("Website and Tools Analyst")
        self.sem_data = get_analyst_response("SEM/PPC Analyst")
        self.seo_data = get_analyst_response("SEO Analyst")
        self.on_page_data = get_analyst_response("On Page Analyst")
        self.off_page_data = get_analyst_response("Off Page Analyst")
        self.social_media_data = get_analyst_response("Social Media Analyst")
        self.content_data = get_analyst_response("Content Analyst")
        self.marketplace_data = get_analyst_response("Marketplace Analyst")  # typo fixed: was marketpalce_data
        self.target_market_data = get_analyst_response("Target Market Analyst")
        # NOTE: these two attribute names were swapped in the original; the
        # dict below kept the labels paired with the right data, so behaviour
        # is unchanged — the names now simply match the analyst they hold.
        self.pull_through_data = get_analyst_response("Pull through offers Analyst")
        self.website_audience_data = get_analyst_response("Website Audience Acquisition Analyst")
        self.lld_data = get_analyst_response("LLD/PM/LN Analyst")
        self.pna_data = get_analyst_response("Content - Process and Assets Analyst")

        analyst_data_dict = {
            "Website and Tools": self.website_and_tools_data,
            "SEM/PPC": self.sem_data,
            "SEO": self.seo_data,
            "On Page": self.on_page_data,
            "Off Page": self.off_page_data,
            "Social Media": self.social_media_data,
            "Content": self.content_data,
            "Marketplace": self.marketplace_data,
            "Target Market": self.target_market_data,
            "Pull through offers": self.pull_through_data,
            "Website Audience Acquisition": self.website_audience_data,
            "LLD/PM/LN": self.lld_data,
            "Content - Process and Assets": self.pna_data,
        }

        for analyst_name, data in analyst_data_dict.items():
            self.payload += f"\n\n--- {analyst_name} Analysis ---\n"
            if isinstance(data, list):
                self.payload += "\n".join(map(str, data))
            else:
                self.payload += str(data)

        self.process()
112
# Script entry point: configure the page layout and render the shared
# file-upload widget. set_page_config must run before other Streamlit calls.
if __name__ == "__main__":
    st.set_page_config(layout="wide")

    upload = uploadFile()
classes/response_snapshot.py ADDED
@@ -0,0 +1,101 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import streamlit as st
2
+ import requests
3
+ from dotenv import load_dotenv
4
+ import os
5
+ from helper.upload_response import upload_response
6
+ from helper.upload_File import uploadFile
7
+ from pymongo import MongoClient
8
+ from helper.data_field import get_analyst_response
9
+ import json
10
+
11
+
12
class Snapshot:
    """Aggregate a subset of analyst responses into one payload, ask the
    configured model endpoint for a snapshot-by-channel analysis, and
    persist the JSON result via ``upload_response``.

    All work happens from ``__init__`` (``initialize`` then ``row1``), so
    constructing the object performs the network/DB side effects.
    """

    def __init__(self, model_url):
        # model_url: HTTP endpoint of the snapshot model (Langflow-style API).
        self.uploaded_files = []
        self.file_dict = {}
        self.model_url = model_url
        self.initialize()
        self.row1()

    def initialize(self):
        """Load environment variables from .env (API key, MONGODB_URI)."""
        load_dotenv()

    def request_model(self, payload_txt, headers):
        """POST ``payload_txt`` to the model endpoint and return the parsed
        JSON payload embedded in its text output.

        Raises:
            requests.HTTPError: on a non-2xx response.
            KeyError/IndexError: if the response JSON lacks the expected
                nested Langflow shape.
            json.JSONDecodeError: if the model text is not valid JSON.
        """
        response = requests.post(self.model_url, json=payload_txt, headers=headers)
        response.raise_for_status()
        output = response.json()
        # Langflow-style response: drill into the first output's text data,
        # which this model returns as a JSON-encoded string.
        text = output["outputs"][0]["outputs"][0]["results"]["text"]["data"]["text"]
        return json.loads(text)

    def fetch_data(self, data_field):
        """Return the most recent stored ``result`` for ``data_field``.

        Returns None when no matching document exists (previously this
        raised TypeError because the missing-document case was unhandled).
        """
        mongodb_uri = os.getenv("MONGODB_URI")
        # Context manager closes the client/socket after the query
        # (the original leaked a new MongoClient on every call).
        with MongoClient(mongodb_uri) as client:
            collection = client.get_database()["df_data"]
            doc = collection.find_one(
                {"data_field": data_field},
                sort=[("timestamp", -1)],  # newest document first
            )
        return None if doc is None else doc["result"]

    def process(self):
        """Send the aggregated payload to the model and store the snapshot."""
        with st.spinner('Snapshot...', show_time=True):
            st.write('')
            headers = {
                "Content-Type": "application/json",
                # f-string preserves original behaviour even when the env var
                # is unset (the header value becomes the string "None").
                "x-api-key": f"{os.getenv('x-api-key')}",
            }
            try:
                payload_txt = {"input_value": self.payload, "output_type": "text", "input_type": "chat"}
                payload_txt_model = self.request_model(payload_txt, headers)
                debug_info = {'data_field': 'Snapshot Analyst', 'result': payload_txt_model}
                upload_response(debug_info)
            except Exception as e:
                # Surface the failure instead of silently swallowing it
                # (was `pass`, hiding all network/parsing errors).
                st.error(f"Snapshot generation failed: {e}")
            st.session_state['analyzing'] = False

    def row1(self):
        """Collect each analyst's response, build the payload, run process()."""
        st.session_state['analyzing'] = False
        self.payload = ""

        self.website_and_tools_data = get_analyst_response("Website and Tools Analyst")
        self.sem_data = get_analyst_response("SEM/PPC Analyst")
        self.seo_data = get_analyst_response("SEO Analyst")
        self.social_media_data = get_analyst_response("Social Media Analyst")
        self.content_data = get_analyst_response("Content Analyst")
        self.marketplace_data = get_analyst_response("Marketplace Analyst")  # typo fixed: was marketpalce_data

        analyst_data_dict = {
            "Website and Tools": self.website_and_tools_data,
            "SEM/PPC": self.sem_data,
            "SEO": self.seo_data,
            "Social Media": self.social_media_data,
            "Content": self.content_data,
            "Marketplace": self.marketplace_data,
        }

        for analyst_name, data in analyst_data_dict.items():
            self.payload += f"\n\n--- {analyst_name} Analysis ---\n"
            if isinstance(data, list):
                self.payload += "\n".join(map(str, data))
            else:
                self.payload += str(data)

        self.process()
98
# Script entry point: configure the page layout and render the shared
# file-upload widget. set_page_config must run before other Streamlit calls.
if __name__ == "__main__":
    st.set_page_config(layout="wide")

    upload = uploadFile()
pages/analyzing_page.py CHANGED
@@ -13,6 +13,8 @@ from classes.response_content import Content
13
  from classes.response_sem_ppc import Sem_PPC
14
  from classes.response_marketplace import Marketplace
15
  from classes.response_target_market import TargetMarket
 
 
16
 
17
  def run_analysis():
18
  # Placeholders for status updates
@@ -27,6 +29,8 @@ def run_analysis():
27
  sem_ppc = st.empty()
28
  marketplace = st.empty()
29
  target_market = st.empty()
 
 
30
 
31
  def run_off_page_analysis():
32
  try:
@@ -137,7 +141,7 @@ def run_analysis():
137
  except Exception as e:
138
  target_market.error(f"Target Market Analysis failed: {e}")
139
  return None
140
-
141
  # Create threads for concurrent execution
142
  off_page_thread = threading.Thread(target=run_off_page_analysis)
143
  on_page_thread = threading.Thread(target=run_on_page_analysis)
@@ -191,7 +195,24 @@ def run_analysis():
191
  target_market_thread.join()
192
 
193
  st.markdown("---")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
194
  st.success("🎉 All analyses completed!") # Final success message
 
195
  # --- Display Button After Completion ---
196
  if st.button("View Results", icon="📃"):
197
  st.switch_page("pages/output.py")
 
13
  from classes.response_sem_ppc import Sem_PPC
14
  from classes.response_marketplace import Marketplace
15
  from classes.response_target_market import TargetMarket
16
+ from classes.response_executive_summary import ExecutiveSummary
17
+ from classes.response_snapshot import Snapshot
18
 
19
  def run_analysis():
20
  # Placeholders for status updates
 
29
  sem_ppc = st.empty()
30
  marketplace = st.empty()
31
  target_market = st.empty()
32
+ executive_summary_status = st.empty()
33
+ snapshot_status = st.empty()
34
 
35
  def run_off_page_analysis():
36
  try:
 
141
  except Exception as e:
142
  target_market.error(f"Target Market Analysis failed: {e}")
143
  return None
144
+
145
  # Create threads for concurrent execution
146
  off_page_thread = threading.Thread(target=run_off_page_analysis)
147
  on_page_thread = threading.Thread(target=run_on_page_analysis)
 
195
  target_market_thread.join()
196
 
197
  st.markdown("---")
198
+ snapshot_status.info("Starting Snapshot by Channel Analysis...")
199
+ try:
200
+ snapshot = Snapshot(os.getenv('Model_Snapshot_by_Channel_Analyst'))
201
+ snapshot_status.success("Snapshot by Channel Analysis completed successfully.")
202
+ except Exception as e:
203
+ snapshot_status.error(f"Snapshot by Channel Analysis failed: {e}")
204
+
205
+ executive_summary_status.info("Starting Executive Summary Analysis...")
206
+ try:
207
+ executive_summary = ExecutiveSummary(os.getenv('Model_Executive_Summary_Analyst'))
208
+ executive_summary_status.success("Executive Summary Analysis completed successfully.")
209
+ except Exception as e:
210
+ executive_summary_status.error(f"Executive Summary Analysis failed: {e}")
211
+ st.success("🎉 All analyses completed!") # Final success message
212
+
213
+
214
  st.success("🎉 All analyses completed!") # Final success message
215
+
216
  # --- Display Button After Completion ---
217
  if st.button("View Results", icon="📃"):
218
  st.switch_page("pages/output.py")
pages/home.py CHANGED
@@ -54,7 +54,6 @@ class DigitalFootprintDashboard:
54
  else:
55
  st.session_state["analyze"] = ''
56
 
57
-
58
  self.analyze_button = st.button("Analyze", icon="✨", use_container_width=True)
59
  if self.analyze_button == True:
60
  st.switch_page("pages/analyzing_page.py")
 
54
  else:
55
  st.session_state["analyze"] = ''
56
 
 
57
  self.analyze_button = st.button("Analyze", icon="✨", use_container_width=True)
58
  if self.analyze_button == True:
59
  st.switch_page("pages/analyzing_page.py")
pages/output.py CHANGED
@@ -145,8 +145,6 @@ def seo_on_page_table(df_data):
145
  else:
146
  st.warning("No data retrieved for analysis.")
147
  # --- End: Loop and display data ---
148
-
149
-
150
 
151
  def display_outputs():
152
  client_name = data_field("Client Name")
@@ -284,11 +282,13 @@ Regardless, it is still a great channel worth investing to improve a business’
284
  st.write("TBD")
285
 
286
  st.markdown("##### WHAT IS THE PULL-THROUGH OFFER?")
287
- st.write(get_analyst_response("Pull through offers Analyst"))
 
288
 
289
 
290
  st.markdown("##### WEBSITE AUDIENCE ACQUISITION")
291
- st.write(get_analyst_response("Website Audience Acquisition Analyst"))
 
292
 
293
  #LLD/PM/LN
294
  lld_data = get_analyst_response("LLD/PM/LN Analyst")
 
145
  else:
146
  st.warning("No data retrieved for analysis.")
147
  # --- End: Loop and display data ---
 
 
148
 
149
  def display_outputs():
150
  client_name = data_field("Client Name")
 
282
  st.write("TBD")
283
 
284
  st.markdown("##### WHAT IS THE PULL-THROUGH OFFER?")
285
+ pull_through_data = get_analyst_response("Pull through offers Analyst")
286
+ st.write(pull_through_data)
287
 
288
 
289
  st.markdown("##### WEBSITE AUDIENCE ACQUISITION")
290
+ website_audience_data = get_analyst_response("Website Audience Acquisition Analyst")
291
+ st.write(website_audience_data)
292
 
293
  #LLD/PM/LN
294
  lld_data = get_analyst_response("LLD/PM/LN Analyst")