prasai-ap committed on
Commit
053521c
·
verified ·
1 Parent(s): f1e431e

Upload 3 files

Browse files
Files changed (1) hide show
  1. app.py +43 -18
app.py CHANGED
@@ -1,3 +1,4 @@
 
1
  import os
2
  from typing import Any
3
  from functools import lru_cache
@@ -30,7 +31,7 @@ EMBEDDING_MODEL = os.getenv(
30
 
31
  def upload_textbook(pdf_path):
32
  if not pdf_path:
33
- return "Choose a PDF first.", {}, gr.update()
34
 
35
  if not BACKEND_URL:
36
  return upload_textbook_locally(pdf_path)
@@ -50,17 +51,17 @@ def upload_textbook(pdf_path):
50
  return (
51
  f"Uploaded {result['filename']} with {result['page_count']} pages "
52
  f"and {result['chunk_count']} chunks.{method_text}",
53
- {},
54
  gr.update(value=""),
55
  )
56
 
57
- return _response_error(response, "Upload failed."), {}, gr.update()
58
  except requests.Timeout:
59
- return "Backend is still processing the PDF. Try a smaller PDF for the demo.", {}, gr.update()
60
  except requests.RequestException as exc:
61
- return f"Could not reach backend: {exc}", {}, gr.update()
62
  except OSError as exc:
63
- return f"Could not read uploaded PDF: {exc}", {}, gr.update()
64
 
65
 
66
  def upload_textbook_locally(pdf_path):
@@ -69,7 +70,7 @@ def upload_textbook_locally(pdf_path):
69
  chunks = chunk_text(extracted["text"])
70
 
71
  if not chunks:
72
- return "No readable text chunks could be created from this PDF.", {}, gr.update()
73
 
74
  embeddings = embed_texts(chunks)
75
  state = {
@@ -86,11 +87,11 @@ def upload_textbook_locally(pdf_path):
86
  f"{state['page_count']} pages and {state['chunk_count']} chunks. "
87
  f"Text extraction: {state['extraction_method']}."
88
  ),
89
- state,
90
  gr.update(value=""),
91
  )
92
  except Exception as exc:
93
- return f"Could not process uploaded PDF in this Space: {exc}", {}, gr.update()
94
 
95
 
96
  def ask_tutor(
@@ -110,7 +111,7 @@ def ask_tutor(
110
  "1. Add a question first.\n2. Then try again.\n3. Use a textbook topic.",
111
  "",
112
  "Waiting for a question.",
113
- {},
114
  )
115
 
116
  if BACKEND_URL:
@@ -123,7 +124,7 @@ def ask_tutor(
123
  question=question,
124
  student_id=student_id,
125
  textbook_context=textbook_context,
126
- textbook_state=textbook_state or {},
127
  )
128
 
129
 
@@ -180,7 +181,7 @@ def format_backend_response(
180
  format_quiz(quiz_questions),
181
  format_sources(data.get("retrieved_sources", [])),
182
  "Answered with the backend RAG workflow.",
183
- state,
184
  )
185
 
186
 
@@ -191,7 +192,7 @@ def grade_quiz(
191
  student_id,
192
  quiz_state,
193
  ):
194
- quiz_state = quiz_state or {}
195
  quiz_id = quiz_state.get("quiz_id")
196
 
197
  if not BACKEND_URL:
@@ -441,7 +442,7 @@ def mock_response(question: str, textbook_context: str) -> tuple[str, str, str,
441
  ]
442
  ),
443
  "Demo fallback is active. Configure BACKEND_URL in Space settings for PDF upload, RAG search, quiz grading, and parent summary.",
444
- {"quiz_questions": quiz_questions},
445
  )
446
 
447
 
@@ -489,7 +490,7 @@ def local_response(
489
  format_quiz(quiz_questions),
490
  format_sources(sources),
491
  "Answered with the Hugging Face Space local PDF workflow.",
492
- quiz_state,
493
  )
494
 
495
 
@@ -768,6 +769,25 @@ def _response_error(response: requests.Response, fallback: str) -> str:
768
  return fallback
769
 
770
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
771
  def truncate(text: str, max_length: int) -> str:
772
  if len(text) <= max_length:
773
  return text
@@ -784,8 +804,8 @@ with gr.Blocks(title=APP_NAME, theme=gr.themes.Soft()) as demo:
784
  """
785
  )
786
 
787
- quiz_state = gr.State({})
788
- textbook_state = gr.State({})
789
 
790
  with gr.Row():
791
  student_id_input = gr.Textbox(
@@ -870,7 +890,8 @@ with gr.Blocks(title=APP_NAME, theme=gr.themes.Soft()) as demo:
870
  status_output,
871
  quiz_state,
872
  ],
873
- fn=lambda question, context: ask_tutor(question, "hf-space-demo", context, {}),
 
874
  cache_examples=False,
875
  )
876
 
@@ -878,6 +899,7 @@ with gr.Blocks(title=APP_NAME, theme=gr.themes.Soft()) as demo:
878
  fn=upload_textbook,
879
  inputs=[pdf_input],
880
  outputs=[upload_output, textbook_state, context_input],
 
881
  )
882
  ask_button.click(
883
  fn=ask_tutor,
@@ -890,16 +912,19 @@ with gr.Blocks(title=APP_NAME, theme=gr.themes.Soft()) as demo:
890
  status_output,
891
  quiz_state,
892
  ],
 
893
  )
894
  grade_button.click(
895
  fn=grade_quiz,
896
  inputs=[answer_1, answer_2, answer_3, student_id_input, quiz_state],
897
  outputs=[grade_output],
 
898
  )
899
  summary_button.click(
900
  fn=parent_summary,
901
  inputs=[student_id_input],
902
  outputs=[summary_output],
 
903
  )
904
 
905
 
 
1
+ import json
2
  import os
3
  from typing import Any
4
  from functools import lru_cache
 
31
 
32
  def upload_textbook(pdf_path):
33
  if not pdf_path:
34
+ return "Choose a PDF first.", "{}", gr.update()
35
 
36
  if not BACKEND_URL:
37
  return upload_textbook_locally(pdf_path)
 
51
  return (
52
  f"Uploaded {result['filename']} with {result['page_count']} pages "
53
  f"and {result['chunk_count']} chunks.{method_text}",
54
+ "{}",
55
  gr.update(value=""),
56
  )
57
 
58
+ return _response_error(response, "Upload failed."), "{}", gr.update()
59
  except requests.Timeout:
60
+ return "Backend is still processing the PDF. Try a smaller PDF for the demo.", "{}", gr.update()
61
  except requests.RequestException as exc:
62
+ return f"Could not reach backend: {exc}", "{}", gr.update()
63
  except OSError as exc:
64
+ return f"Could not read uploaded PDF: {exc}", "{}", gr.update()
65
 
66
 
67
  def upload_textbook_locally(pdf_path):
 
70
  chunks = chunk_text(extracted["text"])
71
 
72
  if not chunks:
73
+ return "No readable text chunks could be created from this PDF.", "{}", gr.update()
74
 
75
  embeddings = embed_texts(chunks)
76
  state = {
 
87
  f"{state['page_count']} pages and {state['chunk_count']} chunks. "
88
  f"Text extraction: {state['extraction_method']}."
89
  ),
90
+ encode_state(state),
91
  gr.update(value=""),
92
  )
93
  except Exception as exc:
94
+ return f"Could not process uploaded PDF in this Space: {exc}", "{}", gr.update()
95
 
96
 
97
  def ask_tutor(
 
111
  "1. Add a question first.\n2. Then try again.\n3. Use a textbook topic.",
112
  "",
113
  "Waiting for a question.",
114
+ "{}",
115
  )
116
 
117
  if BACKEND_URL:
 
124
  question=question,
125
  student_id=student_id,
126
  textbook_context=textbook_context,
127
+ textbook_state=decode_state(textbook_state),
128
  )
129
 
130
 
 
181
  format_quiz(quiz_questions),
182
  format_sources(data.get("retrieved_sources", [])),
183
  "Answered with the backend RAG workflow.",
184
+ encode_state(state),
185
  )
186
 
187
 
 
192
  student_id,
193
  quiz_state,
194
  ):
195
+ quiz_state = decode_state(quiz_state)
196
  quiz_id = quiz_state.get("quiz_id")
197
 
198
  if not BACKEND_URL:
 
442
  ]
443
  ),
444
  "Demo fallback is active. Configure BACKEND_URL in Space settings for PDF upload, RAG search, quiz grading, and parent summary.",
445
+ encode_state({"quiz_questions": quiz_questions}),
446
  )
447
 
448
 
 
490
  format_quiz(quiz_questions),
491
  format_sources(sources),
492
  "Answered with the Hugging Face Space local PDF workflow.",
493
+ encode_state(quiz_state),
494
  )
495
 
496
 
 
769
  return fallback
770
 
771
 
772
def encode_state(state: dict[str, Any]) -> str:
    """Serialize a state dict to a JSON string for storage in a gr.State.

    Non-ASCII characters are kept as-is (``ensure_ascii=False``) so the
    round-trip through the UI stays human-readable.
    """
    encoded = json.dumps(state, ensure_ascii=False)
    return encoded
774
+
775
+
776
def decode_state(state: Any) -> dict[str, Any]:
    """Best-effort inverse of :func:`encode_state`.

    Accepts a dict (returned unchanged), a JSON string produced by
    ``encode_state``, or anything else. Any value that is falsy, fails to
    parse as JSON, or parses to a non-dict yields an empty dict instead of
    raising.
    """
    # Already a dict (e.g. an older in-memory state) — pass it straight through.
    if isinstance(state, dict):
        return state

    if state:
        try:
            parsed = json.loads(str(state))
        except (TypeError, ValueError):
            # Malformed or non-JSON input: treat as "no state".
            parsed = None
        if isinstance(parsed, dict):
            return parsed

    return {}
789
+
790
+
791
  def truncate(text: str, max_length: int) -> str:
792
  if len(text) <= max_length:
793
  return text
 
804
  """
805
  )
806
 
807
+ quiz_state = gr.State("{}")
808
+ textbook_state = gr.State("{}")
809
 
810
  with gr.Row():
811
  student_id_input = gr.Textbox(
 
890
  status_output,
891
  quiz_state,
892
  ],
893
+ fn=lambda question, context: ask_tutor(question, "hf-space-demo", context, "{}"),
894
+ api_name=False,
895
  cache_examples=False,
896
  )
897
 
 
899
  fn=upload_textbook,
900
  inputs=[pdf_input],
901
  outputs=[upload_output, textbook_state, context_input],
902
+ api_name=False,
903
  )
904
  ask_button.click(
905
  fn=ask_tutor,
 
912
  status_output,
913
  quiz_state,
914
  ],
915
+ api_name=False,
916
  )
917
  grade_button.click(
918
  fn=grade_quiz,
919
  inputs=[answer_1, answer_2, answer_3, student_id_input, quiz_state],
920
  outputs=[grade_output],
921
+ api_name=False,
922
  )
923
  summary_button.click(
924
  fn=parent_summary,
925
  inputs=[student_id_input],
926
  outputs=[summary_output],
927
+ api_name=False,
928
  )
929
 
930