# Final system stack verification script.
# Checks two components end to end: the SciSpaCy medical NLP model and the
# local TinyLlama LLM service. Intended to be run directly as a script.
# --- Imports (stdlib / third-party / local) ---
import asyncio
import logging

import spacy  # noqa: F401 -- presumably imported to fail fast if spaCy is absent; TODO confirm

from app.services.llm_service import get_llm_service
from app.services.spacy_medical_nlp import get_spacy_nlp

# Configure logging: plain "LEVEL: message" lines for readable console output.
logging.basicConfig(level=logging.INFO, format='%(levelname)s: %(message)s')
logger = logging.getLogger("FINAL_VERIFY")
async def verify_stack():
    """Smoke-test the deployed stack: SciSpaCy medical NLP and the local LLM.

    Prints human-readable pass/fail markers for each component. Each check is
    wrapped in its own try/except so a failure in the NLP step does not hide
    the LLM result (and vice versa); this function never raises.
    """
    print("\n" + "=" * 60)
    print("🚀 FINAL SYSTEM STACK VERIFICATION")
    print("=" * 60)

    # --- 1. NLP Verification (SciSpaCy) ---
    print("\n🧠 1. Verifying Medical NLP (SciSpaCy)...")
    try:
        nlp_service = get_spacy_nlp()
        # Heuristic: SciSpaCy models carry "sci" in their lang/name metadata.
        if nlp_service.nlp and "sci" in nlp_service.nlp.meta.get("lang", "") + nlp_service.nlp.meta.get("name", ""):
            print("   ✅ SciSpaCy Model Found: en_core_sci_md")
        else:
            print(f"   ℹ️ Model loaded: {nlp_service.nlp.meta.get('name') if nlp_service.nlp else 'None'}")

        text = "Patient prescribed 100mg Aspirin for severe hypertension."
        entities = nlp_service.extract_medical_entities(text)
        if entities:
            print(f"   ✅ Extraction Success: {entities}")
        else:
            print("   ⚠️ No entities extracted (Check model type)")
    except Exception as e:
        # Best-effort check: report and continue to the LLM verification.
        print(f"   ❌ NLP Setup Failed: {e}")

    # --- 2. LLM Verification (TinyLlama) ---
    print("\n🤖 2. Verifying Local LLM (TinyLlama)...")
    try:
        llm = get_llm_service()
        prompt = "Explain fever in one sentence."
        print(f"   📤 Prompt: '{prompt}'")
        response = await llm.generate_response(prompt, system_prompt="You are a doctor.")
        print(f"   📥 Response: {response.strip()}")
        # NOTE(review): assumes the service's canned fallback text contains
        # the word "apologize" while a real completion does not -- TODO confirm.
        if response and "apologize" not in response.lower():
            print("   ✅ Local Inference Success!")
        else:
            print("   ⚠️ Fallback Triggered (Check LLM logs)")
    except Exception as e:
        print(f"   ❌ LLM Inference Failed: {e}")

    print("\n" + "=" * 60)
| if __name__ == "__main__": | |
| asyncio.run(verify_stack()) | |