from fastapi import FastAPI, UploadFile, File, HTTPException
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing import image
import numpy as np
from PIL import Image
import io

app = FastAPI(title="OralScan Model API")

# Class labels in the order the model's output layer was trained on.
# Index i of the softmax output corresponds to CLASS_NAMES[i].
CLASS_NAMES = [
    "Oral Homogenous Leukoplakia",
    "Oral Non-Homogenous Leukoplakia",
    "Other Oral White Lesions",
]

# Load the model globally at import time (simpler method for HF Spaces).
# On failure we keep the app alive with model=None so the health route
# can still report the problem instead of the container crash-looping.
try:
    model = load_model("model.keras")
    print("✅ MobileNetV2 model loaded successfully!")
except Exception as e:
    print(f"❌ Failed to load model: {e}")
    model = None


def _preprocess_image(contents: bytes) -> np.ndarray:
    """Decode raw image bytes into a (1, 224, 224, 3) float batch in [0, 1].

    Raises whatever PIL raises on undecodable input; the caller converts
    that into an HTTP 400 response.
    """
    img = Image.open(io.BytesIO(contents)).convert("RGB")
    img = img.resize((224, 224))  # MobileNetV2 input size
    img_array = image.img_to_array(img)
    img_array = np.expand_dims(img_array, axis=0)  # add batch dimension
    return img_array / 255.0  # scale pixels to [0, 1]


@app.get("/")
def home():
    """Health-check route; reports whether the model loaded at startup."""
    if model is None:
        return {"message": "API is running but model failed to load"}
    return {"message": "OralScan Model API is running! Upload image to /predict"}


@app.post("/predict")
async def predict(file: UploadFile = File(...)):
    """Classify an uploaded oral-lesion image.

    Returns the argmax class index, its human-readable name, and the
    confidence as a percentage rounded to 2 decimals. Responds 500 if
    the model never loaded and 400 if the upload cannot be processed.
    """
    if model is None:
        raise HTTPException(status_code=500, detail="Model failed to load. Please check logs.")
    try:
        contents = await file.read()
        img_array = _preprocess_image(contents)

        predictions = model.predict(img_array, verbose=0)
        predicted_class = int(np.argmax(predictions[0]))
        confidence = float(np.max(predictions[0]) * 100)

        return {
            "predicted_class": predicted_class,
            "class_name": CLASS_NAMES[predicted_class],
            "confidence": round(confidence, 2),
            "message": "Prediction successful",
        }
    except Exception as e:
        raise HTTPException(status_code=400, detail=f"Error processing image: {str(e)}")