File size: 1,695 Bytes
1d3834b
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
import tensorflow as tf
import numpy as np
from tensorflow.keras.models import load_model
from tensorflow.keras.preprocessing.text import tokenizer_from_json
from tensorflow.keras.preprocessing.sequence import pad_sequences
import json
import pickle
from fastapi import FastAPI, HTTPException
from pydantic import BaseModel

# FastAPI application serving predictions from a pre-trained Keras text classifier.
app = FastAPI(title="News Source Classifier")

# Load all model artifacts once at import time so every request reuses them.
# Any failure aborts startup (re-raise after logging) instead of serving a
# half-initialized API.
try:
    # Trained Keras model; consumes padded integer sequences (see /predict).
    model = load_model('news_classifier.h5')
    
    # Keras tokenizer restored from its JSON export; maps words -> integer ids
    # using the training-time vocabulary.
    with open('tokenizer.json') as f:
        tokenizer_data = json.load(f)
        tokenizer = tokenizer_from_json(tokenizer_data)
        
    # NOTE(review): `vectorizer` is loaded here but never referenced elsewhere
    # in this file — confirm whether it is still needed or is dead weight.
    # SECURITY: pickle.load can execute arbitrary code; only load trusted files.
    with open('vectorizer.pkl', 'rb') as f:
        vectorizer = pickle.load(f)
except Exception as e:
    print(f"Error loading model: {str(e)}")
    raise

class PredictionRequest(BaseModel):
    """Request body for POST /predict: the raw article text to classify."""
    # Raw input text; no length or content constraints are enforced here.
    text: str

class PredictionResponse(BaseModel):
    """Response from POST /predict: predicted outlet plus the model's score."""
    # Predicted outlet label: 'foxnews' or 'nbc' (see predict()).
    source: str
    # Highest class score from model.predict — presumably a probability
    # in [0, 1] if the output layer is softmax/sigmoid; confirm with the model.
    confidence: float

@app.post("/predict", response_model=PredictionResponse)
async def predict(request: PredictionRequest):
    """Classify an article's text as coming from 'foxnews' or 'nbc'.

    Tokenizes the text with the training-time vocabulary, pads it to the
    model's expected sequence length, runs inference, and returns the
    argmax class together with its score.

    Raises:
        HTTPException(400): if 'text' is empty or whitespace-only.
        HTTPException(500): on any tokenization/inference failure.
    """
    # Reject empty input up front: an empty sequence pads to an all-zero
    # vector and would produce a meaningless prediction.
    if not request.text or not request.text.strip():
        raise HTTPException(status_code=400, detail="'text' must be non-empty")

    try:
        # Words -> integer ids; unknown words are handled by the tokenizer.
        sequence = tokenizer.texts_to_sequences([request.text])
        # maxlen=100 must match the sequence length used at training time.
        padded = pad_sequences(sequence, maxlen=100)

        prediction = model.predict(padded)
        # Highest class score — assumes the output is a score/probability
        # per class (e.g. softmax) — TODO confirm against the model.
        confidence = float(np.max(prediction))

        # Class 0 -> foxnews, anything else -> nbc.
        # NOTE(review): argmax assumes a multi-unit output layer; a single
        # sigmoid unit would make argmax always 0 — verify the architecture.
        predicted_class = int(np.argmax(prediction))
        source = 'foxnews' if predicted_class == 0 else 'nbc'

        return PredictionResponse(
            source=source,
            confidence=confidence
        )
    except HTTPException:
        # Let deliberate HTTP errors propagate instead of wrapping them in a 500.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))

@app.get("/")
async def root():
    """Landing endpoint: briefly describes the API and how to call it."""
    payload = {
        "message": "News Source Classifier API",
        "usage": "Make a POST request to /predict with a JSON payload containing 'text' field",
    }
    return payload