MohamedTry committed on
Commit
84163d0
·
verified ·
1 Parent(s): f55c904

Upload 3 files

Browse files
Files changed (3) hide show
  1. Dockerfile +13 -0
  2. app.py +47 -0
  3. requirements.txt +4 -0
Dockerfile ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Container image for the FastAPI cancer-type classifier service.
FROM python:3.10

WORKDIR /app

# Copy requirements first so the dependency-install layer is cached
# across code-only changes.
COPY requirements.txt .

RUN pip install --no-cache-dir -r requirements.txt

COPY . .

# NOTE(review): 7860 is the port conventionally used by Hugging Face Spaces.
EXPOSE 7860

# Serve the FastAPI app defined in app.py as module "app", attribute "app".
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
app.py ADDED
@@ -0,0 +1,47 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from fastapi import FastAPI
2
+ from pydantic import BaseModel
3
+ from transformers import AutoTokenizer, AutoModelForSequenceClassification
4
+ import torch
5
+
6
# FastAPI application instance; uvicorn serves this as "app:app".
app = FastAPI()

# Hugging Face Hub id of the sequence-classification checkpoint to serve.
MODEL_NAME = "kamalkraj/distilbiobert_cancer_classification"

# Loaded once at import time; downloads the weights on first run.
tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
model = AutoModelForSequenceClassification.from_pretrained(MODEL_NAME)

# Human-readable class names, indexed by the model's predicted label id.
# NOTE(review): assumes this ordering matches the checkpoint's id2label
# mapping — confirm against model.config.id2label.
LABELS = [
    "breast_cancer",
    "lung_cancer",
    "prostate_cancer",
    "colon_cancer",
    "lymphoma",
    "melanoma",
    "thyroid_cancer",
    "kidney_cancer",
    "pancreatic_cancer",
    "ovarian_cancer",
    "cervical_cancer",
    "brain_tumor"
]
27
+
28
class Input(BaseModel):
    """Request body for the /predict endpoint."""
    # Raw text to classify.
    text: str
30
+
31
+ @app.get("/")
32
+ def home():
33
+ return {"status": "Cancer Type Classifier is running!"}
34
+
35
+ @app.post("/predict")
36
+ def predict(data: Input):
37
+ inputs = tokenizer(data.text, return_tensors="pt", truncation=True)
38
+ outputs = model(**inputs)
39
+
40
+ probs = torch.nn.functional.softmax(outputs.logits, dim=1)
41
+ label_id = torch.argmax(probs).item()
42
+ confidence = float(torch.max(probs))
43
+
44
+ return {
45
+ "prediction": LABELS[label_id],
46
+ "confidence": confidence
47
+ }
requirements.txt ADDED
@@ -0,0 +1,4 @@
 
 
 
 
 
1
# Runtime dependencies for the FastAPI classifier service.
# NOTE(review): versions are unpinned — consider pinning for reproducible builds.
fastapi
uvicorn
transformers
torch