Upload folder using huggingface_hub
Browse files- .github/workflows/main.yml +0 -1
- Dockerfile +2 -1
- __pycache__/app.cpython-310.pyc +0 -0
- app.py +27 -14
- prediction_output/output.csv +6 -6
- src/pipeline/__pycache__/predict_pipeline.cpython-310.pyc +0 -0
- src/pipeline/__pycache__/train_pipeline.cpython-310.pyc +0 -0
- src/pipeline/predict_pipeline.py +92 -53
.github/workflows/main.yml
CHANGED
|
@@ -28,7 +28,6 @@ jobs:
|
|
| 28 |
run: |
|
| 29 |
docker build -t ${{ secrets.DOCKER_USERNAME }}/dynamic-pricing:latest .
|
| 30 |
docker push ${{ secrets.DOCKER_USERNAME }}/dynamic-pricing:latest
|
| 31 |
-
|
| 32 |
deploy-to-huggingface:
|
| 33 |
runs-on: ubuntu-latest
|
| 34 |
needs: build-and-push
|
|
|
|
| 28 |
run: |
|
| 29 |
docker build -t ${{ secrets.DOCKER_USERNAME }}/dynamic-pricing:latest .
|
| 30 |
docker push ${{ secrets.DOCKER_USERNAME }}/dynamic-pricing:latest
|
|
|
|
| 31 |
deploy-to-huggingface:
|
| 32 |
runs-on: ubuntu-latest
|
| 33 |
needs: build-and-push
|
Dockerfile
CHANGED
|
@@ -17,4 +17,5 @@ COPY . .
|
|
| 17 |
EXPOSE 8080
|
| 18 |
|
| 19 |
# Default command to run the application
|
| 20 |
-
CMD ["uvicorn", "app:app"
|
|
|
|
|
|
| 17 |
EXPOSE 8080
|
| 18 |
|
| 19 |
# Default command to run the application
|
| 20 |
+
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "8080"]
|
| 21 |
+
|
__pycache__/app.cpython-310.pyc
CHANGED
|
Binary files a/__pycache__/app.cpython-310.pyc and b/__pycache__/app.cpython-310.pyc differ
|
|
|
app.py
CHANGED
|
@@ -15,6 +15,7 @@ import pandas as pd
|
|
| 15 |
import sys
|
| 16 |
from src.utils.main_utils.utils import load_object
|
| 17 |
from src.utils.ml_utils.model.estimator import MLModel
|
|
|
|
| 18 |
from dotenv import load_dotenv
|
| 19 |
import pymongo
|
| 20 |
|
|
@@ -51,24 +52,36 @@ async def train_route():
|
|
| 51 |
except Exception as e:
|
| 52 |
raise CustomException(e,sys)
|
| 53 |
|
|
|
|
| 54 |
@app.post("/predict")
|
| 55 |
-
async def predict_route(request: Request,file: UploadFile = File(...)):
|
| 56 |
try:
|
| 57 |
-
df=pd.read_csv(file.file)
|
| 58 |
-
|
| 59 |
-
|
| 60 |
-
|
| 61 |
-
|
| 62 |
-
|
| 63 |
-
|
| 64 |
-
|
| 65 |
-
|
| 66 |
-
df.
|
| 67 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 68 |
return templates.TemplateResponse("table.html", {"request": request, "table": table_html})
|
| 69 |
-
|
| 70 |
except Exception as e:
|
| 71 |
-
|
|
|
|
| 72 |
|
| 73 |
if __name__ == "__main__":
|
| 74 |
app_run(host="localhost",port=8080,debug=True)
|
|
|
|
| 15 |
import sys
|
| 16 |
from src.utils.main_utils.utils import load_object
|
| 17 |
from src.utils.ml_utils.model.estimator import MLModel
|
| 18 |
+
from src.pipeline.predict_pipeline import PredictPipeline
|
| 19 |
from dotenv import load_dotenv
|
| 20 |
import pymongo
|
| 21 |
|
|
|
|
| 52 |
except Exception as e:
|
| 53 |
raise CustomException(e,sys)
|
| 54 |
|
| 55 |
+
|
| 56 |
@app.post("/predict")
async def predict_route(request: Request, file: UploadFile = File(...)):
    """Accept an uploaded CSV, run the pricing model, and render an HTML table.

    The uploaded file must contain every column the trained preprocessor
    expects (see ``required_columns``). Predictions are appended as a
    ``predicted_price`` column, persisted to ``prediction_output/output.csv``,
    and returned rendered through the ``table.html`` template.

    Raises:
        CustomException: wraps any failure (bad CSV, model/preprocessor
            load error, template error) with traceback info from ``sys``.
    """
    # Local import keeps this fix self-contained; os is stdlib.
    import os

    try:
        df = pd.read_csv(file.file)

        required_columns = [
            "InvoiceNo", "StockCode", "Description", "Quantity", "InvoiceDate", "UnitPrice",
            "CustomerID", "Country", "hour", "weekday", "week", "total_sales",
            "peak_period_level", "overall_demand_level", "RecencySegment", "FrequencySegment",
            "MonetarySegment", "country_purchasing_power", "sales_level_by_country",
        ]

        # Name the absent columns so the caller can actually fix the upload,
        # instead of a generic "missing columns" message.
        missing = [col for col in required_columns if col not in df.columns]
        if missing:
            return {"error": f"Missing required input columns: {', '.join(missing)}"}

        predict_pipeline = PredictPipeline()
        predictions = predict_pipeline.predict(df)

        df["predicted_price"] = predictions

        # A fresh container may not ship with the output directory; create it
        # so to_csv does not raise FileNotFoundError.
        os.makedirs("prediction_output", exist_ok=True)
        df.to_csv("prediction_output/output.csv", index=False)

        table_html = df.to_html(classes="table table-striped")

        return templates.TemplateResponse("table.html", {"request": request, "table": table_html})

    except Exception as e:
        raise CustomException(e, sys)
|
| 84 |
+
|
| 85 |
|
| 86 |
if __name__ == "__main__":
|
| 87 |
app_run(host="localhost",port=8080,debug=True)
|
prediction_output/output.csv
CHANGED
|
@@ -1,6 +1,6 @@
|
|
| 1 |
-
|
| 2 |
-
|
| 3 |
-
|
| 4 |
-
|
| 5 |
-
|
| 6 |
-
|
|
|
|
| 1 |
+
InvoiceNo,StockCode,Description,Quantity,InvoiceDate,UnitPrice,CustomerID,Country,hour,weekday,week,total_sales,peak_period_level,overall_demand_level,RecencySegment,FrequencySegment,MonetarySegment,country_purchasing_power,sales_level_by_country,predicted_price
|
| 2 |
+
536365,85123A,WHITE HANGING HEART T-LIGHT HOLDER,6,2010-12-01 08:26:00,2.55,17850.0,United Kingdom,8,2,48,15.3,Very Low,High,Very Low,Very Low,High,High,Very High,3.315773987352596
|
| 3 |
+
536365,71053,WHITE METAL LANTERN,6,2010-12-01 08:26:00,3.39,17850.0,United Kingdom,8,2,48,20.34,Very Low,Medium,Very Low,Very Low,High,High,Very High,4.137054910286077
|
| 4 |
+
536365,84406B,CREAM CUPID HEARTS COAT HANGER,8,2010-12-01 08:26:00,2.75,17850.0,United Kingdom,8,2,48,22.0,Very Low,High,Very Low,Very Low,High,High,Very High,3.5508847676251962
|
| 5 |
+
536365,84029G,KNITTED UNION FLAG HOT WATER BOTTLE,6,2010-12-01 08:26:00,3.39,17850.0,United Kingdom,8,2,48,20.34,Very Low,High,Very Low,Very Low,High,High,Very High,4.438201940085351
|
| 6 |
+
536365,84029E,RED WOOLLY HOTTIE WHITE HEART.,6,2010-12-01 08:26:00,3.39,17850.0,United Kingdom,8,2,48,20.34,Very Low,High,Very Low,Very Low,High,High,Very High,4.438201940085351
|
src/pipeline/__pycache__/predict_pipeline.cpython-310.pyc
CHANGED
|
Binary files a/src/pipeline/__pycache__/predict_pipeline.cpython-310.pyc and b/src/pipeline/__pycache__/predict_pipeline.cpython-310.pyc differ
|
|
|
src/pipeline/__pycache__/train_pipeline.cpython-310.pyc
CHANGED
|
Binary files a/src/pipeline/__pycache__/train_pipeline.cpython-310.pyc and b/src/pipeline/__pycache__/train_pipeline.cpython-310.pyc differ
|
|
|
src/pipeline/predict_pipeline.py
CHANGED
|
@@ -1,56 +1,95 @@
|
|
| 1 |
-
|
| 2 |
-
|
| 3 |
-
|
| 4 |
-
|
| 5 |
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
|
| 9 |
|
| 10 |
-
|
| 11 |
-
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
|
| 16 |
-
|
| 17 |
-
|
| 18 |
-
|
| 19 |
-
|
| 20 |
-
|
|
|
|
|
|
|
| 21 |
|
| 22 |
-
|
| 23 |
-
|
| 24 |
-
|
| 25 |
-
|
| 26 |
-
|
| 27 |
-
|
| 28 |
-
|
| 29 |
-
|
| 30 |
-
|
| 31 |
-
|
| 32 |
-
|
| 33 |
-
|
| 34 |
-
|
| 35 |
-
|
| 36 |
-
|
| 37 |
-
|
| 38 |
-
|
| 39 |
-
|
| 40 |
-
|
| 41 |
-
|
| 42 |
-
|
| 43 |
-
|
| 44 |
-
|
| 45 |
-
|
| 46 |
-
|
| 47 |
-
|
| 48 |
-
|
| 49 |
-
|
| 50 |
-
|
| 51 |
-
|
| 52 |
-
|
| 53 |
-
|
| 54 |
-
|
| 55 |
-
|
| 56 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import sys
|
| 2 |
+
import pandas as pd
|
| 3 |
+
from src.exception.exception import CustomException
|
| 4 |
+
from src.utils.main_utils.utils import load_object
|
| 5 |
|
| 6 |
+
class PredictPipeline:
    """Runs inference using the persisted model and preprocessor artifacts."""

    # Paths to the serialized artifacts produced by the training pipeline.
    MODEL_PATH = "final_model/model.pkl"
    PREPROCESSOR_PATH = "final_model/preprocessor.pkl"

    def __init__(self):
        pass

    def predict(self, features):
        """Return model predictions for a feature DataFrame.

        Args:
            features: raw input rows; must match the schema the saved
                preprocessor was fitted on.

        Returns:
            The model's predictions for the transformed features.

        Raises:
            CustomException: wraps any load/transform/predict failure.
        """
        try:
            # Artifacts are loaded per call, so a retrained model on disk is
            # picked up without restarting the service.
            model = load_object(file_path=self.MODEL_PATH)
            preprocessor = load_object(file_path=self.PREPROCESSOR_PATH)

            transformed = preprocessor.transform(features)
            return model.predict(transformed)
        except Exception as exc:
            raise CustomException(exc, sys)
|
| 23 |
|
| 24 |
+
class CustomData:
    """Container for one transaction row, convertible to a pandas DataFrame.

    Field names mirror the columns the prediction pipeline expects, so the
    frame produced by :meth:`get_data_as_data_frame` can be fed straight into
    ``PredictPipeline.predict``.
    """

    # Single source of truth for field order; drives both attribute storage
    # and DataFrame column order.
    _FIELDS = (
        "InvoiceNo", "StockCode", "Description", "Quantity", "InvoiceDate",
        "UnitPrice", "CustomerID", "Country", "hour", "weekday", "week",
        "total_sales", "peak_period_level", "overall_demand_level",
        "RecencySegment", "FrequencySegment", "MonetarySegment",
        "country_purchasing_power", "sales_level_by_country",
        "adjusted_unit_price",
    )

    def __init__(
        self,
        InvoiceNo: int,
        StockCode: str,
        Description: str,
        Quantity: int,
        InvoiceDate: str,
        UnitPrice: float,
        CustomerID: float,
        Country: str,
        hour: int,
        weekday: int,
        week: int,
        total_sales: float,
        peak_period_level: str,
        overall_demand_level: str,
        RecencySegment: str,
        FrequencySegment: str,
        MonetarySegment: str,
        country_purchasing_power: str,
        sales_level_by_country: str,
        adjusted_unit_price: float,
    ):
        # Bind every argument to the attribute of the same name.
        args = locals()
        for field in self._FIELDS:
            setattr(self, field, args[field])

    def get_data_as_data_frame(self):
        """Return this record as a single-row DataFrame.

        Raises:
            CustomException: wraps any failure during frame construction.
        """
        try:
            data = {field: [getattr(self, field)] for field in self._FIELDS}
            return pd.DataFrame(data)
        except Exception as exc:
            raise CustomException(exc, sys)
|