ak0601 committed on
Commit
cb4b4a2
·
verified ·
1 Parent(s): f4fa05b

Upload 3 files

Browse files
Files changed (3) hide show
  1. Dockerfile +13 -0
  2. app.py +75 -0
  3. requirements.txt +45 -0
Dockerfile ADDED
@@ -0,0 +1,13 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# Base image: Python 3.9 slim-less official image
FROM python:3.9

# Run as a non-root user; uid 1000 matches the Hugging Face Spaces convention
# (presumably this image targets a Space — TODO confirm)
RUN useradd -m -u 1000 user
USER user
# Make user-level pip installs (pip install --user) resolvable on PATH
ENV PATH="/home/user/.local/bin:$PATH"

WORKDIR /app

# Copy and install dependencies before the source so Docker layer caching
# skips the pip install on code-only changes
COPY --chown=user ./requirements.txt requirements.txt
RUN pip install --no-cache-dir --upgrade -r requirements.txt

# Copy the application source
COPY --chown=user . /app
# Serve the FastAPI app (app.py exposes `app`) on port 7860
CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
app.py ADDED
@@ -0,0 +1,75 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import pickle
2
+ import pandas as pd
3
+ from sklearn.linear_model import LogisticRegression
4
+ from sklearn.metrics import classification_report, accuracy_score
5
+ from sklearn.model_selection import train_test_split
6
+ from fastapi import FastAPI, UploadFile, File, HTTPException
7
+ from pydantic import BaseModel
8
+ import io
9
+
10
# FastAPI application instance, served by uvicorn (see Dockerfile / __main__).
app = FastAPI()
# Uploaded training DataFrame: set by /upload, consumed by /train.
# NOTE(review): module-level mutable state — not shared across multiple
# uvicorn workers; fine for a single-process demo.
data = None
12
+
13
# Function to train the model
def train_aut(data):
    """Train a logistic-regression downtime classifier and persist it.

    Parameters
    ----------
    data : pandas.DataFrame
        Must contain 'Temperature', 'Run_Time' and 'Downtime_Flag' columns.
        'Downtime_Flag' holds 'Yes'/'No' labels (or, if already mapped, 1/0).

    Returns
    -------
    tuple
        (accuracy, f1-score of the positive class) measured on a held-out
        20% split (fixed random_state=42 for reproducibility).

    Side effects: writes the fitted model to 'model.pkl' in the working dir.
    """
    # Work on a copy: the previous version mutated the caller's DataFrame in
    # place, and re-applying .map({'Yes': 1, 'No': 0}) to an already-numeric
    # column turned every label into NaN, so a second call to /train crashed.
    df = data.copy()
    if df['Downtime_Flag'].dtype == object:
        df['Downtime_Flag'] = df['Downtime_Flag'].map({'Yes': 1, 'No': 0})
    X = df[['Temperature', 'Run_Time']]
    y = df['Downtime_Flag']
    X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2, random_state=42)
    model = LogisticRegression()
    model.fit(X_train, y_train)
    # Persist the fitted model for later /predict calls.
    with open('model.pkl', 'wb') as file:
        pickle.dump(model, file)
    y_pred = model.predict(X_test)
    accuracy = accuracy_score(y_test, y_pred)
    f1 = classification_report(y_test, y_pred, output_dict=True)['1']['f1-score']
    return accuracy, f1
27
+
28
# Function to make predictions
def predict_aut(temp, run_time):
    """Load the persisted classifier and score one (temperature, run-time) pair.

    Returns 'Yes' when downtime is predicted, otherwise 'No'.
    Raises HTTPException(400) when 'model.pkl' does not exist yet.
    """
    # The model file only exists after /train has been called at least once.
    try:
        with open('model.pkl', 'rb') as fh:
            model = pickle.load(fh)
    except FileNotFoundError:
        raise HTTPException(status_code=400, detail="Model not trained. Please upload data and train the model first.")
    prediction = model.predict([[temp, run_time]])
    return 'Yes' if prediction[0] == 1 else 'No'
38
+
39
# Pydantic model for prediction input
class PredictionInput(BaseModel):
    """Request body schema for POST /predict."""

    # Machine temperature reading (units not specified in SOURCE — TODO confirm)
    Temperature: float
    # Machine run time (units not specified in SOURCE — TODO confirm)
    Run_Time: float
43
+
44
@app.post("/upload")
async def upload(file: UploadFile = File(...)):
    """Accept a CSV upload and cache it as the module-level training dataset."""
    global data
    try:
        raw = await file.read()
        # Decode to text and parse in memory; any parse/decode failure -> 400.
        buffer = io.StringIO(raw.decode("utf-8"))
        data = pd.read_csv(buffer)
    except Exception as e:
        raise HTTPException(status_code=400, detail=f"Error reading file: {str(e)}")
    return {"message": "File uploaded successfully."}
53
+
54
@app.post("/train")
def train():
    """Train the classifier on the previously uploaded dataset.

    Returns the training metrics; 400 if no dataset has been uploaded,
    500 if training itself fails.
    """
    global data
    # Guard clause: /upload must have run first.
    if data is None:
        raise HTTPException(status_code=400, detail="No data uploaded. Please upload a dataset first.")
    try:
        accuracy, f1 = train_aut(data)
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error during training: {str(e)}")
    return {"message": "Model trained successfully.", "accuracy": accuracy, "f1_score": f1}
64
+
65
@app.post("/predict")
def predict(input_data: PredictionInput):
    """Predict downtime for a single observation.

    Returns {"Downtime": "Yes"|"No"}; 400 if the model has not been trained,
    500 on any other prediction failure.
    """
    try:
        result = predict_aut(input_data.Temperature, input_data.Run_Time)
        return {"Downtime": result}
    except HTTPException:
        # Bug fix: predict_aut raises HTTPException(400) when the model is not
        # trained; the blanket `except Exception` below used to swallow it and
        # re-raise as a 500. Let it propagate with its original status code.
        raise
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error during prediction: {str(e)}")
72
+
73
if __name__ == "__main__":
    # Local development entry point on port 8000; the Docker image instead
    # launches uvicorn on port 7860 via its CMD.
    import uvicorn
    uvicorn.run(app, host="0.0.0.0", port=8000)
requirements.txt ADDED
@@ -0,0 +1,45 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ annotated-types==0.7.0
2
+ anyio==4.8.0
3
+ certifi==2024.12.14
4
+ click==8.1.8
5
+ colorama==0.4.6
6
+ dnspython==2.7.0
7
+ email_validator==2.2.0
8
+ exceptiongroup==1.2.2
9
+ fastapi==0.115.6
10
+ fastapi-cli==0.0.7
11
+ h11==0.14.0
12
+ httpcore==1.0.7
13
+ httptools==0.6.4
14
+ httpx==0.28.1
15
+ idna==3.10
16
+ Jinja2==3.1.5
17
+ joblib==1.4.2
18
+ markdown-it-py==3.0.0
19
+ MarkupSafe==3.0.2
20
+ mdurl==0.1.2
21
+ numpy==2.2.2
22
+ pandas==2.2.3
23
+ pydantic==2.10.5
24
+ pydantic_core==2.27.2
25
+ Pygments==2.19.1
26
+ python-dateutil==2.9.0.post0
27
+ python-dotenv==1.0.1
28
+ python-multipart==0.0.20
29
+ pytz==2024.2
30
+ PyYAML==6.0.2
31
+ rich==13.9.4
32
+ rich-toolkit==0.13.2
33
+ scikit-learn==1.6.1
34
+ scipy==1.15.1
35
+ shellingham==1.5.4
36
+ six==1.17.0
37
+ sniffio==1.3.1
38
+ starlette==0.41.3
39
+ threadpoolctl==3.5.0
40
+ typer==0.15.1
41
+ typing_extensions==4.12.2
42
+ tzdata==2024.2
43
+ uvicorn==0.34.0
44
+ watchfiles==1.0.4
45
+ websockets==14.2