chkp-talexm committed
Commit · fa91fe4
1 Parent(s): 1878895

Updating placeholders

- app.py +2 -0
- model-loader.py +0 -75
- modelConnector.py +104 -0
app.py CHANGED
@@ -3,6 +3,8 @@ import pandas as pd
 import joblib
 from huggingface_hub import hf_hub_download
 
+from modelConnector import ModelConnector
+
 # ===========================
 # LOAD MODEL & DATASET
 # ===========================
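The hunk above only adds the import; the rest of app.py is not shown in this commit. A minimal usage sketch of how the app might drive the new connector, assuming it relies on the class's public methods (the feature names below are hypothetical, not taken from the dataset):

# Hypothetical sketch, not part of the commit.
from modelConnector import ModelConnector

connector = ModelConnector()        # downloads rf_model.pkl from the Hub if it exists
if connector.model is None:
    connector.train_model()         # otherwise train and upload a fresh RandomForest

sample = {"feature_a": 1, "feature_b": 0.5}   # hypothetical feature columns
print(connector.predict(sample))    # expected to return the is_click label as an int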
model-loader.py DELETED
@@ -1,75 +0,0 @@
-import os
-import joblib
-import pandas as pd
-from huggingface_hub import hf_hub_download, HfApi
-
-
-app = Flask(__name__)
-
-# Hugging Face model & dataset repos
-MODEL_REPO = "taimax13/is_click_predictor"
-MODEL_FILENAME = "rf_model.pkl"
-DATA_REPO = "taimax13/is_click_data"
-LOCAL_MODEL_PATH = f"models/{MODEL_FILENAME}"
-
-# Hugging Face API
-api = HfApi()
-
-
-# ===========================
-# CHECK IF MODEL EXISTS
-# ===========================
-
-@app.route("/check_model", methods=["GET"])
-def check_model():
-    """Check if a model exists on Hugging Face"""
-    try:
-        model_path = hf_hub_download(repo_id=MODEL_REPO, filename=MODEL_FILENAME)
-        return jsonify({"status": "found", "message": "Model exists", "path": model_path})
-    except Exception:
-        return jsonify({"status": "not_found", "message": "Model not found, needs training"})
-
-
-# ===========================
-# TRAIN A NEW MODEL
-# ===========================
-
-@app.route("/train", methods=["POST"])
-def train():
-    """Train a new model and upload it to Hugging Face"""
-    try:
-        # Load training dataset
-        train_data_path = hf_hub_download(repo_id=DATA_REPO, filename="train_dataset_full.csv")
-        train_data = pd.read_csv(train_data_path)
-
-        X_train = train_data.drop(columns=["is_click"])
-        y_train = train_data["is_click"]
-
-        # Train model
-        models = train_models(X_train, y_train)
-        rf_model = models["RandomForest"]
-
-        # Save locally
-        os.makedirs("models", exist_ok=True)
-        joblib.dump(rf_model, LOCAL_MODEL_PATH)
-
-        # Upload to Hugging Face
-        api.upload_file(
-            path_or_fileobj=LOCAL_MODEL_PATH,
-            path_in_repo=MODEL_FILENAME,
-            repo_id=MODEL_REPO,
-        )
-
-        return jsonify({"status": "success", "message": "Model trained and uploaded!"})
-
-    except Exception as e:
-        return jsonify({"status": "error", "message": str(e)})
-
-
-# ===========================
-# RETRAIN EXISTING MODEL
-# ===========================
-
-@app.route("/retrain", methods=["POST"])
-def retrain():
-    """Retrain the model using
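The deleted module exposed this logic as Flask routes but never imported Flask, jsonify, or train_models, so it could not run as written. If the HTTP endpoints are still wanted, a minimal sketch of how they could be rebuilt on top of the new ModelConnector (added below) might look like this; the route paths mirror the deleted file, everything else is an assumption:

# Hypothetical replacement for the deleted routes, delegating to ModelConnector.
from flask import Flask, jsonify
from modelConnector import ModelConnector

app = Flask(__name__)
connector = ModelConnector()

@app.route("/check_model", methods=["GET"])
def check_model():
    found = connector.check_model_exists()
    return jsonify({"status": "found" if found else "not_found"})

@app.route("/train", methods=["POST"])
def train():
    return jsonify({"message": connector.train_model()})

@app.route("/retrain", methods=["POST"])
def retrain():
    return jsonify({"message": connector.retrain_model()})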
modelConnector.py ADDED
@@ -0,0 +1,104 @@
+import os
+import joblib
+import pandas as pd
+from huggingface_hub import hf_hub_download, HfApi
+from model_trainer import train_models  # Assumes model_trainer.py exists with train_models function
+
+# Hugging Face Model & Dataset Information
+MODEL_REPO = "taimax13/is_click_predictor"
+MODEL_FILENAME = "rf_model.pkl"
+DATA_REPO = "taimax13/is_click_data"
+LOCAL_MODEL_PATH = f"models/{MODEL_FILENAME}"
+
+# Hugging Face API
+api = HfApi()
+
+
+class ModelConnector:
+    def __init__(self):
+        """Initialize model connector and check if model exists."""
+        os.makedirs("models", exist_ok=True)
+        self.model = self.load_model()
+
+    def check_model_exists(self):
+        """Check if the model exists on Hugging Face."""
+        try:
+            hf_hub_download(repo_id=MODEL_REPO, filename=MODEL_FILENAME)
+            return True
+        except Exception:
+            return False
+
+    def load_model(self):
+        """Download and load the model from Hugging Face."""
+        if self.check_model_exists():
+            model_path = hf_hub_download(repo_id=MODEL_REPO, filename=MODEL_FILENAME)
+            return joblib.load(model_path)
+        return None
+
+    def train_model(self):
+        """Train a new model and upload it to Hugging Face."""
+        try:
+            # Load dataset
+            train_data_path = hf_hub_download(repo_id=DATA_REPO, filename="train_dataset_full.csv")
+            train_data = pd.read_csv(train_data_path)
+
+            X_train = train_data.drop(columns=["is_click"])
+            y_train = train_data["is_click"]
+
+            # Train model
+            models = train_models(X_train, y_train)
+            rf_model = models["RandomForest"]
+
+            # Save locally
+            joblib.dump(rf_model, LOCAL_MODEL_PATH)
+
+            # Upload to Hugging Face
+            api.upload_file(
+                path_or_fileobj=LOCAL_MODEL_PATH,
+                path_in_repo=MODEL_FILENAME,
+                repo_id=MODEL_REPO,
+            )
+
+            self.model = rf_model  # Update instance with trained model
+            return "Model trained and uploaded successfully!"
+
+        except Exception as e:
+            return f"Error during training: {str(e)}"
+
+    def retrain_model(self):
+        """Retrain the existing model with new data."""
+        try:
+            # Load dataset
+            train_data_path = hf_hub_download(repo_id=DATA_REPO, filename="train_dataset_full.csv")
+            train_data = pd.read_csv(train_data_path)
+
+            X_train = train_data.drop(columns=["is_click"])
+            y_train = train_data["is_click"]
+
+            if self.model is None:
+                return "No existing model found. Train a new model first."
+
+            # Retrain the model
+            self.model.fit(X_train, y_train)
+
+            # Save & upload retrained model
+            joblib.dump(self.model, LOCAL_MODEL_PATH)
+            api.upload_file(
+                path_or_fileobj=LOCAL_MODEL_PATH,
+                path_in_repo=MODEL_FILENAME,
+                repo_id=MODEL_REPO,
+            )
+
+            return "Model retrained and uploaded successfully!"
+
+        except Exception as e:
+            return f"Error during retraining: {str(e)}"
+
+    def predict(self, input_data):
+        """Make predictions using the loaded model."""
+        if self.model is None:
+            return "No model found. Train the model first."
+
+        input_df = pd.DataFrame([input_data])
+        prediction = self.model.predict(input_df)[0]
+        return int(prediction)
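modelConnector.py imports train_models from a model_trainer module that is not part of this commit; the in-code comment itself flags it as an assumption. For the connector to work, train_models must accept (X_train, y_train) and return a dict containing a "RandomForest" entry. A minimal sketch of such a module, assuming scikit-learn as the underlying library:

# model_trainer.py (hypothetical sketch, not included in this commit)
from sklearn.ensemble import RandomForestClassifier

def train_models(X_train, y_train):
    """Train the models the connector expects, keyed by name."""
    rf = RandomForestClassifier(n_estimators=100, random_state=42)
    rf.fit(X_train, y_train)
    return {"RandomForest": rf}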