from io import BytesIO

import joblib
import numpy as np
import requests
from fastapi import FastAPI, HTTPException
from fastapi.middleware.cors import CORSMiddleware
from pydantic import BaseModel

app = FastAPI()

# Allow cross-origin requests from any host (open CORS policy).
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

MODEL_URL = "https://huggingface.co/GodfreyOwino/NPK_needs_mode2/resolve/main/npk_needs_model.joblib"

# Download the serialized model at startup and deserialize it from memory.
try:
    response = requests.get(MODEL_URL)
    response.raise_for_status()
    model_bytes = BytesIO(response.content)
    model = joblib.load(model_bytes)
    print("Model loaded successfully")
except Exception as e:
    print(f"Error loading model: {e}")
    print(f"Error type: {type(e)}")
    # HTTPException is only meaningful inside a request handler; at import
    # time, fail fast so the server never starts without a model.
    raise RuntimeError("Unable to load the model.") from e


class InputData(BaseModel):
    features: list[float]


@app.post("/predict", tags=["Prediction"])
async def predict(data: InputData):
    try:
        # The model expects a 2-D array: one row per sample.
        input_data = np.array(data.features).reshape(1, -1)
        prediction = model.predict(input_data)
        return {"prediction": prediction.tolist()}
    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Prediction error: {str(e)}")


@app.get("/", tags=["Root"])
async def root():
    return {"message": "NPK Needs Prediction Model API"}
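

# --- Local run helper: a minimal sketch, assuming uvicorn is installed and
# this module is saved as main.py. With the server running, the /predict
# endpoint can be exercised with a request such as:
#   curl -X POST http://127.0.0.1:8000/predict \
#        -H "Content-Type: application/json" \
#        -d '{"features": [6.5, 1.2, 0.8, 25.0]}'
# The feature values above are placeholders; the model's expected feature
# count and order are not documented here.
if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="0.0.0.0", port=8000)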