Update main.py
main.py (CHANGED)
@@ -1,11 +1,10 @@
 from fastapi import FastAPI, File, UploadFile, Form
-from fastapi.responses import StreamingResponse
+from fastapi.responses import StreamingResponse, FileResponse
 from fastapi.staticfiles import StaticFiles
 import torch
-import shutil
 import cv2
 import numpy as np
-import
+import logging
 from io import BytesIO
 
 app = FastAPI()
@@ -19,6 +18,9 @@ def load_model():
     model = Model(device='cuda' if torch.cuda.is_available() else 'cpu')
     model.load_model('cartoon4')
 
+# Configure logging
+logging.basicConfig(level=logging.INFO)
+
 @app.post("/upload/")
 async def process_image(file: UploadFile = File(...), top: int = Form(...), bottom: int = Form(...), left: int = Form(...), right: int = Form(...)):
     global model
@@ -30,27 +32,35 @@ async def process_image(file: UploadFile = File(...), top: int = Form(...), bott
 
     # Convert the uploaded image to numpy array
     nparr = np.frombuffer(contents, np.uint8)
-
+    frame = cv2.imdecode(nparr, cv2.IMREAD_UNCHANGED)  # Use IMREAD_UNCHANGED to keep the original format
+
+    if frame is None:
+        logging.error("Failed to decode the image.")
+        return {"error": "Failed to decode the image. Please ensure the file is a valid image format."}
+
+    logging.info(f"Uploaded image shape: {frame.shape}")
 
     # Process the uploaded image
-    aligned_face, instyle, message = model.detect_and_align_image(
-
+    aligned_face, instyle, message = model.detect_and_align_image(frame, top, bottom, left, right)
+    if aligned_face is None or instyle is None:
+        logging.error("Failed to process the image: No face detected or alignment failed.")
+        return {"error": message}
 
-
-
+    processed_image, message = model.image_toonify(aligned_face, instyle, model.exstyle, style_degree=0.5, style_type='cartoon1')
+    if processed_image is None:
+        logging.error("Failed to toonify the image.")
+        return {"error": message}
 
     # Convert processed image to bytes
-    _, encoded_image = cv2.imencode('.jpg',
+    _, encoded_image = cv2.imencode('.jpg', processed_image)
 
     # Return the processed image as a streaming response
     return StreamingResponse(BytesIO(encoded_image.tobytes()), media_type="image/jpeg")
 
-
 # Mount static files directory
 app.mount("/", StaticFiles(directory="AB", html=True), name="static")
 
 # Define index route
 @app.get("/")
 def index():
-    return FileResponse(path="/app/AB/index.html", media_type="text/html")
-
+    return FileResponse(path="/app/AB/index.html", media_type="text/html")
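For reference, a minimal client sketch for exercising the /upload/ endpoint as it stands after this change. It is not part of the commit: the base URL http://localhost:8000, the file name test.jpg, and the use of the requests library are all assumptions. On success the endpoint streams back a JPEG; on failure it returns a JSON object with an "error" key, matching the error handling added above.

# Hypothetical client for the /upload/ endpoint (assumes a local server and requests installed).
import requests

with open("test.jpg", "rb") as f:
    resp = requests.post(
        "http://localhost:8000/upload/",  # assumed base URL
        files={"file": ("test.jpg", f, "image/jpeg")},
        # top/bottom/left/right are required form fields; they are passed
        # through to model.detect_and_align_image(), whose exact semantics
        # depend on the Model implementation.
        data={"top": 0, "bottom": 0, "left": 0, "right": 0},
    )

if resp.headers.get("content-type") == "image/jpeg":
    # Processed image streamed back as JPEG bytes.
    with open("toonified.jpg", "wb") as out:
        out.write(resp.content)
else:
    # Decode, alignment, or toonify failure: {"error": "..."}.
    print(resp.json())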