Upload 2 files
- app.py +89 -0
- requirements.txt +3 -0
app.py
ADDED
@@ -0,0 +1,89 @@
+import cv2
+import numpy as np
+import gradio as gr
+
+
+def apply_gaussian_blur(frame, intensity):
+    ksize = int(intensity) * 2 + 1  # GaussianBlur requires an odd kernel size
+    return cv2.GaussianBlur(frame, (ksize, ksize), 0)
+
+def apply_sharpening_filter(frame):
+    kernel = np.array([[0, -1, 0], [-1, 5, -1], [0, -1, 0]])  # 3x3 sharpening kernel
+    return cv2.filter2D(frame, -1, kernel)
+
+def apply_edge_detection(frame):
+    return cv2.Canny(frame, 100, 200)
+
+def apply_invert_filter(frame):
+    return cv2.bitwise_not(frame)
+
+def adjust_brightness_contrast(frame, alpha=1.0, beta=0):
+    return cv2.convertScaleAbs(frame, alpha=alpha, beta=beta)  # alpha = gain, beta = bias
+
+def apply_grayscale_filter(frame):
+    return cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
+
+def apply_sepia_filter(frame):
+    sepia_filter = np.array([[0.272, 0.534, 0.131],
+                             [0.349, 0.686, 0.168],
+                             [0.393, 0.769, 0.189]])
+    sepia_frame = cv2.transform(frame, sepia_filter)
+    sepia_frame = np.clip(sepia_frame, 0, 255)
+    return sepia_frame
+
+def apply_fall_filter(frame):
+    # Warm "autumn" tint: the sepia matrix with its first and last rows swapped.
+    fall_filter = np.array([[0.393, 0.769, 0.189],
+                            [0.349, 0.686, 0.168],
+                            [0.272, 0.534, 0.131]])
+    fall_frame = cv2.transform(frame, fall_filter)
+    fall_frame = np.clip(fall_frame, 0, 255)
+    return fall_frame
+
+def apply_filter(filter_types, input_image, blur_intensity=1, brightness=1.0, contrast=50):
+    frame = input_image.copy()
+
+    for filter_type in filter_types:
+        if filter_type == "Gaussian Blur":
+            frame = apply_gaussian_blur(frame, blur_intensity)
+        elif filter_type == "Sharpen":
+            frame = apply_sharpening_filter(frame)
+        elif filter_type == "Edge Detection":
+            frame = apply_edge_detection(frame)
+        elif filter_type == "Invert":
+            frame = apply_invert_filter(frame)
+        elif filter_type == "Brightness/Contrast":
+            frame = adjust_brightness_contrast(frame, alpha=brightness, beta=contrast)
+        elif filter_type == "Grayscale":
+            frame = apply_grayscale_filter(frame)
+        elif filter_type == "Sepia":
+            frame = apply_sepia_filter(frame)
+        elif filter_type == "Sonbahar":  # "Sonbahar" = "Autumn"
+            frame = apply_fall_filter(frame)
+
+    return frame
+
+
+with gr.Blocks() as demo:
+    gr.Markdown("# Gelişmiş Web Kameradan Canlı Filtreleme")  # "Advanced live webcam filtering"
+
+
+    filter_types = gr.CheckboxGroup(
+        label="Filtre Seçin",  # "Select filters"
+        choices=["Gaussian Blur", "Sharpen", "Edge Detection", "Invert", "Brightness/Contrast", "Grayscale", "Sepia", "Sonbahar"],
+        value=["Gaussian Blur"]
+    )
+
+    blur_intensity = gr.Slider(label="Gaussian Blur Yoğunluğu", minimum=1, maximum=10, step=1, value=1)  # "Gaussian blur intensity"
+    brightness = gr.Slider(label="Parlaklık", minimum=0.5, maximum=2.0, step=0.1, value=1.0)  # "Brightness"
+    contrast = gr.Slider(label="Kontrast", minimum=0, maximum=100, step=10, value=50)  # "Contrast"
+
+    input_image = gr.Image(label="Resim Yükle", type="numpy")  # "Upload image"
+
+    output_image = gr.Image(label="Filtre Uygulandı")  # "Filter applied"
+
+    apply_button = gr.Button("Filtreyi Uygula")  # "Apply filter"
+
+    apply_button.click(fn=apply_filter, inputs=[filter_types, input_image, blur_intensity, brightness, contrast], outputs=output_image)
+
+
+demo.launch()
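Note on colour channels: gr.Image(type="numpy") passes apply_filter an RGB array, while the conversions above (cv2.COLOR_BGR2GRAY and the sepia/fall matrices) follow OpenCV's usual BGR ordering, so the tints differ slightly from what the same code would produce on a BGR frame. Below is a minimal sketch of a wrapper that could be used if exact BGR behaviour is wanted; the helper name rgb_roundtrip is hypothetical and not part of the committed file.

    import cv2

    def rgb_roundtrip(filter_fn, rgb_frame):
        # Gradio hands the callback an RGB array; the OpenCV colour-mixing code assumes BGR.
        bgr = cv2.cvtColor(rgb_frame, cv2.COLOR_RGB2BGR)
        out = filter_fn(bgr)
        # Convert back only if the filter kept three channels (e.g. not after Grayscale or Canny).
        return cv2.cvtColor(out, cv2.COLOR_BGR2RGB) if out.ndim == 3 else out

A second small robustness point: apply_filter calls input_image.copy() unconditionally, so clicking the button before an image is uploaded raises an error; an early "if input_image is None: return None" guard would avoid that.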
requirements.txt
ADDED
@@ -0,0 +1,3 @@
+opencv-python
+numpy
+gradio
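These three entries mirror the imports at the top of app.py. A quick, optional smoke test (not part of the commit) to confirm a fresh environment resolves them:

    # Hypothetical check: confirm the three dependencies import and report their versions.
    import cv2
    import numpy
    import gradio

    print("opencv-python:", cv2.__version__)
    print("numpy:", numpy.__version__)
    print("gradio:", gradio.__version__)

If importing cv2 fails on a headless host with a missing libGL error, swapping opencv-python for opencv-python-headless in this file is a common workaround.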