update dockerfile
This view is limited to 50 files because it contains too many changes; see the raw diff for the full change set.
- 01d1a50b9ae858ac9bce7ade16fa74512f94fd +0 -0
- 0ba5f24071de74933fb3dcbafc1f8c1c1cbd12 +0 -0
- 1240b98ed69c04a3e9d59db645ef8e97edc12d +0 -0
- 142db7d0bba5750cbf9d2f56e9bcad443ea6b5 +0 -0
- 1b251536fa09d28d36b8f7476f6453a7667628 +3 -0
- 2d38bf3c6a186c78eae26361deb7aae51b1a8c +0 -0
- 338539cc975ea9ce37941545cc031d4e94fcd0 +0 -0
- 344aac8c09253b3b630fb776ae94478aa0275b +2 -0
- 40841ac768b5e51df9d30213a589ffe6439f7d +5 -0
- 65ab4c4bd40cae9973417b5b8d4c0c1edd7fc7 +0 -0
- 6ee8951c7546c7bc5876304b0f287036ad5361 +2 -0
- 7e1f7b4564f1249151b684110e5ae429969e82 +0 -0
- 7fb3351b345190fa253cc10972815b04e223a5 +0 -0
- 82015897d315ee0c1caf9a7c6afc88e9c8b7e7 +0 -0
- 99da7d986114fd50a3067f9845aca74cb976bc +0 -0
- 9de29bb2d1d6434b8b29ae775ad8c2e48c5391 +0 -0
- Base-RCNN-FPN.yaml +18 -0
- COMMIT_EDITMSG +1 -0
- Dockerfile +60 -0
- HEAD +1 -0
- Procfile +1 -0
- app.py +109 -0
- applypatch-msg.sample +15 -0
- b04af489e5e04b3bff84cdaee0263a27dbfc27 +0 -0
- b4ed774368af89d654c9f01850d769e6cf9f52 +0 -0
- b64513a3a8c89484e90c1ce7d6107205dfa2bb +0 -0
- bdf7525023a228fbbd08bd7d70506adad8615a +0 -0
- c4a86461311423cae31efe9bdc20de13e6472f +1 -0
- carDamageDetectionFinal +1 -0
- commit-msg.sample +24 -0
- config +11 -0
- config.yaml +9 -0
- deploy.Dockerfile +32 -0
- description +1 -0
- docker-compose.yaml +26 -0
- e0060b2b78b26e4cef9631a04e84db4eb2c567 +0 -0
- e251e270498f1aed0bd37ea8b7cd1df50317e5 +0 -0
- e98358175e37113dc547068ceb67a9cd7e9865 +1 -0
- exclude +6 -0
- f9e8490251dbc56e631a22442c501537ca0113 +0 -0
- fsmonitor-watchman.sample +174 -0
- index +0 -0
- index.html +91 -0
- install_detectron2.sh +0 -0
- kaggle.json +1 -0
- kaggle.json:Zone.Identifier +3 -0
- main +1 -0
- master +1 -0
- packed-refs +3 -0
- post-update.sample +8 -0
01d1a50b9ae858ac9bce7ade16fa74512f94fd
ADDED
Binary file (35 Bytes)

0ba5f24071de74933fb3dcbafc1f8c1c1cbd12
ADDED
Binary file (55 Bytes)

1240b98ed69c04a3e9d59db645ef8e97edc12d
ADDED
Binary file (1.28 kB)

142db7d0bba5750cbf9d2f56e9bcad443ea6b5
ADDED
Binary file (1.52 kB)
1b251536fa09d28d36b8f7476f6453a7667628
ADDED
@@ -0,0 +1,3 @@
(binary data; not displayable as text)
2d38bf3c6a186c78eae26361deb7aae51b1a8c
ADDED
Binary file (1.2 kB)

338539cc975ea9ce37941545cc031d4e94fcd0
ADDED
Binary file (72 Bytes)
344aac8c09253b3b630fb776ae94478aa0275b
ADDED
@@ -0,0 +1,2 @@
(binary data; not displayable as text)
40841ac768b5e51df9d30213a589ffe6439f7d
ADDED
@@ -0,0 +1,5 @@
(binary data; not displayable as text)
65ab4c4bd40cae9973417b5b8d4c0c1edd7fc7
ADDED
Binary file (348 Bytes)
6ee8951c7546c7bc5876304b0f287036ad5361
ADDED
@@ -0,0 +1,2 @@
(binary data; not displayable as text)
7e1f7b4564f1249151b684110e5ae429969e82
ADDED
Binary file (515 Bytes)

7fb3351b345190fa253cc10972815b04e223a5
ADDED
Binary file (163 Bytes)

82015897d315ee0c1caf9a7c6afc88e9c8b7e7
ADDED
Binary file (29 Bytes)

99da7d986114fd50a3067f9845aca74cb976bc
ADDED
Binary file (79 Bytes)

9de29bb2d1d6434b8b29ae775ad8c2e48c5391
ADDED
Binary file (15 Bytes)
Base-RCNN-FPN.yaml
ADDED
@@ -0,0 +1,18 @@
+MODEL:
+  META_ARCHITECTURE: "GeneralizedRCNN"
+  RPN:
+    PRE_NMS_TOPK_TEST: 6000
+    POST_NMS_TOPK_TEST: 1000
+  ROI_HEADS:
+    NAME: "Res5ROIHeads"
+DATASETS:
+  TRAIN: ("coco_2017_train",)
+  TEST: ("coco_2017_val",)
+SOLVER:
+  IMS_PER_BATCH: 16
+  BASE_LR: 0.02
+  STEPS: (60000, 80000)
+  MAX_ITER: 90000
+INPUT:
+  MIN_SIZE_TRAIN: (640, 672, 704, 736, 768, 800)
+VERSION: 2
COMMIT_EDITMSG
ADDED
@@ -0,0 +1 @@
+update dockerfile
Dockerfile
ADDED
@@ -0,0 +1,60 @@
+FROM nvidia/cuda:11.1.1-cudnn8-devel-ubuntu18.04
+# use an older system (18.04) to avoid opencv incompatibility (issue#3524)
+
+ENV DEBIAN_FRONTEND noninteractive
+RUN apt-get update && apt-get install -y \
+    python3-opencv ca-certificates python3-dev git wget sudo ninja-build
+RUN ln -sv /usr/bin/python3 /usr/bin/python
+
+# create a non-root user
+ARG USER_ID=1000
+RUN useradd -m --no-log-init --system --uid ${USER_ID} appuser -g sudo
+RUN echo '%sudo ALL=(ALL) NOPASSWD:ALL' >> /etc/sudoers
+USER appuser
+WORKDIR /home/appuser
+
+ENV PATH="/home/appuser/.local/bin:${PATH}"
+RUN wget https://bootstrap.pypa.io/pip/3.6/get-pip.py && \
+    python3 get-pip.py --user && \
+    rm get-pip.py
+
+# install dependencies
+# See https://pytorch.org/ for other options if you use a different version of CUDA
+RUN pip install --user tensorboard cmake onnx   # cmake from apt-get is too old
+RUN pip install --user torch==1.10 torchvision==0.11.1 -f https://download.pytorch.org/whl/cu111/torch_stable.html
+
+RUN pip install --user 'git+https://github.com/facebookresearch/fvcore'
+# install detectron2
+RUN git clone https://github.com/facebookresearch/detectron2 detectron2_repo
+# set FORCE_CUDA because during `docker build` cuda is not accessible
+ENV FORCE_CUDA="1"
+# This will by default build detectron2 for all common cuda architectures and take a lot more time,
+# because inside `docker build`, there is no way to tell which architecture will be used.
+ARG TORCH_CUDA_ARCH_LIST="Kepler;Kepler+Tesla;Maxwell;Maxwell+Tegra;Pascal;Volta;Turing"
+ENV TORCH_CUDA_ARCH_LIST="${TORCH_CUDA_ARCH_LIST}"
+
+RUN pip install --user -e detectron2_repo
+
+# Set a fixed model cache directory.
+ENV FVCORE_CACHE="/tmp"
+WORKDIR /home/appuser/detectron2_repo
+
+# run detectron2 under user "appuser":
+# wget http://images.cocodataset.org/val2017/000000439715.jpg -O input.jpg
+# python3 demo/demo.py \
+#   --config-file configs/COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x.yaml \
+#   --input input.jpg --output outputs/ \
+#   --opts MODEL.WEIGHTS detectron2://COCO-InstanceSegmentation/mask_rcnn_R_50_FPN_3x/137849600/model_final_f10217.pkl
+FROM python:3.9
+
+RUN useradd -m -u 1000 user
+USER user
+ENV PATH="/home/user/.local/bin:$PATH"
+
+WORKDIR /app
+
+COPY --chown=user ./requirements.txt requirements.txt
+RUN pip install --no-cache-dir --upgrade -r requirements.txt
+
+COPY --chown=user . /app
+CMD ["uvicorn", "app:app", "--host", "0.0.0.0", "--port", "7860"]
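Note: the second FROM python:3.9 stage is the one that actually ships (everything built in the CUDA stage above is discarded), and its CMD starts uvicorn against app:app even though app.py (below) defines a Flask, i.e. WSGI, application, which uvicorn does not serve by default. A minimal sketch of one way to bridge the two follows; it assumes asgiref is listed in requirements.txt, and the adapter module name asgi.py is hypothetical, not part of this commit. Running the app under gunicorn instead, as the Procfile does, is the simpler alternative.

# asgi.py -- hypothetical adapter module, not part of this commit.
# Wraps the Flask (WSGI) app from app.py so an ASGI server such as uvicorn
# can serve it, e.g. CMD ["uvicorn", "asgi:app", "--host", "0.0.0.0", "--port", "7860"].
from asgiref.wsgi import WsgiToAsgi

from app import app as flask_app  # the Flask instance defined in app.py

app = WsgiToAsgi(flask_app)  # ASGI-compatible callable for uvicorn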
HEAD
ADDED
@@ -0,0 +1 @@
+ref: refs/remotes/origin/main
Procfile
ADDED
@@ -0,0 +1 @@
+web: gunicorn app:app
app.py
ADDED
@@ -0,0 +1,109 @@
+from flask import Flask, request, jsonify, render_template
+from detectron2main.config import get_cfg
+from detectron2main.engine import DefaultPredictor
+from detectron2main.data import MetadataCatalog
+from detectron2main.utils.visualizer import Visualizer, ColorMode
+import numpy as np
+from PIL import Image
+import io
+import os
+import requests
+import gdown
+
+# Initialize Flask app
+app = Flask(__name__)
+cfg = None
+# Google Drive file URL
+GDRIVE_MODEL_URL = "https://drive.google.com/uc?id=18aEDo-kWOBhg8mAhnbpFkuM6bmmrBH4E"  # Replace 'your-file-id' with the actual file ID from Google Drive
+LOCAL_MODEL_PATH = "model_final.pth"
+def download_file_from_google_drive(id, destination):
+    gdown.download(GDRIVE_MODEL_URL, LOCAL_MODEL_PATH, quiet=False)
+
+
+file_id = '18aEDo-kWOBhg8mAhnbpFkuM6bmmrBH4E'
+destination = 'model_final.pth'
+download_file_from_google_drive(file_id, destination)
+
+# Download model from Google Drive if not already present locally
+def download_model():
+    if not os.path.exists(LOCAL_MODEL_PATH):
+        response = requests.get(GDRIVE_MODEL_URL, stream=True)
+        if response.status_code == 200:
+            with open(LOCAL_MODEL_PATH, 'wb') as f:
+                f.write(response.content)
+        else:
+            raise Exception(f"Failed to download model from Google Drive: {response.status_code}")
+
+# Configuration and model setup
+def setup_model(model_path):
+    global cfg
+    cfg = get_cfg()
+    cfg.merge_from_file("config.yaml")  # Update with the config file path
+    cfg.MODEL.WEIGHTS = model_path
+    cfg.MODEL.ROI_HEADS.SCORE_THRESH_TEST = 0.5
+    cfg.MODEL.DEVICE = "cpu"  # Use "cuda" for GPU
+    return DefaultPredictor(cfg)
+
+# Ensure model is available
+predictor = setup_model(LOCAL_MODEL_PATH)
+
+# Define expected parts and costs
+expected_parts = ['headlamp', 'rear_bumper', 'door', 'hood', 'front_bumper']
+cost_dict = {
+    'headlamp': 300,
+    'rear_bumper': 250,
+    'door': 200,
+    'hood': 220,
+    'front_bumper': 250,
+    'other': 150
+}
+
+@app.route('/')
+def home():
+    return render_template('index.html')
+
+@app.route('/upload', methods=['POST'])
+def upload():
+    if 'file' not in request.files:
+        return jsonify({"error": "No file uploaded"}), 400
+
+    file = request.files['file']
+    if file.filename == '':
+        return jsonify({"error": "No file selected"}), 400
+
+    # Load image
+    image = Image.open(file).convert("RGB")
+    image_np = np.array(image)
+
+    # Run model prediction
+    outputs = predictor(image_np)
+    instances = outputs["instances"].to("cpu")
+    class_names = MetadataCatalog.get(cfg.DATASETS.TEST[0]).thing_classes
+
+    # Initialize total cost
+    total_cost = 0
+    damage_details = []
+
+    for j in range(len(instances)):
+        class_id = instances.pred_classes[j].item()
+        damaged_part = class_names[class_id] if class_id < len(class_names) else 'unknown'
+        if damaged_part not in expected_parts:
+            damaged_part = 'other'
+
+        repair_cost = cost_dict.get(damaged_part, cost_dict['other'])
+        total_cost += repair_cost
+
+        damage_details.append({
+            'part': damaged_part,
+            'cost_usd': repair_cost
+        })
+
+    response = {
+        "damages": damage_details,
+        "total_cost": total_cost
+    }
+
+    return jsonify(response)
+
+if __name__ == '__main__':
+    app.run(debug=True)
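For reference, a minimal sketch of exercising the /upload route once the app is running; it assumes the Flask development server at http://localhost:5000 and a local test image, both hypothetical and not part of this commit. (Two small observations: the imports above reference a detectron2main package, presumably a local copy of detectron2 rather than the upstream package name, and index.html below also renders damage.area_pixels, which this /upload response does not include.)

# upload_client.py -- hypothetical test client, not part of this commit.
# Posts an image to the running Flask app and prints the damage estimate
# returned by the /upload route above.
import requests

URL = "http://localhost:5000/upload"  # assumes the default Flask dev server

with open("damaged_car.jpg", "rb") as fh:  # hypothetical sample image
    files = {"file": ("damaged_car.jpg", fh, "image/jpeg")}
    resp = requests.post(URL, files=files, timeout=120)

resp.raise_for_status()
result = resp.json()
print("Total estimated cost: $", result["total_cost"])
for damage in result["damages"]:
    print(f"  {damage['part']}: ${damage['cost_usd']}")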
applypatch-msg.sample
ADDED
@@ -0,0 +1,15 @@
+#!/bin/sh
+#
+# An example hook script to check the commit log message taken by
+# applypatch from an e-mail message.
+#
+# The hook should exit with non-zero status after issuing an
+# appropriate message if it wants to stop the commit.  The hook is
+# allowed to edit the commit message file.
+#
+# To enable this hook, rename this file to "applypatch-msg".
+
+. git-sh-setup
+commitmsg="$(git rev-parse --git-path hooks/commit-msg)"
+test -x "$commitmsg" && exec "$commitmsg" ${1+"$@"}
+:
b04af489e5e04b3bff84cdaee0263a27dbfc27
ADDED
Binary file (268 Bytes)

b4ed774368af89d654c9f01850d769e6cf9f52
ADDED
Binary file (614 Bytes)

b64513a3a8c89484e90c1ce7d6107205dfa2bb
ADDED
Binary file (817 Bytes)

bdf7525023a228fbbd08bd7d70506adad8615a
ADDED
Binary file (515 Bytes)
c4a86461311423cae31efe9bdc20de13e6472f
ADDED
@@ -0,0 +1 @@
(binary data; not displayable as text)
carDamageDetectionFinal
ADDED
@@ -0,0 +1 @@
+Subproject commit c07fb3351b345190fa253cc10972815b04e223a5
commit-msg.sample
ADDED
@@ -0,0 +1,24 @@
+#!/bin/sh
+#
+# An example hook script to check the commit log message.
+# Called by "git commit" with one argument, the name of the file
+# that has the commit message.  The hook should exit with non-zero
+# status after issuing an appropriate message if it wants to stop the
+# commit.  The hook is allowed to edit the commit message file.
+#
+# To enable this hook, rename this file to "commit-msg".
+
+# Uncomment the below to add a Signed-off-by line to the message.
+# Doing this in a hook is a bad idea in general, but the prepare-commit-msg
+# hook is more suited to it.
+#
+# SOB=$(git var GIT_AUTHOR_IDENT | sed -n 's/^\(.*>\).*$/Signed-off-by: \1/p')
+# grep -qs "^$SOB" "$1" || echo "$SOB" >> "$1"
+
+# This example catches duplicate Signed-off-by lines.
+
+test "" = "$(grep '^Signed-off-by: ' "$1" |
+    sort | uniq -c | sed -e '/^[ ]*1[ ]/d')" || {
+    echo >&2 Duplicate Signed-off-by lines.
+    exit 1
+}
config
ADDED
@@ -0,0 +1,11 @@
+[core]
+    repositoryformatversion = 0
+    filemode = true
+    bare = false
+    logallrefupdates = true
+[remote "origin"]
+    url = https://huggingface.co/spaces/reidddd/carDamageDetection
+    fetch = +refs/heads/*:refs/remotes/origin/*
+[branch "main"]
+    remote = origin
+    merge = refs/heads/main
config.yaml
ADDED
@@ -0,0 +1,9 @@
+_BASE_: "Base-RCNN-FPN.yaml"
+MODEL:
+  WEIGHTS: "detectron2://ImageNetPretrained/MSRA/R-50.pkl"
+  MASK_ON: True
+  RESNETS:
+    DEPTH: 50
+SOLVER:
+  STEPS: (210000, 250000)
+  MAX_ITER: 270000
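As a quick illustration of how this file layers on top of Base-RCNN-FPN.yaml via _BASE_, a minimal sketch follows; it assumes the upstream detectron2 package is installed and both YAML files sit in the working directory (the script name is hypothetical, not part of this commit).

# inspect_config.py -- hypothetical helper, not part of this commit.
# Shows how detectron2 resolves the _BASE_ chain declared in config.yaml.
from detectron2.config import get_cfg

cfg = get_cfg()
cfg.merge_from_file("config.yaml")  # also loads Base-RCNN-FPN.yaml via _BASE_

print(cfg.MODEL.META_ARCHITECTURE)  # "GeneralizedRCNN", inherited from the base file
print(cfg.MODEL.RESNETS.DEPTH)      # 50, set in config.yaml
print(cfg.SOLVER.MAX_ITER)          # 270000, set in config.yaml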
deploy.Dockerfile
ADDED
@@ -0,0 +1,32 @@
+# Copyright (c) Facebook, Inc. and its affiliates.
+# This file defines a container that compiles the C++ examples of detectron2.
+# See docker/README.md for usage.
+
+# Depends on the image produced by "./Dockerfile"
+FROM detectron2:v0
+
+USER appuser
+ENV HOME=/home/appuser
+WORKDIR $HOME
+
+# Let torchvision find libtorch
+ENV CMAKE_PREFIX_PATH=$HOME/.local/lib/python3.6/site-packages/torch/
+
+RUN sudo apt-get update && sudo apt-get install libopencv-dev --yes
+
+# install libtorchvision
+RUN git clone --branch v0.11.1 https://github.com/pytorch/vision/
+RUN mkdir vision/build && cd vision/build && \
+    cmake .. -DCMAKE_INSTALL_PREFIX=$HOME/.local -DCMAKE_BUILD_TYPE=Release -DWITH_CUDA=on -DTORCH_CUDA_ARCH_LIST=$TORCH_CUDA_ARCH_LIST && \
+    make -j && make install
+
+# make our installation take effect
+ENV CPATH=$HOME/.local/include \
+    LIBRARY_PATH=$HOME/.local/lib \
+    LD_LIBRARY_PATH=$HOME/.local/lib
+
+
+# build C++ examples of detectron2
+RUN cd detectron2_repo/tools/deploy && mkdir build && cd build && \
+    cmake -DTORCH_CUDA_ARCH_LIST=$TORCH_CUDA_ARCH_LIST .. && make
+# binaries will be available under tools/deploy/build
description
ADDED
@@ -0,0 +1 @@
+Unnamed repository; edit this file 'description' to name the repository.
docker-compose.yaml
ADDED
@@ -0,0 +1,26 @@
+version: "2.3"
+services:
+  detectron2:
+    build:
+      context: .
+      dockerfile: Dockerfile
+      args:
+        USER_ID: ${USER_ID:-1000}
+    deploy:
+      resources:
+        reservations:
+          devices:
+            - capabilities:
+              - gpu
+    shm_size: "8gb"
+    ulimits:
+      memlock: -1
+      stack: 67108864
+    volumes:
+      - /tmp/.X11-unix:/tmp/.X11-unix:ro
+    environment:
+      - DISPLAY=$DISPLAY
+      - NVIDIA_VISIBLE_DEVICES=all
+    # Uncomment with proper source to access webcam from docker
+    # devices:
+    #   - /dev/video0:/dev/video0
e0060b2b78b26e4cef9631a04e84db4eb2c567
ADDED
Binary file (1.14 kB)

e251e270498f1aed0bd37ea8b7cd1df50317e5
ADDED
Binary file (29 Bytes)
e98358175e37113dc547068ceb67a9cd7e9865
ADDED
@@ -0,0 +1 @@
(binary data; not displayable as text)
exclude
ADDED
@@ -0,0 +1,6 @@
+# git ls-files --others --exclude-from=.git/info/exclude
+# Lines that start with '#' are comments.
+# For a project mostly in C, the following would be a good set of
+# exclude patterns (uncomment them if you want to use them):
+# *.[oa]
+# *~
f9e8490251dbc56e631a22442c501537ca0113
ADDED
Binary file (92 Bytes)
fsmonitor-watchman.sample
ADDED
@@ -0,0 +1,174 @@
+#!/usr/bin/perl
+
+use strict;
+use warnings;
+use IPC::Open2;
+
+# An example hook script to integrate Watchman
+# (https://facebook.github.io/watchman/) with git to speed up detecting
+# new and modified files.
+#
+# The hook is passed a version (currently 2) and last update token
+# formatted as a string and outputs to stdout a new update token and
+# all files that have been modified since the update token. Paths must
+# be relative to the root of the working tree and separated by a single NUL.
+#
+# To enable this hook, rename this file to "query-watchman" and set
+# 'git config core.fsmonitor .git/hooks/query-watchman'
+#
+my ($version, $last_update_token) = @ARGV;
+
+# Uncomment for debugging
+# print STDERR "$0 $version $last_update_token\n";
+
+# Check the hook interface version
+if ($version ne 2) {
+	die "Unsupported query-fsmonitor hook version '$version'.\n" .
+	    "Falling back to scanning...\n";
+}
+
+my $git_work_tree = get_working_dir();
+
+my $retry = 1;
+
+my $json_pkg;
+eval {
+	require JSON::XS;
+	$json_pkg = "JSON::XS";
+	1;
+} or do {
+	require JSON::PP;
+	$json_pkg = "JSON::PP";
+};
+
+launch_watchman();
+
+sub launch_watchman {
+	my $o = watchman_query();
+	if (is_work_tree_watched($o)) {
+		output_result($o->{clock}, @{$o->{files}});
+	}
+}
+
+sub output_result {
+	my ($clockid, @files) = @_;
+
+	# Uncomment for debugging watchman output
+	# open (my $fh, ">", ".git/watchman-output.out");
+	# binmode $fh, ":utf8";
+	# print $fh "$clockid\n@files\n";
+	# close $fh;
+
+	binmode STDOUT, ":utf8";
+	print $clockid;
+	print "\0";
+	local $, = "\0";
+	print @files;
+}
+
+sub watchman_clock {
+	my $response = qx/watchman clock "$git_work_tree"/;
+	die "Failed to get clock id on '$git_work_tree'.\n" .
+		"Falling back to scanning...\n" if $? != 0;
+
+	return $json_pkg->new->utf8->decode($response);
+}
+
+sub watchman_query {
+	my $pid = open2(\*CHLD_OUT, \*CHLD_IN, 'watchman -j --no-pretty')
+	or die "open2() failed: $!\n" .
+	"Falling back to scanning...\n";
+
+	# In the query expression below we're asking for names of files that
+	# changed since $last_update_token but not from the .git folder.
+	#
+	# To accomplish this, we're using the "since" generator to use the
+	# recency index to select candidate nodes and "fields" to limit the
+	# output to file names only. Then we're using the "expression" term to
+	# further constrain the results.
+	my $last_update_line = "";
+	if (substr($last_update_token, 0, 1) eq "c") {
+		$last_update_token = "\"$last_update_token\"";
+		$last_update_line = qq[\n"since": $last_update_token,];
+	}
+	my $query = <<"	END";
+		["query", "$git_work_tree", {$last_update_line
+			"fields": ["name"],
+			"expression": ["not", ["dirname", ".git"]]
+		}]
+	END
+
+	# Uncomment for debugging the watchman query
+	# open (my $fh, ">", ".git/watchman-query.json");
+	# print $fh $query;
+	# close $fh;
+
+	print CHLD_IN $query;
+	close CHLD_IN;
+	my $response = do {local $/; <CHLD_OUT>};
+
+	# Uncomment for debugging the watch response
+	# open ($fh, ">", ".git/watchman-response.json");
+	# print $fh $response;
+	# close $fh;
+
+	die "Watchman: command returned no output.\n" .
+	"Falling back to scanning...\n" if $response eq "";
+	die "Watchman: command returned invalid output: $response\n" .
+	"Falling back to scanning...\n" unless $response =~ /^\{/;
+
+	return $json_pkg->new->utf8->decode($response);
+}
+
+sub is_work_tree_watched {
+	my ($output) = @_;
+	my $error = $output->{error};
+	if ($retry > 0 and $error and $error =~ m/unable to resolve root .* directory (.*) is not watched/) {
+		$retry--;
+		my $response = qx/watchman watch "$git_work_tree"/;
+		die "Failed to make watchman watch '$git_work_tree'.\n" .
+		    "Falling back to scanning...\n" if $? != 0;
+		$output = $json_pkg->new->utf8->decode($response);
+		$error = $output->{error};
+		die "Watchman: $error.\n" .
+		"Falling back to scanning...\n" if $error;
+
+		# Uncomment for debugging watchman output
+		# open (my $fh, ">", ".git/watchman-output.out");
+		# close $fh;
+
+		# Watchman will always return all files on the first query so
+		# return the fast "everything is dirty" flag to git and do the
+		# Watchman query just to get it over with now so we won't pay
+		# the cost in git to look up each individual file.
+		my $o = watchman_clock();
+		$error = $output->{error};
+
+		die "Watchman: $error.\n" .
+		"Falling back to scanning...\n" if $error;
+
+		output_result($o->{clock}, ("/"));
+		$last_update_token = $o->{clock};
+
+		eval { launch_watchman() };
+		return 0;
+	}
+
+	die "Watchman: $error.\n" .
+	"Falling back to scanning...\n" if $error;
+
+	return 1;
+}
+
+sub get_working_dir {
+	my $working_dir;
+	if ($^O =~ 'msys' || $^O =~ 'cygwin') {
+		$working_dir = Win32::GetCwd();
+		$working_dir =~ tr/\\/\//;
+	} else {
+		require Cwd;
+		$working_dir = Cwd::cwd();
+	}
+
+	return $working_dir;
+}
index
ADDED
Binary file (217 Bytes)
index.html
ADDED
@@ -0,0 +1,91 @@
+<!DOCTYPE html>
+<html lang="en">
+<head>
+    <meta charset="UTF-8">
+    <meta name="viewport" content="width=device-width, initial-scale=1.0">
+    <title>Upload File</title>
+    <style>
+        body {
+            font-family: Arial, sans-serif;
+            text-align: center;
+            margin-top: 50px;
+        }
+        #preview {
+            margin-top: 20px;
+            max-width: 500px;
+            max-height: 500px;
+            display: none;
+        }
+    </style>
+</head>
+<body>
+    <h1>Vehicle Damage Detection</h1>
+    <form id="uploadForm" enctype="multipart/form-data">
+        <label for="file">Upload an image:</label>
+        <input type="file" id="file" name="file" accept="image/*" required>
+        <br><br>
+        <img id="preview" alt="Image Preview">
+        <br><br>
+        <button type="submit">Upload and Analyze</button>
+    </form>
+    <p id="response"></p>
+
+    <script>
+        const fileInput = document.getElementById('file');
+        const preview = document.getElementById('preview');
+        const uploadForm = document.getElementById('uploadForm');
+        const responseElement = document.getElementById('response');
+
+        // Preview the selected image
+        fileInput.addEventListener('change', function () {
+            const file = fileInput.files[0];
+            if (file) {
+                const reader = new FileReader();
+                reader.onload = function (e) {
+                    preview.src = e.target.result;
+                    preview.style.display = 'block';
+                };
+                reader.readAsDataURL(file);
+            } else {
+                preview.style.display = 'none';
+            }
+        });
+
+        // Handle form submission
+        uploadForm.addEventListener('submit', async function (event) {
+            event.preventDefault();
+
+            const formData = new FormData();
+            formData.append('file', fileInput.files[0]);
+
+            responseElement.textContent = 'Uploading and analyzing...';
+
+            try {
+                const response = await fetch('/upload', {
+                    method: 'POST',
+                    body: formData
+                });
+
+                if (response.ok) {
+                    const result = await response.json();
+                    responseElement.innerHTML = `
+                        <strong>Analysis Result:</strong><br>
+                        Total Cost: $${result.total_cost}<br>
+                        <ul>
+                            ${result.damages.map(damage => `
+                                <li>
+                                    Part: ${damage.part}, Area: ${damage.area_pixels} pixels, Cost: $${damage.cost_usd}
+                                </li>
+                            `).join('')}
+                        </ul>
+                    `;
+                } else {
+                    responseElement.textContent = 'Error: Unable to analyze the image.';
+                }
+            } catch (error) {
+                responseElement.textContent = 'Error: ' + error.message;
+            }
+        });
+    </script>
+</body>
+</html>
install_detectron2.sh
ADDED
File without changes (empty file)
kaggle.json
ADDED
@@ -0,0 +1 @@
+{"username":"reiddson","key":"c0103af93ef7241309fc2308824981e0"}
kaggle.json:Zone.Identifier
ADDED
@@ -0,0 +1,3 @@
+[ZoneTransfer]
+ZoneId=3
+HostUrl=https://www.kaggle.com/
main
ADDED
@@ -0,0 +1 @@
+e0b64513a3a8c89484e90c1ce7d6107205dfa2bb
master
ADDED
@@ -0,0 +1 @@
+c07fb3351b345190fa253cc10972815b04e223a5
packed-refs
ADDED
@@ -0,0 +1,3 @@
+# pack-refs with: peeled fully-peeled sorted
+e0b64513a3a8c89484e90c1ce7d6107205dfa2bb refs/remotes/origin/main
+c07fb3351b345190fa253cc10972815b04e223a5 refs/remotes/origin/master
post-update.sample
ADDED
@@ -0,0 +1,8 @@
+#!/bin/sh
+#
+# An example hook script to prepare a packed repository for use over
+# dumb transports.
+#
+# To enable this hook, rename this file to "post-update".
+
+exec git update-server-info