Jaiking001 committed
Commit 9333188 · verified · 1 Parent(s): 2987148

upload 3 files

Files changed (3)
  1. predict_breed.py +20 -0
  2. requirements.txt +6 -0
  3. train_dog_breed.py +72 -0
predict_breed.py ADDED
@@ -0,0 +1,20 @@
+ import tensorflow as tf
+ from tensorflow.keras.preprocessing import image
+ import numpy as np
+ import json
+
+ def predict_breed(img_path):
+     model = tf.keras.models.load_model("dog_breed_classifier.h5")
+     with open("class_indices.json", "r") as f:
+         class_indices = json.load(f)
+     class_names = {v: k for k, v in class_indices.items()}
+
+     img = image.load_img(img_path, target_size=(224, 224))
+     img_array = image.img_to_array(img) / 255.0
+     img_array = np.expand_dims(img_array, axis=0)
+
+     prediction = model.predict(img_array)
+     predicted_class = class_names[np.argmax(prediction)]
+     print(f"Predicted Dog Breed: {predicted_class}")
+
+ predict_breed(r"E:\Dog_Breed_Classification_model\dataset\Akita_Inu\Akita_Inu1.jpg")
requirements.txt ADDED
@@ -0,0 +1,6 @@
+ tensorflow>=2.11.0
+ scikit-learn
+ numpy
+ pillow
+ matplotlib
+ tqdm
train_dog_breed.py ADDED
@@ -0,0 +1,72 @@
+ import os
+ import shutil
+ import random
+ import numpy as np
+ import json
+ from sklearn.model_selection import train_test_split
+ import tensorflow as tf
+ from tensorflow.keras.preprocessing.image import ImageDataGenerator
+
+ SOURCE_DIR = r"E:\Dog_Breed_Classification_model\dataset"
+ TARGET_DIR = r"E:\Dog_Breed_Classification_model\dataset_split"
+ IMG_SIZE = (224, 224)
+ BATCH_SIZE = 16
+ EPOCHS = 20
+ SEED = 42
+
+ def split_dataset():
+     random.seed(SEED)
+     for split in ['train', 'val']:
+         os.makedirs(os.path.join(TARGET_DIR, split), exist_ok=True)
+
+     for breed in os.listdir(SOURCE_DIR):
+         class_path = os.path.join(SOURCE_DIR, breed)
+         if not os.path.isdir(class_path):
+             continue
+         images = [img for img in os.listdir(class_path) if img.lower().endswith(('.jpg', '.jpeg', '.png'))]
+         train_imgs, val_imgs = train_test_split(images, test_size=0.2, random_state=SEED)
+
+         for split, img_list in zip(['train', 'val'], [train_imgs, val_imgs]):
+             split_dir = os.path.join(TARGET_DIR, split, breed)
+             os.makedirs(split_dir, exist_ok=True)
+             for img in img_list:
+                 shutil.copy2(os.path.join(class_path, img), os.path.join(split_dir, img))
+
+ def create_generators():
+     train_gen = ImageDataGenerator(rescale=1./255, rotation_range=15, width_shift_range=0.1,
+                                    height_shift_range=0.1, horizontal_flip=True)
+     val_gen = ImageDataGenerator(rescale=1./255)
+
+     train_data = train_gen.flow_from_directory(os.path.join(TARGET_DIR, 'train'),
+                                                target_size=IMG_SIZE, batch_size=BATCH_SIZE,
+                                                class_mode='categorical')
+
+     val_data = val_gen.flow_from_directory(os.path.join(TARGET_DIR, 'val'),
+                                            target_size=IMG_SIZE, batch_size=BATCH_SIZE,
+                                            class_mode='categorical')
+     return train_data, val_data
+
+ def build_model(num_classes):
+     base_model = tf.keras.applications.MobileNetV2(input_shape=(*IMG_SIZE, 3), include_top=False, weights='imagenet')
+     base_model.trainable = False
+     model = tf.keras.Sequential([
+         base_model,
+         tf.keras.layers.GlobalAveragePooling2D(),
+         tf.keras.layers.Dense(128, activation='relu'),
+         tf.keras.layers.Dropout(0.3),
+         tf.keras.layers.Dense(num_classes, activation='softmax')
+     ])
+     model.compile(optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy'])
+     return model
+
+ def save_model(model, class_indices):
+     model.save("dog_breed_classifier.h5")
+     with open("class_indices.json", "w") as f:
+         json.dump(class_indices, f)
+
+ if __name__ == "__main__":
+     split_dataset()
+     train_data, val_data = create_generators()
+     model = build_model(num_classes=len(train_data.class_indices))
+     model.fit(train_data, validation_data=val_data, epochs=EPOCHS)
+     save_model(model, train_data.class_indices)
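
The two scripts are connected through the artifacts dog_breed_classifier.h5 and class_indices.json: train_dog_breed.py writes them to the working directory and predict_breed.py loads them from there. A minimal usage sketch, assuming training has already produced both files and using a placeholder image path, showing how the same artifacts could report the three most likely breeds instead of only the argmax:

import json
import numpy as np
import tensorflow as tf
from tensorflow.keras.preprocessing import image

# Load the artifacts written by train_dog_breed.py (assumed to be in the working directory).
model = tf.keras.models.load_model("dog_breed_classifier.h5")
with open("class_indices.json", "r") as f:
    class_names = {v: k for k, v in json.load(f).items()}

# Preprocess one image the same way as predict_breed.py: resize to 224x224, scale to [0, 1].
img = image.load_img("some_dog.jpg", target_size=(224, 224))  # placeholder path
img_array = np.expand_dims(image.img_to_array(img) / 255.0, axis=0)

# Rank breeds by predicted probability and print the three most likely.
probs = model.predict(img_array)[0]
for idx in np.argsort(probs)[::-1][:3]:
    print(f"{class_names[int(idx)]}: {probs[idx]:.3f}")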