Learn A-Z Deep Learning in 15 Days
What is Transfer Learning?
Transfer learning is a machine learning technique in which a model developed for one task is reused as the starting point for a model on a second, related task. It is especially popular in deep learning, where pre-trained models (networks that someone else has already trained on a large dataset to solve a similar problem) serve as the starting point. Instead of building and training a model from scratch, you take a model trained on another problem and adapt it to yours.
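To make the idea concrete before the full walkthrough below, here is a minimal sketch (Keras with VGG16 is used only as an assumed example, not the exact pipeline from this post): load a network pre-trained on ImageNet, freeze its learned weights, and attach a small new classifier head for your own two-class task.
from keras import applications, layers, models

# Pre-trained convolutional base (ImageNet weights), without its original classifier head
base = applications.VGG16(weights='imagenet', include_top=False, input_shape=(128, 128, 3))

# Freeze the base so its pre-trained features are not changed during training
for layer in base.layers:
    layer.trainable = False

# Attach a small new head for a 2-class problem (cat vs dog)
x = layers.Flatten()(base.output)
output = layers.Dense(2, activation='softmax')(x)
model = models.Model(inputs=base.input, outputs=output)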
Important: use a Kaggle kernel (notebook) to run this code. Download the Cat vs Dog dataset from the link below.
import cv2
import os
import numpy as np
Unzip the Dataset
!unzip ../input/train.zip
!unzip ../input/test1.zip
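As a quick sanity check (a small sketch, assuming the default Kaggle working directory used in the rest of this post), you can confirm the archives were extracted and count the images:
import os

# The unzipped folders land in the kernel's working directory
print(len(os.listdir('/kaggle/working/train')))    # the full Dogs vs Cats training set has 25,000 images
print(len(os.listdir('/kaggle/working/test1')))    # the test set has 12,500 images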
Load the Training Images
IMAGE_WIDTH = 128
IMAGE_HEIGHT = 128
IMAGE_CHANNELS = 3   # VGG16 with ImageNet weights expects 3-channel input
IMAGE_SIZE = (IMAGE_WIDTH, IMAGE_HEIGHT)

directory = "/kaggle/working/train"
data = []
label = []

for filename in os.listdir(directory):
    image = cv2.imread(os.path.join(directory, filename))   # read in colour
    if image is None:          # skip anything that is not a readable image
        continue
    image = cv2.resize(image, IMAGE_SIZE)
    category = filename.split('.')[0]   # filenames look like dog.0.jpg / cat.0.jpg
    if category == 'dog':
        label.append(1)
    else:
        label.append(0)
    data.append(image / 255)   # scale pixel values to [0, 1]
List to Array Conversion
data = np.array(data)      # shape: (num_images, 128, 128, 3)
label = np.array(label)
print(data.shape)
print(label.shape)
Train Test Split
from sklearn.model_selection import train_test_split
x_train, x_val, y_train, y_val = train_test_split(data, label, test_size=0.3, random_state=42)
from keras.utils import np_utils
y_train = np_utils.to_categorical(y_train,num_classes=2)
y_val = np_utils.to_categorical(y_val,num_classes=2)
Transfer Learning
from keras import applications

# load the VGG16 network pre-trained on ImageNet, leaving off its fully connected head
baseModel = applications.VGG16(weights='imagenet',
                               include_top=False,
                               pooling=None,
                               input_shape=(IMAGE_WIDTH, IMAGE_HEIGHT, IMAGE_CHANNELS))
## You can swap in other backbones, e.g. applications.ResNet50, or pass pooling='avg'
baseModel.summary()
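The comment above mentions swapping in other backbones. As a sketch of what that looks like (assuming a reasonably recent Keras where ResNet50 accepts 128x128 inputs), ResNet50 with pooling='avg' already returns a flat feature vector, so the Flatten layer used in the head below would not be needed:
# Hypothetical alternative backbone: ResNet50 with global average pooling
baseModel_resnet = applications.ResNet50(weights='imagenet',
                                         include_top=False,
                                         pooling='avg',
                                         input_shape=(IMAGE_WIDTH, IMAGE_HEIGHT, IMAGE_CHANNELS))
baseModel_resnet.summary()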
from keras.layers import Flatten, Dense, Dropout
from keras.models import Model
from keras import optimizers

# construct the head of the model that will be placed on top of the base model
headModel = baseModel.output
headModel = Flatten(name="flatten")(headModel)
headModel = Dense(512, activation="relu")(headModel)
headModel = Dropout(0.5)(headModel)
headModel = Dense(2, activation="softmax")(headModel)

# place the head FC model on top of the base model (this will become
# the actual model we will train)
model = Model(inputs=baseModel.input, outputs=headModel)

# loop over all layers in the base model and freeze them so they will
# *not* be updated during the training process
for layer in baseModel.layers:
    layer.trainable = False

model.summary()
model.compile(loss='categorical_crossentropy',
              optimizer=optimizers.Adam(lr=1e-4),   # 1e-8 is far too small to make progress; 1e-4 is a more typical choice
              metrics=['acc'])
from keras.callbacks import ModelCheckpoint, EarlyStopping

# save a checkpoint whenever validation loss improves
filepath = "./cp-{epoch:02d}.h5"

checkpoint = ModelCheckpoint(filepath,
                             monitor="val_loss",
                             mode="min",
                             save_best_only=True,
                             verbose=1)

# stop training if validation loss has not improved for 4 epochs
earlystop = EarlyStopping(monitor='val_loss',
                          min_delta=0,
                          patience=4,
                          verbose=1,
                          restore_best_weights=True)

# put our callbacks into a callback list
callbacks = [earlystop, checkpoint]
Start the Training
history = model.fit(x_train, y_train,
                    validation_data=(x_val, y_val),
                    epochs=50,
                    batch_size=64,
                    callbacks=callbacks)
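Since the ModelCheckpoint callback above saves a cp-XX.h5 file whenever validation loss improves, you can reload a saved model later, for example in a fresh kernel session (a small sketch; the epoch number in the filename is hypothetical and should match the best checkpoint printed during your run):
from keras.models import load_model

# Hypothetical filename: replace 05 with the epoch of your best checkpoint
best_model = load_model('./cp-05.h5')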
Plot Epoch vs Accuracy and Epoch vs Loss
import matplotlib.pyplot as plt

acc = history.history['acc']
val_acc = history.history['val_acc']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs = range(1, len(acc) + 1)

plt.plot(epochs, acc, 'blue', label='Training acc')
plt.plot(epochs, val_acc, 'red', label='Validation acc')
plt.title('Training and validation accuracy')
plt.legend()

plt.figure()
plt.plot(epochs, loss, 'blue', label='Training loss')
plt.plot(epochs, val_loss, 'red', label='Validation loss')
plt.title('Training and validation loss')
plt.legend()
plt.show()
IMAGE_WIDTH = 128
IMAGE_HEIGHT = 128
IMAGE_CHANNELS = 3
IMAGE_SIZE = (IMAGE_WIDTH, IMAGE_HEIGHT)

directory = "/kaggle/working/test1"
data = []
filenames = []

for filename in os.listdir(directory):
    image = cv2.imread(os.path.join(directory, filename))   # read in colour
    if image is None:   # skip anything that is not a readable image
        continue
    image = cv2.resize(image, IMAGE_SIZE)
    data.append(image / 255)
    filenames.append(filename)   # keep filenames aligned with the images actually loaded
data = np.array(data)   # shape: (num_test_images, 128, 128, 3)
test_image = data
predictions = model.predict(test_image)
results = np.argmax(predictions, axis = 1)
key={0:'cat',1:'dog'}
label_prediction=[key[r] for r in results]
import matplotlib.pyplot as plt

nb_rows = 3
nb_cols = 3
fig, axs = plt.subplots(nb_rows, nb_cols, figsize=(6, 6), dpi=100)

n = 0
for i in range(0, nb_rows):
    for j in range(0, nb_cols):
        axs[i, j].set_title(label_prediction[n])
        axs[i, j].imshow(data[n][:, :, ::-1])   # OpenCV loads BGR; reverse channels to RGB for display
        n += 1
plt.tight_layout()
plt.show()
import pandas as pd

# use the filenames collected while loading so they stay aligned with the predictions
df = pd.DataFrame(data={'imagename': filenames, 'predicted_labels': label_prediction})
df.head()
df.to_csv('submission_new_model.csv', index=False, header=True)
In the next blog, we will move on from deep learning classification to object detection.
https://sngurukuls247.blogspot.com/2020/05/deep-learning-5-objection-detection.html
Feel free to contact me at:
Email - sn.gurukul24.7uk@gmail.com