CNN Model of Image Detection in Keras (TensorFlow) in Python3

Share on Facebook0Share on Google+0Tweet about this on TwitterShare on LinkedIn0

This article covers the basic application of Keras and CNNs in Python 3, using Sublime Text 3 and IPython Notebook as the development environment. More details of the following code can be found in Robert Layton’s book here: https://www.goodreads.com/book/show/26019855-learning-data-mining-with-python?from_search=true

Screen Shot 2017-06-12 at 4.47.09 PM.png

###The book above said that we will build a system that will take an image as an input 
###and give a prediction on what the object in it is. We will take on the role of a vision 
###system for a car, looking around at any obstacles in the way or on the side of the road.

###The CIFAR-10 dataset is available for download at http://www.cs.toronto.edu/~kriz/cifar.html
###Download the python version, which has already been converted to NumPy arrays.

import pickle
# Bugfix thanks to: http://stackoverflow.com/questions/11305790/pickle-incompatability-of-numpy-arrays-between-python-2-and-3 
def unpickle(filename):
    """Load one pickled CIFAR-10 batch file.

    The batches were written by Python 2, so the bytes are decoded as
    latin1 to keep the NumPy arrays intact under Python 3.
    """
    with open(filename, 'rb') as batch_file:
        contents = pickle.load(batch_file, encoding='latin1')
    return contents
import os
import numpy as np 

# Folder holding the extracted python-version CIFAR-10 batches.
data_folder = "/Users/Charles/Desktop/pyp/cifar-10-batches-py/"

# Read the five training batch files (data_batch_1 .. data_batch_5),
# each unpickling to a dict with 'data' and 'labels' keys.
batches = [
    unpickle(os.path.join(data_folder, "data_batch_{}".format(i)))
    for i in range(1, 6)
]
print(len(batches))
#5

# Stack every batch's pixel rows into one big matrix:
# 5 batches x 10000 images x 3072 (= 3*32*32) values.
X = np.vstack([batch['data'] for batch in batches])
print(X.shape)
#(50000, 3072)

# Scale pixel values into [0, 1] and store as 32-bit floats to halve the
# memory footprint.  X is already an ndarray from np.vstack, so the
# original np.array(X) copy was redundant.
X = (X / X.max()).astype(np.float32)
from keras.utils import np_utils
# np.hstack requires a real sequence — newer NumPy versions raise a
# TypeError when handed a bare generator expression — so collect the
# per-batch label lists in a list comprehension first.
y = np.hstack([batch['labels'] for batch in batches]).flatten()
nb_classes = len(np.unique(y))
# One-hot encode the integer labels: (50000,) -> (50000, 10).
y = np_utils.to_categorical(y, nb_classes)
print(X.shape, y.shape)
#(50000, 3072) (50000, 10)

from sklearn.model_selection import train_test_split

# Hold out 20% of the images for evaluation.
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.2)
print(X_train.shape, y_train.shape)
#(40000, 3072) (40000, 10)

# Reshape the flat 3072-value rows into channels-first image tensors:
# (n, 3 channels, 32 rows, 32 columns).
X_train = X_train.reshape((-1, 3, 32, 32))
X_test = X_test.reshape((-1, 3, 32, 32))

from keras.models import Sequential
from keras.layers import Dense, Flatten, Convolution2D, MaxPooling2D
# Unpack the image dimensions: depth (channels), height, width.
n_samples, d, h, w = X_train.shape
print(X_train.shape[1:], (d, h, w))
#(3, 32, 32) (3, 32, 32)

# BUG FIX: X was already normalized into [0, 1] right after loading
# (divided by X.max(), i.e. 255), so dividing by 255 *again* here
# squashed every pixel to ~1.5e-5 and starved the network of signal.
# The float32 cast is kept as a cheap, harmless guard.
X_train = X_train.astype('float32')
X_test = X_test.astype('float32')
print(nb_classes)
#10

###here is a little different from the book mentioned above
# NOTE(review): the pooling layers request dim_ordering="tf"
# (channels-last) but the data was reshaped channels-first to
# (3, 32, 32) — confirm the intended ordering against the Keras
# backend configuration before trusting the results.
#
# BUG FIX: the original layers had no activations at all, so the whole
# network collapsed into a single linear map; ReLU is added between
# layers and softmax on the output (the author's own "# softmax?" note).
conv1 = Convolution2D(32, 3, 3, activation='relu', input_shape=(d, h, w))
pool1 = MaxPooling2D(pool_size=(2, 2), dim_ordering="tf")
conv2 = Convolution2D(64, 2, 2, activation='relu')
pool2 = MaxPooling2D(pool_size=(2, 2), dim_ordering="tf")
conv3 = Convolution2D(128, 2, 2, activation='relu')
pool3 = MaxPooling2D(pool_size=(2, 2), dim_ordering="tf")
flatten = Flatten()
hidden4 = Dense(500, activation='relu')
hidden5 = Dense(500, activation='relu')
# Softmax turns the 10 raw outputs into class probabilities.
output = Dense(nb_classes, activation='softmax')
layers = [conv1, pool1,
          conv2, pool2,
          conv3, pool3,
          flatten, hidden4, hidden5,
          output]

model = Sequential(layers=layers)
# BUG FIX: categorical cross-entropy is the correct loss for one-hot
# multi-class targets; mean squared error yields weak, poorly scaled
# gradients for classification.
model.compile(loss='categorical_crossentropy', optimizer='adam',
              metrics=['accuracy'])

###The data set is way too huge for my poor 2013 MacPro so that I have to set the epoch to 1.
###We can use a smaller one from keras with the following code and add more epochs, or use AWS GPU:

#from keras.datasets import cifar10
#(X_train, y_train), (X_test, y_test) = cifar10.load_data()
#y_train = np_utils.to_categorical(y_train, nb_classes)

import tensorflow as tf
from sklearn.metrics import classification_report

# Train for a single epoch (the full data set is heavy for a laptop CPU)
# and validate on the held-out split after the epoch.
history = model.fit(
    X_train,
    y_train,
    nb_epoch=1,
    verbose=False,
    validation_data=(X_test, y_test),
)

# argmax converts both the predicted probability rows and the one-hot
# targets back to integer class labels for the per-class report.
y_pred = model.predict(X_test)
predicted_labels = y_pred.argmax(axis=1)
true_labels = y_test.argmax(axis=1)
print(classification_report(y_pred=predicted_labels, y_true=true_labels))

             #precision    recall  f1-score   support
#
          #0       0.35      0.70      0.47       987
          #1       0.54      0.55      0.55      1014
          #2       0.45      0.12      0.18       982
          #3       0.27      0.33      0.30       995
          #4       0.46      0.21      0.29      1004
          #5       0.36      0.46      0.41       999
          #6       0.53      0.39      0.45      1048
          #7       0.34      0.63      0.44       956
          #8       0.67      0.21      0.32      1007
          #9       0.49      0.47      0.48      1008
#
#avg / total       0.45      0.41      0.39     10000

Advertisements

Share on Facebook0Share on Google+0Tweet about this on TwitterShare on LinkedIn0