Horse Or Human Using TensorFlow 2.0

Beginning

Imports

Python

from functools import partial
from pathlib import Path

PyPi

from holoviews.operation.datashader import datashade
from tensorflow.keras.models import load_model
from tensorflow.keras import layers
from tensorflow.keras.applications.inception_v3 import InceptionV3
from tensorflow.keras.optimizers import RMSprop
from tensorflow.keras.preprocessing.image import ImageDataGenerator

import cv2
import holoviews
import hvplot.pandas
import matplotlib.pyplot as pyplot
import numpy
import pandas
import tensorflow

Graeae

from graeae import EmbedHoloviews, Timer, ZipDownloader

Setup

The Timer

TIMER = Timer()

Plotting

Embed = partial(
    EmbedHoloviews,
    folder_path="../../files/posts/keras/horse-or-human-using-tensorflow-20")
holoviews.extension("bokeh")

Storage

MODELS = Path("~/models/horses-vs-humans/").expanduser()
if not MODELS.is_dir():
    MODELS.mkdir(parents=True)

Middle

The Data Set

OUTPUT = "~/data/datasets/images/horse-or-human/training/"
URL = ("https://storage.googleapis.com/"
       "laurencemoroney-blog.appspot.com/"
       "horse-or-human.zip")

download = ZipDownloader(url=URL, target=OUTPUT)
download()
2019-08-13 13:51:57,970 ZipDownloader start: (ZipDownloader) Started: 2019-08-13 13:51:57.970533
2019-08-13 13:51:57,972 ZipDownloader download: Downloading the zip file
2019-08-13 13:52:08,157 ZipDownloader end: (ZipDownloader) Ended: 2019-08-13 13:52:08.157484
2019-08-13 13:52:08,159 ZipDownloader end: (ZipDownloader) Elapsed: 0:00:10.186951

output_path = download.target

The convention for training computer-vision models appears to be that the folder names label the images inside them. In this case we have a horses folder and a humans folder.
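
This folder-per-class layout is exactly what Keras' ImageDataGenerator.flow_from_directory relies on: each sub-folder becomes one class. Here's a minimal sketch of how that might look (the batch size and target size are illustrative assumptions, not necessarily what gets used for the actual training):

datagen = ImageDataGenerator(rescale=1/255)
training_generator = datagen.flow_from_directory(
    str(output_path),        # the folder holding the horses/ and humans/ sub-folders
    target_size=(300, 300),  # resize every image to 300 x 300
    batch_size=32,
    class_mode="binary",     # two classes, so a single 0/1 label per image
)
print(training_generator.class_indices)

The classes are assigned indices alphabetically, which should make horses 0 and humans 1.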

Here's what some of the files themselves are named.

horses_path = output_path/"horses"
humans_path = output_path/"humans"

for path in (horses_path, humans_path):
    print(path.name)
    for index, image in enumerate(path.iterdir()):
        print(f"File: {image.name}")
        if index == 9:
            break
    print()
horses
File: horse48-5.png
File: horse45-8.png
File: horse13-5.png
File: horse34-4.png
File: horse46-5.png
File: horse02-3.png
File: horse06-3.png
File: horse32-1.png
File: horse25-3.png
File: horse04-3.png

humans
File: human01-07.png
File: human02-11.png
File: human13-07.png
File: human10-10.png
File: human15-06.png
File: human05-15.png
File: human06-18.png
File: human16-28.png
File: human02-24.png
File: human10-05.png

So, in this case you can tell what they are from the file-names as well. How many images are there?

horse_files = list(horses_path.iterdir())
human_files = list(humans_path.iterdir())
print(f"Horse Images: {len(horse_files)}")
print(f"Human Images: {len(human_files)}")
print(f"Image Shape: {pyplot.imread(str(horse_files[0])).shape}")
Horse Images: 500
Human Images: 527
Image Shape: (300, 300, 4)

This is a fairly small data-set, and it's a little odd that there are more humans than horses. I'm assuming all the files have the same shape; going by the first one they are 300 x 300 with four channels (RGB plus an alpha channel).
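
Rather than trust the first file, a quick check over all of them (just a sketch using the matplotlib imread already imported as pyplot) would confirm whether the shapes really are uniform:

# collect the distinct shapes across every horse and human image
shapes = set(pyplot.imread(str(path)).shape
             for path in horse_files + human_files)
print(shapes)

Let's see what some of them look like.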

height = width = 300
count = 4
columns = 2
horse_plots = [datashade(holoviews.RGB.load_image(str(horse))).opts(
    height=height,
    width=width,
) for horse in horse_files[:count]]
human_plots = [datashade(holoviews.RGB.load_image(str(human))).opts(
    height=height,
    width=width,
) for human in human_files[:count]]

plot = holoviews.Layout(horse_plots + human_plots).cols(columns).opts(
    title="Horses and Humans")
Embed(plot=plot, file_name="horses_and_humans", 
      height_in_pixels=900)()

Figure Missing

As you can see, the people in the images aren't really humans (and, although it's less obvious, the horses aren't real horses either): these are computer-generated images.

The Model

input_shape = (300, 300, 3)
base_model = InceptionV3(input_shape=input_shape, include_top=False)
# freeze the pre-trained weights so only the layers added on top get trained
base_model.trainable = False
base_model.summary()
Model: "inception_v3"
__________________________________________________________________________________________________
Layer (type)                    Output Shape         Param #     Connected to                     
==================================================================================================
input_1 (InputLayer)            [(None, 300, 300, 3) 0                                            
__________________________________________________________________________________________________
conv2d (Conv2D)                 (None, 149, 149, 32) 864         input_1[0][0]                    
__________________________________________________________________________________________________
batch_normalization (BatchNorma (None, 149, 149, 32) 96          conv2d[0][0]                     
__________________________________________________________________________________________________
activation (Activation)         (None, 149, 149, 32) 0           batch_normalization[0][0]        
__________________________________________________________________________________________________
conv2d_1 (Conv2D)               (None, 147, 147, 32) 9216        activation[0][0]                 
__________________________________________________________________________________________________
batch_normalization_1 (BatchNor (None, 147, 147, 32) 96          conv2d_1[0][0]                   
__________________________________________________________________________________________________
activation_1 (Activation)       (None, 147, 147, 32) 0           batch_normalization_1[0][0]      
__________________________________________________________________________________________________
conv2d_2 (Conv2D)               (None, 147, 147, 64) 18432       activation_1[0][0]               
__________________________________________________________________________________________________
batch_normalization_2 (BatchNor (None, 147, 147, 64) 192         conv2d_2[0][0]                   
__________________________________________________________________________________________________
activation_2 (Activation)       (None, 147, 147, 64) 0           batch_normalization_2[0][0]      
__________________________________________________________________________________________________
max_pooling2d (MaxPooling2D)    (None, 73, 73, 64)   0           activation_2[0][0]               
__________________________________________________________________________________________________
conv2d_3 (Conv2D)               (None, 73, 73, 80)   5120        max_pooling2d[0][0]              
__________________________________________________________________________________________________
batch_normalization_3 (BatchNor (None, 73, 73, 80)   240         conv2d_3[0][0]                   
__________________________________________________________________________________________________
activation_3 (Activation)       (None, 73, 73, 80)   0           batch_normalization_3[0][0]      
__________________________________________________________________________________________________
conv2d_4 (Conv2D)               (None, 71, 71, 192)  138240      activation_3[0][0]               
__________________________________________________________________________________________________
batch_normalization_4 (BatchNor (None, 71, 71, 192)  576         conv2d_4[0][0]                   
__________________________________________________________________________________________________
activation_4 (Activation)       (None, 71, 71, 192)  0           batch_normalization_4[0][0]      
__________________________________________________________________________________________________
max_pooling2d_1 (MaxPooling2D)  (None, 35, 35, 192)  0           activation_4[0][0]               
__________________________________________________________________________________________________
conv2d_8 (Conv2D)               (None, 35, 35, 64)   12288       max_pooling2d_1[0][0]            
__________________________________________________________________________________________________
batch_normalization_8 (BatchNor (None, 35, 35, 64)   192         conv2d_8[0][0]                   
__________________________________________________________________________________________________
activation_8 (Activation)       (None, 35, 35, 64)   0           batch_normalization_8[0][0]      
__________________________________________________________________________________________________
conv2d_6 (Conv2D)               (None, 35, 35, 48)   9216        max_pooling2d_1[0][0]            
__________________________________________________________________________________________________
conv2d_9 (Conv2D)               (None, 35, 35, 96)   55296       activation_8[0][0]               
__________________________________________________________________________________________________
batch_normalization_6 (BatchNor (None, 35, 35, 48)   144         conv2d_6[0][0]                   
__________________________________________________________________________________________________
batch_normalization_9 (BatchNor (None, 35, 35, 96)   288         conv2d_9[0][0]                   
__________________________________________________________________________________________________
activation_6 (Activation)       (None, 35, 35, 48)   0           batch_normalization_6[0][0]      
__________________________________________________________________________________________________
activation_9 (Activation)       (None, 35, 35, 96)   0           batch_normalization_9[0][0]      
__________________________________________________________________________________________________
average_pooling2d (AveragePooli (None, 35, 35, 192)  0           max_pooling2d_1[0][0]            
__________________________________________________________________________________________________
conv2d_5 (Conv2D)               (None, 35, 35, 64)   12288       max_pooling2d_1[0][0]            
__________________________________________________________________________________________________
conv2d_7 (Conv2D)               (None, 35, 35, 64)   76800       activation_6[0][0]               
__________________________________________________________________________________________________
conv2d_10 (Conv2D)              (None, 35, 35, 96)   82944       activation_9[0][0]               
__________________________________________________________________________________________________
conv2d_11 (Conv2D)              (None, 35, 35, 32)   6144        average_pooling2d[0][0]          
__________________________________________________________________________________________________
batch_normalization_5 (BatchNor (None, 35, 35, 64)   192         conv2d_5[0][0]                   
__________________________________________________________________________________________________
batch_normalization_7 (BatchNor (None, 35, 35, 64)   192         conv2d_7[0][0]                   
__________________________________________________________________________________________________
batch_normalization_10 (BatchNo (None, 35, 35, 96)   288         conv2d_10[0][0]                  
__________________________________________________________________________________________________
batch_normalization_11 (BatchNo (None, 35, 35, 32)   96          conv2d_11[0][0]                  
__________________________________________________________________________________________________
activation_5 (Activation)       (None, 35, 35, 64)   0           batch_normalization_5[0][0]      
__________________________________________________________________________________________________
activation_7 (Activation)       (None, 35, 35, 64)   0           batch_normalization_7[0][0]      
__________________________________________________________________________________________________
activation_10 (Activation)      (None, 35, 35, 96)   0           batch_normalization_10[0][0]     
__________________________________________________________________________________________________
activation_11 (Activation)      (None, 35, 35, 32)   0           batch_normalization_11[0][0]     
__________________________________________________________________________________________________
mixed0 (Concatenate)            (None, 35, 35, 256)  0           activation_5[0][0]               
                                                                 activation_7[0][0]               
                                                                 activation_10[0][0]              
                                                                 activation_11[0][0]              
__________________________________________________________________________________________________
conv2d_15 (Conv2D)              (None, 35, 35, 64)   16384       mixed0[0][0]                     
__________________________________________________________________________________________________
batch_normalization_15 (BatchNo (None, 35, 35, 64)   192         conv2d_15[0][0]                  
__________________________________________________________________________________________________
activation_15 (Activation)      (None, 35, 35, 64)   0           batch_normalization_15[0][0]     
__________________________________________________________________________________________________
conv2d_13 (Conv2D)              (None, 35, 35, 48)   12288       mixed0[0][0]                     
__________________________________________________________________________________________________
conv2d_16 (Conv2D)              (None, 35, 35, 96)   55296       activation_15[0][0]              
__________________________________________________________________________________________________
batch_normalization_13 (BatchNo (None, 35, 35, 48)   144         conv2d_13[0][0]                  
__________________________________________________________________________________________________
batch_normalization_16 (BatchNo (None, 35, 35, 96)   288         conv2d_16[0][0]                  
__________________________________________________________________________________________________
activation_13 (Activation)      (None, 35, 35, 48)   0           batch_normalization_13[0][0]     
__________________________________________________________________________________________________
activation_16 (Activation)      (None, 35, 35, 96)   0           batch_normalization_16[0][0]     
__________________________________________________________________________________________________
average_pooling2d_1 (AveragePoo (None, 35, 35, 256)  0           mixed0[0][0]                     
__________________________________________________________________________________________________
conv2d_12 (Conv2D)              (None, 35, 35, 64)   16384       mixed0[0][0]                     
__________________________________________________________________________________________________
conv2d_14 (Conv2D)              (None, 35, 35, 64)   76800       activation_13[0][0]              
__________________________________________________________________________________________________
conv2d_17 (Conv2D)              (None, 35, 35, 96)   82944       activation_16[0][0]              
__________________________________________________________________________________________________
conv2d_18 (Conv2D)              (None, 35, 35, 64)   16384       average_pooling2d_1[0][0]        
__________________________________________________________________________________________________
batch_normalization_12 (BatchNo (None, 35, 35, 64)   192         conv2d_12[0][0]                  
__________________________________________________________________________________________________
batch_normalization_14 (BatchNo (None, 35, 35, 64)   192         conv2d_14[0][0]                  
__________________________________________________________________________________________________
batch_normalization_17 (BatchNo (None, 35, 35, 96)   288         conv2d_17[0][0]                  
__________________________________________________________________________________________________
batch_normalization_18 (BatchNo (None, 35, 35, 64)   192         conv2d_18[0][0]                  
__________________________________________________________________________________________________
activation_12 (Activation)      (None, 35, 35, 64)   0           batch_normalization_12[0][0]     
__________________________________________________________________________________________________
activation_14 (Activation)      (None, 35, 35, 64)   0           batch_normalization_14[0][0]     
__________________________________________________________________________________________________
activation_17 (Activation)      (None, 35, 35, 96)   0           batch_normalization_17[0][0]     
__________________________________________________________________________________________________
activation_18 (Activation)      (None, 35, 35, 64)   0           batch_normalization_18[0][0]     
__________________________________________________________________________________________________
mixed1 (Concatenate)            (None, 35, 35, 288)  0           activation_12[0][0]              
                                                                 activation_14[0][0]              
                                                                 activation_17[0][0]              
                                                                 activation_18[0][0]              
__________________________________________________________________________________________________
conv2d_22 (Conv2D)              (None, 35, 35, 64)   18432       mixed1[0][0]                     
__________________________________________________________________________________________________
batch_normalization_22 (BatchNo (None, 35, 35, 64)   192         conv2d_22[0][0]                  
__________________________________________________________________________________________________
activation_22 (Activation)      (None, 35, 35, 64)   0           batch_normalization_22[0][0]     
__________________________________________________________________________________________________
conv2d_20 (Conv2D)              (None, 35, 35, 48)   13824       mixed1[0][0]                     
__________________________________________________________________________________________________
conv2d_23 (Conv2D)              (None, 35, 35, 96)   55296       activation_22[0][0]              
__________________________________________________________________________________________________
batch_normalization_20 (BatchNo (None, 35, 35, 48)   144         conv2d_20[0][0]                  
__________________________________________________________________________________________________
batch_normalization_23 (BatchNo (None, 35, 35, 96)   288         conv2d_23[0][0]                  
__________________________________________________________________________________________________
activation_20 (Activation)      (None, 35, 35, 48)   0           batch_normalization_20[0][0]     
__________________________________________________________________________________________________
activation_23 (Activation)      (None, 35, 35, 96)   0           batch_normalization_23[0][0]     
__________________________________________________________________________________________________
average_pooling2d_2 (AveragePoo (None, 35, 35, 288)  0           mixed1[0][0]                     
__________________________________________________________________________________________________
conv2d_19 (Conv2D)              (None, 35, 35, 64)   18432       mixed1[0][0]                     
__________________________________________________________________________________________________
conv2d_21 (Conv2D)              (None, 35, 35, 64)   76800       activation_20[0][0]              
__________________________________________________________________________________________________
conv2d_24 (Conv2D)              (None, 35, 35, 96)   82944       activation_23[0][0]              
__________________________________________________________________________________________________
conv2d_25 (Conv2D)              (None, 35, 35, 64)   18432       average_pooling2d_2[0][0]        
__________________________________________________________________________________________________
batch_normalization_19 (BatchNo (None, 35, 35, 64)   192         conv2d_19[0][0]                  
__________________________________________________________________________________________________
batch_normalization_21 (BatchNo (None, 35, 35, 64)   192         conv2d_21[0][0]                  
__________________________________________________________________________________________________
batch_normalization_24 (BatchNo (None, 35, 35, 96)   288         conv2d_24[0][0]                  
__________________________________________________________________________________________________
batch_normalization_25 (BatchNo (None, 35, 35, 64)   192         conv2d_25[0][0]                  
__________________________________________________________________________________________________
activation_19 (Activation)      (None, 35, 35, 64)   0           batch_normalization_19[0][0]     
__________________________________________________________________________________________________
activation_21 (Activation)      (None, 35, 35, 64)   0           batch_normalization_21[0][0]     
__________________________________________________________________________________________________
activation_24 (Activation)      (None, 35, 35, 96)   0           batch_normalization_24[0][0]     
__________________________________________________________________________________________________
activation_25 (Activation)      (None, 35, 35, 64)   0           batch_normalization_25[0][0]     
__________________________________________________________________________________________________
mixed2 (Concatenate)            (None, 35, 35, 288)  0           activation_19[0][0]              
                                                                 activation_21[0][0]              
                                                                 activation_24[0][0]              
                                                                 activation_25[0][0]              
__________________________________________________________________________________________________
conv2d_27 (Conv2D)              (None, 35, 35, 64)   18432       mixed2[0][0]                     
__________________________________________________________________________________________________
batch_normalization_27 (BatchNo (None, 35, 35, 64)   192         conv2d_27[0][0]                  
__________________________________________________________________________________________________
activation_27 (Activation)      (None, 35, 35, 64)   0           batch_normalization_27[0][0]     
__________________________________________________________________________________________________
conv2d_28 (Conv2D)              (None, 35, 35, 96)   55296       activation_27[0][0]              
__________________________________________________________________________________________________
batch_normalization_28 (BatchNo (None, 35, 35, 96)   288         conv2d_28[0][0]                  
__________________________________________________________________________________________________
activation_28 (Activation)      (None, 35, 35, 96)   0           batch_normalization_28[0][0]     
__________________________________________________________________________________________________
conv2d_26 (Conv2D)              (None, 17, 17, 384)  995328      mixed2[0][0]                     
__________________________________________________________________________________________________
conv2d_29 (Conv2D)              (None, 17, 17, 96)   82944       activation_28[0][0]              
__________________________________________________________________________________________________
batch_normalization_26 (BatchNo (None, 17, 17, 384)  1152        conv2d_26[0][0]                  
__________________________________________________________________________________________________
batch_normalization_29 (BatchNo (None, 17, 17, 96)   288         conv2d_29[0][0]                  
__________________________________________________________________________________________________
activation_26 (Activation)      (None, 17, 17, 384)  0           batch_normalization_26[0][0]     
__________________________________________________________________________________________________
activation_29 (Activation)      (None, 17, 17, 96)   0           batch_normalization_29[0][0]     
__________________________________________________________________________________________________
max_pooling2d_2 (MaxPooling2D)  (None, 17, 17, 288)  0           mixed2[0][0]                     
__________________________________________________________________________________________________
mixed3 (Concatenate)            (None, 17, 17, 768)  0           activation_26[0][0]              
                                                                 activation_29[0][0]              
                                                                 max_pooling2d_2[0][0]            
__________________________________________________________________________________________________
conv2d_34 (Conv2D)              (None, 17, 17, 128)  98304       mixed3[0][0]                     
__________________________________________________________________________________________________
batch_normalization_34 (BatchNo (None, 17, 17, 128)  384         conv2d_34[0][0]                  
__________________________________________________________________________________________________
activation_34 (Activation)      (None, 17, 17, 128)  0           batch_normalization_34[0][0]     
__________________________________________________________________________________________________
conv2d_35 (Conv2D)              (None, 17, 17, 128)  114688      activation_34[0][0]              
__________________________________________________________________________________________________
batch_normalization_35 (BatchNo (None, 17, 17, 128)  384         conv2d_35[0][0]                  
__________________________________________________________________________________________________
activation_35 (Activation)      (None, 17, 17, 128)  0           batch_normalization_35[0][0]     
__________________________________________________________________________________________________
conv2d_31 (Conv2D)              (None, 17, 17, 128)  98304       mixed3[0][0]                     
__________________________________________________________________________________________________
conv2d_36 (Conv2D)              (None, 17, 17, 128)  114688      activation_35[0][0]              
__________________________________________________________________________________________________
batch_normalization_31 (BatchNo (None, 17, 17, 128)  384         conv2d_31[0][0]                  
__________________________________________________________________________________________________
batch_normalization_36 (BatchNo (None, 17, 17, 128)  384         conv2d_36[0][0]                  
__________________________________________________________________________________________________
activation_31 (Activation)      (None, 17, 17, 128)  0           batch_normalization_31[0][0]     
__________________________________________________________________________________________________
activation_36 (Activation)      (None, 17, 17, 128)  0           batch_normalization_36[0][0]     
__________________________________________________________________________________________________
conv2d_32 (Conv2D)              (None, 17, 17, 128)  114688      activation_31[0][0]              
__________________________________________________________________________________________________
conv2d_37 (Conv2D)              (None, 17, 17, 128)  114688      activation_36[0][0]              
__________________________________________________________________________________________________
batch_normalization_32 (BatchNo (None, 17, 17, 128)  384         conv2d_32[0][0]                  
__________________________________________________________________________________________________
batch_normalization_37 (BatchNo (None, 17, 17, 128)  384         conv2d_37[0][0]                  
__________________________________________________________________________________________________
activation_32 (Activation)      (None, 17, 17, 128)  0           batch_normalization_32[0][0]     
__________________________________________________________________________________________________
activation_37 (Activation)      (None, 17, 17, 128)  0           batch_normalization_37[0][0]     
__________________________________________________________________________________________________
average_pooling2d_3 (AveragePoo (None, 17, 17, 768)  0           mixed3[0][0]                     
__________________________________________________________________________________________________
conv2d_30 (Conv2D)              (None, 17, 17, 192)  147456      mixed3[0][0]                     
__________________________________________________________________________________________________
conv2d_33 (Conv2D)              (None, 17, 17, 192)  172032      activation_32[0][0]              
__________________________________________________________________________________________________
conv2d_38 (Conv2D)              (None, 17, 17, 192)  172032      activation_37[0][0]              
__________________________________________________________________________________________________
conv2d_39 (Conv2D)              (None, 17, 17, 192)  147456      average_pooling2d_3[0][0]        
__________________________________________________________________________________________________
batch_normalization_30 (BatchNo (None, 17, 17, 192)  576         conv2d_30[0][0]                  
__________________________________________________________________________________________________
batch_normalization_33 (BatchNo (None, 17, 17, 192)  576         conv2d_33[0][0]                  
__________________________________________________________________________________________________
batch_normalization_38 (BatchNo (None, 17, 17, 192)  576         conv2d_38[0][0]                  
__________________________________________________________________________________________________
batch_normalization_39 (BatchNo (None, 17, 17, 192)  576         conv2d_39[0][0]                  
__________________________________________________________________________________________________
activation_30 (Activation)      (None, 17, 17, 192)  0           batch_normalization_30[0][0]     
__________________________________________________________________________________________________
activation_33 (Activation)      (None, 17, 17, 192)  0           batch_normalization_33[0][0]     
__________________________________________________________________________________________________
activation_38 (Activation)      (None, 17, 17, 192)  0           batch_normalization_38[0][0]     
__________________________________________________________________________________________________
activation_39 (Activation)      (None, 17, 17, 192)  0           batch_normalization_39[0][0]     
__________________________________________________________________________________________________
mixed4 (Concatenate)            (None, 17, 17, 768)  0           activation_30[0][0]              
                                                                 activation_33[0][0]              
                                                                 activation_38[0][0]              
                                                                 activation_39[0][0]              
__________________________________________________________________________________________________
conv2d_44 (Conv2D)              (None, 17, 17, 160)  122880      mixed4[0][0]                     
__________________________________________________________________________________________________
batch_normalization_44 (BatchNo (None, 17, 17, 160)  480         conv2d_44[0][0]                  
__________________________________________________________________________________________________
activation_44 (Activation)      (None, 17, 17, 160)  0           batch_normalization_44[0][0]     
__________________________________________________________________________________________________
conv2d_45 (Conv2D)              (None, 17, 17, 160)  179200      activation_44[0][0]              
__________________________________________________________________________________________________
batch_normalization_45 (BatchNo (None, 17, 17, 160)  480         conv2d_45[0][0]                  
__________________________________________________________________________________________________
activation_45 (Activation)      (None, 17, 17, 160)  0           batch_normalization_45[0][0]     
__________________________________________________________________________________________________
conv2d_41 (Conv2D)              (None, 17, 17, 160)  122880      mixed4[0][0]                     
__________________________________________________________________________________________________
conv2d_46 (Conv2D)              (None, 17, 17, 160)  179200      activation_45[0][0]              
__________________________________________________________________________________________________
batch_normalization_41 (BatchNo (None, 17, 17, 160)  480         conv2d_41[0][0]                  
__________________________________________________________________________________________________
batch_normalization_46 (BatchNo (None, 17, 17, 160)  480         conv2d_46[0][0]                  
__________________________________________________________________________________________________
activation_41 (Activation)      (None, 17, 17, 160)  0           batch_normalization_41[0][0]     
__________________________________________________________________________________________________
activation_46 (Activation)      (None, 17, 17, 160)  0           batch_normalization_46[0][0]     
__________________________________________________________________________________________________
conv2d_42 (Conv2D)              (None, 17, 17, 160)  179200      activation_41[0][0]              
__________________________________________________________________________________________________
conv2d_47 (Conv2D)              (None, 17, 17, 160)  179200      activation_46[0][0]              
__________________________________________________________________________________________________
batch_normalization_42 (BatchNo (None, 17, 17, 160)  480         conv2d_42[0][0]                  
__________________________________________________________________________________________________
batch_normalization_47 (BatchNo (None, 17, 17, 160)  480         conv2d_47[0][0]                  
__________________________________________________________________________________________________
activation_42 (Activation)      (None, 17, 17, 160)  0           batch_normalization_42[0][0]     
__________________________________________________________________________________________________
activation_47 (Activation)      (None, 17, 17, 160)  0           batch_normalization_47[0][0]     
__________________________________________________________________________________________________
average_pooling2d_4 (AveragePoo (None, 17, 17, 768)  0           mixed4[0][0]                     
__________________________________________________________________________________________________
conv2d_40 (Conv2D)              (None, 17, 17, 192)  147456      mixed4[0][0]                     
__________________________________________________________________________________________________
conv2d_43 (Conv2D)              (None, 17, 17, 192)  215040      activation_42[0][0]              
__________________________________________________________________________________________________
conv2d_48 (Conv2D)              (None, 17, 17, 192)  215040      activation_47[0][0]              
__________________________________________________________________________________________________
conv2d_49 (Conv2D)              (None, 17, 17, 192)  147456      average_pooling2d_4[0][0]        
__________________________________________________________________________________________________
batch_normalization_40 (BatchNo (None, 17, 17, 192)  576         conv2d_40[0][0]                  
__________________________________________________________________________________________________
batch_normalization_43 (BatchNo (None, 17, 17, 192)  576         conv2d_43[0][0]                  
__________________________________________________________________________________________________
batch_normalization_48 (BatchNo (None, 17, 17, 192)  576         conv2d_48[0][0]                  
__________________________________________________________________________________________________
batch_normalization_49 (BatchNo (None, 17, 17, 192)  576         conv2d_49[0][0]                  
__________________________________________________________________________________________________
activation_40 (Activation)      (None, 17, 17, 192)  0           batch_normalization_40[0][0]     
__________________________________________________________________________________________________
activation_43 (Activation)      (None, 17, 17, 192)  0           batch_normalization_43[0][0]     
__________________________________________________________________________________________________
activation_48 (Activation)      (None, 17, 17, 192)  0           batch_normalization_48[0][0]     
__________________________________________________________________________________________________
activation_49 (Activation)      (None, 17, 17, 192)  0           batch_normalization_49[0][0]     
__________________________________________________________________________________________________
mixed5 (Concatenate)            (None, 17, 17, 768)  0           activation_40[0][0]              
                                                                 activation_43[0][0]              
                                                                 activation_48[0][0]              
                                                                 activation_49[0][0]              
__________________________________________________________________________________________________
conv2d_54 (Conv2D)              (None, 17, 17, 160)  122880      mixed5[0][0]                     
__________________________________________________________________________________________________
batch_normalization_54 (BatchNo (None, 17, 17, 160)  480         conv2d_54[0][0]                  
__________________________________________________________________________________________________
activation_54 (Activation)      (None, 17, 17, 160)  0           batch_normalization_54[0][0]     
__________________________________________________________________________________________________
conv2d_55 (Conv2D)              (None, 17, 17, 160)  179200      activation_54[0][0]              
__________________________________________________________________________________________________
batch_normalization_55 (BatchNo (None, 17, 17, 160)  480         conv2d_55[0][0]                  
__________________________________________________________________________________________________
activation_55 (Activation)      (None, 17, 17, 160)  0           batch_normalization_55[0][0]     
__________________________________________________________________________________________________
conv2d_51 (Conv2D)              (None, 17, 17, 160)  122880      mixed5[0][0]                     
__________________________________________________________________________________________________
conv2d_56 (Conv2D)              (None, 17, 17, 160)  179200      activation_55[0][0]              
__________________________________________________________________________________________________
batch_normalization_51 (BatchNo (None, 17, 17, 160)  480         conv2d_51[0][0]                  
__________________________________________________________________________________________________
batch_normalization_56 (BatchNo (None, 17, 17, 160)  480         conv2d_56[0][0]                  
__________________________________________________________________________________________________
activation_51 (Activation)      (None, 17, 17, 160)  0           batch_normalization_51[0][0]     
__________________________________________________________________________________________________
activation_56 (Activation)      (None, 17, 17, 160)  0           batch_normalization_56[0][0]     
__________________________________________________________________________________________________
conv2d_52 (Conv2D)              (None, 17, 17, 160)  179200      activation_51[0][0]              
__________________________________________________________________________________________________
conv2d_57 (Conv2D)              (None, 17, 17, 160)  179200      activation_56[0][0]              
__________________________________________________________________________________________________
batch_normalization_52 (BatchNo (None, 17, 17, 160)  480         conv2d_52[0][0]                  
__________________________________________________________________________________________________
batch_normalization_57 (BatchNo (None, 17, 17, 160)  480         conv2d_57[0][0]                  
__________________________________________________________________________________________________
activation_52 (Activation)      (None, 17, 17, 160)  0           batch_normalization_52[0][0]     
__________________________________________________________________________________________________
activation_57 (Activation)      (None, 17, 17, 160)  0           batch_normalization_57[0][0]     
__________________________________________________________________________________________________
average_pooling2d_5 (AveragePoo (None, 17, 17, 768)  0           mixed5[0][0]                     
__________________________________________________________________________________________________
conv2d_50 (Conv2D)              (None, 17, 17, 192)  147456      mixed5[0][0]                     
__________________________________________________________________________________________________
conv2d_53 (Conv2D)              (None, 17, 17, 192)  215040      activation_52[0][0]              
__________________________________________________________________________________________________
conv2d_58 (Conv2D)              (None, 17, 17, 192)  215040      activation_57[0][0]              
__________________________________________________________________________________________________
conv2d_59 (Conv2D)              (None, 17, 17, 192)  147456      average_pooling2d_5[0][0]        
__________________________________________________________________________________________________
batch_normalization_50 (BatchNo (None, 17, 17, 192)  576         conv2d_50[0][0]                  
__________________________________________________________________________________________________
batch_normalization_53 (BatchNo (None, 17, 17, 192)  576         conv2d_53[0][0]                  
__________________________________________________________________________________________________
batch_normalization_58 (BatchNo (None, 17, 17, 192)  576         conv2d_58[0][0]                  
__________________________________________________________________________________________________
batch_normalization_59 (BatchNo (None, 17, 17, 192)  576         conv2d_59[0][0]                  
__________________________________________________________________________________________________
activation_50 (Activation)      (None, 17, 17, 192)  0           batch_normalization_50[0][0]     
__________________________________________________________________________________________________
activation_53 (Activation)      (None, 17, 17, 192)  0           batch_normalization_53[0][0]     
__________________________________________________________________________________________________
activation_58 (Activation)      (None, 17, 17, 192)  0           batch_normalization_58[0][0]     
__________________________________________________________________________________________________
activation_59 (Activation)      (None, 17, 17, 192)  0           batch_normalization_59[0][0]     
__________________________________________________________________________________________________
mixed6 (Concatenate)            (None, 17, 17, 768)  0           activation_50[0][0]              
                                                                 activation_53[0][0]              
                                                                 activation_58[0][0]              
                                                                 activation_59[0][0]              
__________________________________________________________________________________________________
conv2d_64 (Conv2D)              (None, 17, 17, 192)  147456      mixed6[0][0]                     
__________________________________________________________________________________________________
batch_normalization_64 (BatchNo (None, 17, 17, 192)  576         conv2d_64[0][0]                  
__________________________________________________________________________________________________
activation_64 (Activation)      (None, 17, 17, 192)  0           batch_normalization_64[0][0]     
__________________________________________________________________________________________________
conv2d_65 (Conv2D)              (None, 17, 17, 192)  258048      activation_64[0][0]              
__________________________________________________________________________________________________
batch_normalization_65 (BatchNo (None, 17, 17, 192)  576         conv2d_65[0][0]                  
__________________________________________________________________________________________________
activation_65 (Activation)      (None, 17, 17, 192)  0           batch_normalization_65[0][0]     
__________________________________________________________________________________________________
conv2d_61 (Conv2D)              (None, 17, 17, 192)  147456      mixed6[0][0]                     
__________________________________________________________________________________________________
conv2d_66 (Conv2D)              (None, 17, 17, 192)  258048      activation_65[0][0]              
__________________________________________________________________________________________________
batch_normalization_61 (BatchNo (None, 17, 17, 192)  576         conv2d_61[0][0]                  
__________________________________________________________________________________________________
batch_normalization_66 (BatchNo (None, 17, 17, 192)  576         conv2d_66[0][0]                  
__________________________________________________________________________________________________
activation_61 (Activation)      (None, 17, 17, 192)  0           batch_normalization_61[0][0]     
__________________________________________________________________________________________________
activation_66 (Activation)      (None, 17, 17, 192)  0           batch_normalization_66[0][0]     
__________________________________________________________________________________________________
conv2d_62 (Conv2D)              (None, 17, 17, 192)  258048      activation_61[0][0]              
__________________________________________________________________________________________________
conv2d_67 (Conv2D)              (None, 17, 17, 192)  258048      activation_66[0][0]              
__________________________________________________________________________________________________
batch_normalization_62 (BatchNo (None, 17, 17, 192)  576         conv2d_62[0][0]                  
__________________________________________________________________________________________________
batch_normalization_67 (BatchNo (None, 17, 17, 192)  576         conv2d_67[0][0]                  
__________________________________________________________________________________________________
activation_62 (Activation)      (None, 17, 17, 192)  0           batch_normalization_62[0][0]     
__________________________________________________________________________________________________
activation_67 (Activation)      (None, 17, 17, 192)  0           batch_normalization_67[0][0]     
__________________________________________________________________________________________________
average_pooling2d_6 (AveragePoo (None, 17, 17, 768)  0           mixed6[0][0]                     
__________________________________________________________________________________________________
conv2d_60 (Conv2D)              (None, 17, 17, 192)  147456      mixed6[0][0]                     
__________________________________________________________________________________________________
conv2d_63 (Conv2D)              (None, 17, 17, 192)  258048      activation_62[0][0]              
__________________________________________________________________________________________________
conv2d_68 (Conv2D)              (None, 17, 17, 192)  258048      activation_67[0][0]              
__________________________________________________________________________________________________
conv2d_69 (Conv2D)              (None, 17, 17, 192)  147456      average_pooling2d_6[0][0]        
__________________________________________________________________________________________________
batch_normalization_60 (BatchNo (None, 17, 17, 192)  576         conv2d_60[0][0]                  
__________________________________________________________________________________________________
batch_normalization_63 (BatchNo (None, 17, 17, 192)  576         conv2d_63[0][0]                  
__________________________________________________________________________________________________
batch_normalization_68 (BatchNo (None, 17, 17, 192)  576         conv2d_68[0][0]                  
__________________________________________________________________________________________________
batch_normalization_69 (BatchNo (None, 17, 17, 192)  576         conv2d_69[0][0]                  
__________________________________________________________________________________________________
activation_60 (Activation)      (None, 17, 17, 192)  0           batch_normalization_60[0][0]     
__________________________________________________________________________________________________
activation_63 (Activation)      (None, 17, 17, 192)  0           batch_normalization_63[0][0]     
__________________________________________________________________________________________________
activation_68 (Activation)      (None, 17, 17, 192)  0           batch_normalization_68[0][0]     
__________________________________________________________________________________________________
activation_69 (Activation)      (None, 17, 17, 192)  0           batch_normalization_69[0][0]     
__________________________________________________________________________________________________
mixed7 (Concatenate)            (None, 17, 17, 768)  0           activation_60[0][0]              
                                                                 activation_63[0][0]              
                                                                 activation_68[0][0]              
                                                                 activation_69[0][0]              
__________________________________________________________________________________________________
conv2d_72 (Conv2D)              (None, 17, 17, 192)  147456      mixed7[0][0]                     
__________________________________________________________________________________________________
batch_normalization_72 (BatchNo (None, 17, 17, 192)  576         conv2d_72[0][0]                  
__________________________________________________________________________________________________
activation_72 (Activation)      (None, 17, 17, 192)  0           batch_normalization_72[0][0]     
__________________________________________________________________________________________________
conv2d_73 (Conv2D)              (None, 17, 17, 192)  258048      activation_72[0][0]              
__________________________________________________________________________________________________
batch_normalization_73 (BatchNo (None, 17, 17, 192)  576         conv2d_73[0][0]                  
__________________________________________________________________________________________________
activation_73 (Activation)      (None, 17, 17, 192)  0           batch_normalization_73[0][0]     
__________________________________________________________________________________________________
conv2d_70 (Conv2D)              (None, 17, 17, 192)  147456      mixed7[0][0]                     
__________________________________________________________________________________________________
conv2d_74 (Conv2D)              (None, 17, 17, 192)  258048      activation_73[0][0]              
__________________________________________________________________________________________________
batch_normalization_70 (BatchNo (None, 17, 17, 192)  576         conv2d_70[0][0]                  
__________________________________________________________________________________________________
batch_normalization_74 (BatchNo (None, 17, 17, 192)  576         conv2d_74[0][0]                  
__________________________________________________________________________________________________
activation_70 (Activation)      (None, 17, 17, 192)  0           batch_normalization_70[0][0]     
__________________________________________________________________________________________________
activation_74 (Activation)      (None, 17, 17, 192)  0           batch_normalization_74[0][0]     
__________________________________________________________________________________________________
conv2d_71 (Conv2D)              (None, 8, 8, 320)    552960      activation_70[0][0]              
__________________________________________________________________________________________________
conv2d_75 (Conv2D)              (None, 8, 8, 192)    331776      activation_74[0][0]              
__________________________________________________________________________________________________
batch_normalization_71 (BatchNo (None, 8, 8, 320)    960         conv2d_71[0][0]                  
__________________________________________________________________________________________________
batch_normalization_75 (BatchNo (None, 8, 8, 192)    576         conv2d_75[0][0]                  
__________________________________________________________________________________________________
activation_71 (Activation)      (None, 8, 8, 320)    0           batch_normalization_71[0][0]     
__________________________________________________________________________________________________
activation_75 (Activation)      (None, 8, 8, 192)    0           batch_normalization_75[0][0]     
__________________________________________________________________________________________________
max_pooling2d_3 (MaxPooling2D)  (None, 8, 8, 768)    0           mixed7[0][0]                     
__________________________________________________________________________________________________
mixed8 (Concatenate)            (None, 8, 8, 1280)   0           activation_71[0][0]              
                                                                 activation_75[0][0]              
                                                                 max_pooling2d_3[0][0]            
__________________________________________________________________________________________________
conv2d_80 (Conv2D)              (None, 8, 8, 448)    573440      mixed8[0][0]                     
__________________________________________________________________________________________________
batch_normalization_80 (BatchNo (None, 8, 8, 448)    1344        conv2d_80[0][0]                  
__________________________________________________________________________________________________
activation_80 (Activation)      (None, 8, 8, 448)    0           batch_normalization_80[0][0]     
__________________________________________________________________________________________________
conv2d_77 (Conv2D)              (None, 8, 8, 384)    491520      mixed8[0][0]                     
__________________________________________________________________________________________________
conv2d_81 (Conv2D)              (None, 8, 8, 384)    1548288     activation_80[0][0]              
__________________________________________________________________________________________________
batch_normalization_77 (BatchNo (None, 8, 8, 384)    1152        conv2d_77[0][0]                  
__________________________________________________________________________________________________
batch_normalization_81 (BatchNo (None, 8, 8, 384)    1152        conv2d_81[0][0]                  
__________________________________________________________________________________________________
activation_77 (Activation)      (None, 8, 8, 384)    0           batch_normalization_77[0][0]     
__________________________________________________________________________________________________
activation_81 (Activation)      (None, 8, 8, 384)    0           batch_normalization_81[0][0]     
__________________________________________________________________________________________________
conv2d_78 (Conv2D)              (None, 8, 8, 384)    442368      activation_77[0][0]              
__________________________________________________________________________________________________
conv2d_79 (Conv2D)              (None, 8, 8, 384)    442368      activation_77[0][0]              
__________________________________________________________________________________________________
conv2d_82 (Conv2D)              (None, 8, 8, 384)    442368      activation_81[0][0]              
__________________________________________________________________________________________________
conv2d_83 (Conv2D)              (None, 8, 8, 384)    442368      activation_81[0][0]              
__________________________________________________________________________________________________
average_pooling2d_7 (AveragePoo (None, 8, 8, 1280)   0           mixed8[0][0]                     
__________________________________________________________________________________________________
conv2d_76 (Conv2D)              (None, 8, 8, 320)    409600      mixed8[0][0]                     
__________________________________________________________________________________________________
batch_normalization_78 (BatchNo (None, 8, 8, 384)    1152        conv2d_78[0][0]                  
__________________________________________________________________________________________________
batch_normalization_79 (BatchNo (None, 8, 8, 384)    1152        conv2d_79[0][0]                  
__________________________________________________________________________________________________
batch_normalization_82 (BatchNo (None, 8, 8, 384)    1152        conv2d_82[0][0]                  
__________________________________________________________________________________________________
batch_normalization_83 (BatchNo (None, 8, 8, 384)    1152        conv2d_83[0][0]                  
__________________________________________________________________________________________________
conv2d_84 (Conv2D)              (None, 8, 8, 192)    245760      average_pooling2d_7[0][0]        
__________________________________________________________________________________________________
batch_normalization_76 (BatchNo (None, 8, 8, 320)    960         conv2d_76[0][0]                  
__________________________________________________________________________________________________
activation_78 (Activation)      (None, 8, 8, 384)    0           batch_normalization_78[0][0]     
__________________________________________________________________________________________________
activation_79 (Activation)      (None, 8, 8, 384)    0           batch_normalization_79[0][0]     
__________________________________________________________________________________________________
activation_82 (Activation)      (None, 8, 8, 384)    0           batch_normalization_82[0][0]     
__________________________________________________________________________________________________
activation_83 (Activation)      (None, 8, 8, 384)    0           batch_normalization_83[0][0]     
__________________________________________________________________________________________________
batch_normalization_84 (BatchNo (None, 8, 8, 192)    576         conv2d_84[0][0]                  
__________________________________________________________________________________________________
activation_76 (Activation)      (None, 8, 8, 320)    0           batch_normalization_76[0][0]     
__________________________________________________________________________________________________
mixed9_0 (Concatenate)          (None, 8, 8, 768)    0           activation_78[0][0]              
                                                                 activation_79[0][0]              
__________________________________________________________________________________________________
concatenate (Concatenate)       (None, 8, 8, 768)    0           activation_82[0][0]              
                                                                 activation_83[0][0]              
__________________________________________________________________________________________________
activation_84 (Activation)      (None, 8, 8, 192)    0           batch_normalization_84[0][0]     
__________________________________________________________________________________________________
mixed9 (Concatenate)            (None, 8, 8, 2048)   0           activation_76[0][0]              
                                                                 mixed9_0[0][0]                   
                                                                 concatenate[0][0]                
                                                                 activation_84[0][0]              
__________________________________________________________________________________________________
conv2d_89 (Conv2D)              (None, 8, 8, 448)    917504      mixed9[0][0]                     
__________________________________________________________________________________________________
batch_normalization_89 (BatchNo (None, 8, 8, 448)    1344        conv2d_89[0][0]                  
__________________________________________________________________________________________________
activation_89 (Activation)      (None, 8, 8, 448)    0           batch_normalization_89[0][0]     
__________________________________________________________________________________________________
conv2d_86 (Conv2D)              (None, 8, 8, 384)    786432      mixed9[0][0]                     
__________________________________________________________________________________________________
conv2d_90 (Conv2D)              (None, 8, 8, 384)    1548288     activation_89[0][0]              
__________________________________________________________________________________________________
batch_normalization_86 (BatchNo (None, 8, 8, 384)    1152        conv2d_86[0][0]                  
__________________________________________________________________________________________________
batch_normalization_90 (BatchNo (None, 8, 8, 384)    1152        conv2d_90[0][0]                  
__________________________________________________________________________________________________
activation_86 (Activation)      (None, 8, 8, 384)    0           batch_normalization_86[0][0]     
__________________________________________________________________________________________________
activation_90 (Activation)      (None, 8, 8, 384)    0           batch_normalization_90[0][0]     
__________________________________________________________________________________________________
conv2d_87 (Conv2D)              (None, 8, 8, 384)    442368      activation_86[0][0]              
__________________________________________________________________________________________________
conv2d_88 (Conv2D)              (None, 8, 8, 384)    442368      activation_86[0][0]              
__________________________________________________________________________________________________
conv2d_91 (Conv2D)              (None, 8, 8, 384)    442368      activation_90[0][0]              
__________________________________________________________________________________________________
conv2d_92 (Conv2D)              (None, 8, 8, 384)    442368      activation_90[0][0]              
__________________________________________________________________________________________________
average_pooling2d_8 (AveragePoo (None, 8, 8, 2048)   0           mixed9[0][0]                     
__________________________________________________________________________________________________
conv2d_85 (Conv2D)              (None, 8, 8, 320)    655360      mixed9[0][0]                     
__________________________________________________________________________________________________
batch_normalization_87 (BatchNo (None, 8, 8, 384)    1152        conv2d_87[0][0]                  
__________________________________________________________________________________________________
batch_normalization_88 (BatchNo (None, 8, 8, 384)    1152        conv2d_88[0][0]                  
__________________________________________________________________________________________________
batch_normalization_91 (BatchNo (None, 8, 8, 384)    1152        conv2d_91[0][0]                  
__________________________________________________________________________________________________
batch_normalization_92 (BatchNo (None, 8, 8, 384)    1152        conv2d_92[0][0]                  
__________________________________________________________________________________________________
conv2d_93 (Conv2D)              (None, 8, 8, 192)    393216      average_pooling2d_8[0][0]        
__________________________________________________________________________________________________
batch_normalization_85 (BatchNo (None, 8, 8, 320)    960         conv2d_85[0][0]                  
__________________________________________________________________________________________________
activation_87 (Activation)      (None, 8, 8, 384)    0           batch_normalization_87[0][0]     
__________________________________________________________________________________________________
activation_88 (Activation)      (None, 8, 8, 384)    0           batch_normalization_88[0][0]     
__________________________________________________________________________________________________
activation_91 (Activation)      (None, 8, 8, 384)    0           batch_normalization_91[0][0]     
__________________________________________________________________________________________________
activation_92 (Activation)      (None, 8, 8, 384)    0           batch_normalization_92[0][0]     
__________________________________________________________________________________________________
batch_normalization_93 (BatchNo (None, 8, 8, 192)    576         conv2d_93[0][0]                  
__________________________________________________________________________________________________
activation_85 (Activation)      (None, 8, 8, 320)    0           batch_normalization_85[0][0]     
__________________________________________________________________________________________________
mixed9_1 (Concatenate)          (None, 8, 8, 768)    0           activation_87[0][0]              
                                                                 activation_88[0][0]              
__________________________________________________________________________________________________
concatenate_1 (Concatenate)     (None, 8, 8, 768)    0           activation_91[0][0]              
                                                                 activation_92[0][0]              
__________________________________________________________________________________________________
activation_93 (Activation)      (None, 8, 8, 192)    0           batch_normalization_93[0][0]     
__________________________________________________________________________________________________
mixed10 (Concatenate)           (None, 8, 8, 2048)   0           activation_85[0][0]              
                                                                 mixed9_1[0][0]                   
                                                                 concatenate_1[0][0]              
                                                                 activation_93[0][0]              
==================================================================================================
Total params: 21,802,784
Trainable params: 0
Non-trainable params: 21,802,784
__________________________________________________________________________________________________
None

Create the Output Layers

# pool the base model's final feature maps down to one vector per image
x = layers.GlobalAveragePooling2D()(base_model.output)
# a fully-connected layer to learn from the Inception features
x = layers.Dense(1024, activation="relu")(x)
# dropout to reduce overfitting
x = layers.Dropout(0.2)(x)
# a single sigmoid output, since this is a binary (horse vs human) problem
x = layers.Dense(1, activation="sigmoid")(x)

Now build the model, combining the pre-trained Inception layers with the new dense layers that we're going to train. Since we only have two classes, the output activation function is the sigmoid.

model = tensorflow.keras.Model(
    base_model.input,
    x,
)
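
As a sanity check (this wasn't in the original notebook), you can confirm that only the new output layers will be trained, since the Inception layers were frozen - the trainable count should be tiny compared to the 21,802,784 frozen parameters in the summary above.

# a quick check that only the new head is trainable (sketch, not from the original run)
trainable = sum(tensorflow.keras.backend.count_params(weight)
                for weight in model.trainable_weights)
frozen = sum(tensorflow.keras.backend.count_params(weight)
             for weight in model.non_trainable_weights)
print(f"Trainable parameters: {trainable:,}")
print(f"Frozen parameters: {frozen:,}")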

Compile the Model

model.compile(optimizer=RMSprop(lr=0.0001),
              loss="binary_crossentropy",
              metrics=["acc"])

Train the Model

A Model Saver

best_model = MODELS/"inception_transfer.hdf5"
checkpoint = tensorflow.keras.callbacks.ModelCheckpoint(
    str(best_model), monitor="val_acc", verbose=1, 
    save_best_only=True)

A Good Enough Callback

class GoodEnough(tensorflow.keras.callbacks.Callback):
    """Stops the training once the training accuracy is high enough"""
    def on_epoch_end(self, epoch, logs=None):
        logs = logs or {}
        if logs.get("acc", 0) > 0.999:
            print("\nReached 99.9% accuracy so cancelling training!")
            self.model.stop_training = True

A Data Generator

This bundles up the steps to build the data generator.

class Data:
    """creates the data generators

    Args:
     path: path to the images
     validation_split: fraction that goes to the validation set
     batch_size: size for the batches in the epochs
    """
    def __init__(self, path: str, validation_split: float=0.2,
                 batch_size: int=20) -> None:
        self.path = path
        self.validation_split = validation_split
        self.batch_size = batch_size
        self._data_generator = None
        self._testing_data_generator = None
        self._training_generator = None
        self._validation_generator = None
        return

    @property
    def data_generator(self) -> ImageDataGenerator:
        """The data generator for training and validation"""
        if self._data_generator is None:
            self._data_generator = ImageDataGenerator(
                rescale=1/255,
                rotation_range=40,
                width_shift_range=0.2,
                height_shift_range=0.2,
                horizontal_flip=True,
                shear_range=0.2,
                zoom_range=0.2,
                fill_mode="nearest",
                validation_split=self.validation_split)
        return self._data_generator

    @property
    def training_generator(self):
        """The training data generator"""
        if self._training_generator is None:
            self._training_generator = self.data_generator.flow_from_directory(
                self.path,
                batch_size=self.batch_size,
                class_mode="binary",
                target_size=(300, 300),
                subset="training",
            )
        return self._training_generator

    @property
    def validation_generator(self):
        """the validation data generator"""
        if self._validation_generator is None:
            self._validation_generator = self.data_generator.flow_from_directory(
                self.path,
                batch_size=self.batch_size,
                class_mode="binary",
                target_size=(300, 300),
                subset="validation",
            )
        return self._validation_generator

    def __str__(self) -> str:
        return (f"(Data) - Path: {self.path}, "
                f"Validation Split: {self.validation_split}, "
                f"Batch Size: {self.batch_size}")

A Model Builder

class Network:
    """The model to categorize the images

    Args:
     model: model to train
     path: path to the training data
     epochs: number of epochs to train
     batch_size: size of the batches for each epoch
     convolution_layers: layers of cnn/max-pooling
     callbacks: things to stop the training
     set_steps: whether to set the training steps-per-epoch
    """
    def __init__(self, model, path: str, epochs: int=15,
                 batch_size: int=128, convolution_layers: int=3,
                 set_steps: bool=True,
                 callbacks: list=None) -> None:
        self.model = model
        self.path = path
        self.epochs = epochs
        self.batch_size = batch_size
        self.convolution_layers = convolution_layers
        self.set_steps = set_steps
        self.callbacks = callbacks
        self._data = None
        self._model = None
        self.history = None
        return

    @property
    def data(self) -> Data:
        """The data generator builder"""
        if self._data is None:
            self._data = Data(self.path, batch_size=self.batch_size)
        return self._data

    def summary(self) -> None:
        """Prints the model summary"""
        print(self.model.summary())
        return

    def train(self) -> None:
        """Trains the model"""
        callbacks = self.callbacks if self.callbacks else []
        arguments = dict(
            generator=self.data.training_generator,
            validation_data=self.data.validation_generator,
            epochs = self.epochs,
            callbacks = callbacks,
            verbose=2,
        )
        if self.set_steps:
            arguments["steps_per_epoch"] = int(
                self.data.training_generator.samples/self.batch_size)
            arguments["validation_steps"] = int(
                self.data.validation_generator.samples/self.batch_size)

        self.history = self.model.fit_generator(**arguments)
        return

    def __str__(self) -> str:
        return (f"(Network) -\n Path: {self.path}\n Epochs: {self.epochs}\n "
                f"Batch Size: {self.batch_size}\n Callbacks: {self.callbacks}\n "
                f"Data: {self.data}")

Train It

good_enough = GoodEnough()
network = Network(model, Path(OUTPUT).expanduser(),
                  set_steps=True,
                  epochs=40,
                  callbacks=[checkpoint, good_enough],
                  batch_size=1)
with TIMER:
    network.train()

Epoch 00002: val_acc improved from 0.88780 to 0.89268, saving model to /home/athena/models/horses-vs-humans/inception_transfer.hdf5
822/822 - 61s - loss: 0.7190 - acc: 0.5255 - val_loss: 0.5419 - val_acc: 0.8927
Epoch 3/40

Epoch 00003: val_acc improved from 0.89268 to 0.92195, saving model to /home/athena/models/horses-vs-humans/inception_transfer.hdf5
822/822 - 61s - loss: 0.7102 - acc: 0.5170 - val_loss: 0.5290 - val_acc: 0.9220
Epoch 4/40

Epoch 00004: val_acc did not improve from 0.92195
822/822 - 60s - loss: 0.7103 - acc: 0.5097 - val_loss: 0.5357 - val_acc: 0.8146
Epoch 5/40

Epoch 00005: val_acc did not improve from 0.92195
822/822 - 60s - loss: 0.7051 - acc: 0.5012 - val_loss: 0.5330 - val_acc: 0.6780
Epoch 6/40

Epoch 00006: val_acc did not improve from 0.92195
822/822 - 64s - loss: 0.7006 - acc: 0.5012 - val_loss: 0.5969 - val_acc: 0.5317
Epoch 7/40

Epoch 00007: val_acc did not improve from 0.92195
822/822 - 63s - loss: 0.7009 - acc: 0.5109 - val_loss: 0.5356 - val_acc: 0.9122
Epoch 8/40

Epoch 00008: val_acc did not improve from 0.92195
822/822 - 62s - loss: 0.7025 - acc: 0.4878 - val_loss: 0.5103 - val_acc: 0.9073
Epoch 9/40

Epoch 00009: val_acc did not improve from 0.92195
822/822 - 60s - loss: 0.6972 - acc: 0.5207 - val_loss: 0.5321 - val_acc: 0.7561
Epoch 10/40

Epoch 00010: val_acc did not improve from 0.92195
822/822 - 61s - loss: 0.6946 - acc: 0.5316 - val_loss: 0.5102 - val_acc: 0.9220
Epoch 11/40

Epoch 00011: val_acc did not improve from 0.92195
822/822 - 62s - loss: 0.6966 - acc: 0.5365 - val_loss: 0.5149 - val_acc: 0.8488
Epoch 12/40

Epoch 00012: val_acc did not improve from 0.92195
822/822 - 62s - loss: 0.6981 - acc: 0.5073 - val_loss: 0.5266 - val_acc: 0.8293
Epoch 13/40

Epoch 00013: val_acc did not improve from 0.92195
822/822 - 62s - loss: 0.6949 - acc: 0.5182 - val_loss: 0.5046 - val_acc: 0.8780
Epoch 14/40

Epoch 00014: val_acc improved from 0.92195 to 0.95122, saving model to /home/athena/models/horses-vs-humans/inception_transfer.hdf5
822/822 - 62s - loss: 0.6957 - acc: 0.5170 - val_loss: 0.4872 - val_acc: 0.9512
Epoch 15/40

Epoch 00015: val_acc did not improve from 0.95122
822/822 - 61s - loss: 0.6944 - acc: 0.5049 - val_loss: 0.4904 - val_acc: 0.9366
Epoch 16/40

Epoch 00016: val_acc did not improve from 0.95122
822/822 - 60s - loss: 0.6920 - acc: 0.5158 - val_loss: 0.5201 - val_acc: 0.7463
Epoch 17/40

Epoch 00017: val_acc did not improve from 0.95122
822/822 - 60s - loss: 0.6951 - acc: 0.4988 - val_loss: 0.4872 - val_acc: 0.8488
Epoch 18/40

Epoch 00018: val_acc improved from 0.95122 to 0.97073, saving model to /home/athena/models/horses-vs-humans/inception_transfer.hdf5
822/822 - 61s - loss: 0.6927 - acc: 0.5377 - val_loss: 0.4889 - val_acc: 0.9707
Epoch 19/40

Epoch 00019: val_acc did not improve from 0.97073
822/822 - 63s - loss: 0.6900 - acc: 0.5255 - val_loss: 0.4912 - val_acc: 0.7854
Epoch 20/40

Epoch 00020: val_acc did not improve from 0.97073
822/822 - 64s - loss: 0.6927 - acc: 0.5243 - val_loss: 0.4651 - val_acc: 0.8878
Epoch 21/40

Epoch 00021: val_acc did not improve from 0.97073
822/822 - 64s - loss: 0.6914 - acc: 0.5304 - val_loss: 0.4368 - val_acc: 0.9659
Epoch 22/40

Epoch 00022: val_acc improved from 0.97073 to 0.97561, saving model to /home/athena/models/horses-vs-humans/inception_transfer.hdf5
822/822 - 65s - loss: 0.6881 - acc: 0.5341 - val_loss: 0.4350 - val_acc: 0.9756
Epoch 23/40

Epoch 00023: val_acc did not improve from 0.97561
822/822 - 62s - loss: 0.6914 - acc: 0.5401 - val_loss: 0.4421 - val_acc: 0.8439
Epoch 24/40

Epoch 00024: val_acc improved from 0.97561 to 0.99024, saving model to /home/athena/models/horses-vs-humans/inception_transfer.hdf5
822/822 - 61s - loss: 0.6887 - acc: 0.5511 - val_loss: 0.3974 - val_acc: 0.9902
Epoch 25/40

Epoch 00025: val_acc did not improve from 0.99024
822/822 - 62s - loss: 0.6855 - acc: 0.5535 - val_loss: 0.3716 - val_acc: 0.9902
Epoch 26/40

Epoch 00026: val_acc did not improve from 0.99024
822/822 - 63s - loss: 0.6865 - acc: 0.5389 - val_loss: 0.3736 - val_acc: 0.9610
Epoch 27/40

Epoch 00027: val_acc did not improve from 0.99024
822/822 - 60s - loss: 0.6823 - acc: 0.5718 - val_loss: 0.3799 - val_acc: 0.9220
Epoch 28/40

Epoch 00028: val_acc did not improve from 0.99024
822/822 - 61s - loss: 0.6875 - acc: 0.5474 - val_loss: 0.3530 - val_acc: 0.9902
Epoch 29/40

Epoch 00029: val_acc did not improve from 0.99024
822/822 - 60s - loss: 0.6881 - acc: 0.5487 - val_loss: 0.3376 - val_acc: 0.9902
Epoch 30/40

Epoch 00030: val_acc did not improve from 0.99024
822/822 - 62s - loss: 0.6857 - acc: 0.5462 - val_loss: 0.3216 - val_acc: 0.9707
Epoch 31/40

Epoch 00031: val_acc did not improve from 0.99024
822/822 - 62s - loss: 0.6847 - acc: 0.5450 - val_loss: 0.3025 - val_acc: 0.9902
Epoch 32/40

Epoch 00032: val_acc improved from 0.99024 to 0.99512, saving model to /home/athena/models/horses-vs-humans/inception_transfer.hdf5
822/822 - 64s - loss: 0.6821 - acc: 0.5535 - val_loss: 0.2852 - val_acc: 0.9951
Epoch 33/40

Epoch 00033: val_acc did not improve from 0.99512
822/822 - 62s - loss: 0.6793 - acc: 0.5669 - val_loss: 0.2617 - val_acc: 0.9854
Epoch 34/40

Epoch 00034: val_acc did not improve from 0.99512
822/822 - 60s - loss: 0.6772 - acc: 0.5937 - val_loss: 0.2565 - val_acc: 0.9707
Epoch 35/40

Epoch 00035: val_acc did not improve from 0.99512
822/822 - 61s - loss: 0.6766 - acc: 0.5803 - val_loss: 0.2190 - val_acc: 0.9951
Epoch 36/40

Epoch 00036: val_acc did not improve from 0.99512
822/822 - 63s - loss: 0.6726 - acc: 0.5937 - val_loss: 0.2423 - val_acc: 0.9463
Epoch 37/40

Epoch 00037: val_acc did not improve from 0.99512
822/822 - 61s - loss: 0.6735 - acc: 0.5669 - val_loss: 0.2106 - val_acc: 0.9902
Epoch 38/40

Epoch 00038: val_acc improved from 0.99512 to 1.00000, saving model to /home/athena/models/horses-vs-humans/inception_transfer.hdf5
822/822 - 61s - loss: 0.6718 - acc: 0.5949 - val_loss: 0.1868 - val_acc: 1.0000
Epoch 39/40

Epoch 00039: val_acc did not improve from 1.00000
822/822 - 60s - loss: 0.6647 - acc: 0.6119 - val_loss: 0.2140 - val_acc: 0.9610
Epoch 40/40

Epoch 00040: val_acc did not improve from 1.00000
822/822 - 60s - loss: 0.6671 - acc: 0.5815 - val_loss: 0.1823 - val_acc: 0.9707
2019-08-18 14:37:14,814 graeae.timers.timer end: Ended: 2019-08-18 14:37:14.814322
I0818 14:37:14.814355 139914340390720 timer.py:77] Ended: 2019-08-18 14:37:14.814322
2019-08-18 14:37:14,815 graeae.timers.timer end: Elapsed: 0:41:03.671258
I0818 14:37:14.815070 139914340390720 timer.py:78] Elapsed: 0:41:03.671258

So we hit 100% validation accuracy… which looks like overfitting (or a fluke of a small validation set). Also, why didn't the GoodEnough callback stop the training? Looking at it again, it monitors the training accuracy (acc), which never gets much above 61%, while the validation accuracy is the one that reaches 100%… and that gap itself seems odd.
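
If the goal had been to stop on the validation accuracy instead, a variant of the callback could monitor val_acc - a minimal sketch (not what was actually run here):

class GoodEnoughValidation(tensorflow.keras.callbacks.Callback):
    """Stops the training once the validation accuracy is high enough"""
    def on_epoch_end(self, epoch, logs=None):
        logs = logs or {}
        if logs.get("val_acc", 0) > 0.999:
            print("\nReached 99.9% validation accuracy so cancelling training!")
            self.model.stop_training = True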

history_ = pandas.DataFrame.from_dict(model.history.history)
history = history_.rename(columns={"loss": "Training Loss",
                                   "acc": "Training Accuracy",
                                   "val_loss": "Validation Loss",
                                   "val_acc": "Validation Accuracy"})
plot = history.hvplot().opts(
    title="Loss and Accuracy of the Horses Vs Humans Model",
    height=800,
    width=1000,
)
Embed(plot=plot, file_name="model_history")()

[Plot: Loss and Accuracy of the Horses Vs Humans Model]

So this is a little weird - should the validation accuracy start out that high? Maybe, since the Inception layers are pre-trained… And why does the validation loss improve faster than the training loss? One likely contributor is the Dropout layer: it only drops units while training, so the training metrics come from a handicapped version of the network, while validation uses all of the units.
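
One way to check the dropout explanation (a sketch, not something that was run for this post) is to evaluate the trained model on the training generator, since evaluation runs in inference mode with dropout turned off:

# hypothetical check: score the training data without dropout active
loss, accuracy = model.evaluate_generator(
    network.data.training_generator,
    steps=network.data.training_generator.samples // network.batch_size)
print(f"Inference-mode accuracy on the training data: {accuracy:.2f}")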

Testing

I don't remember downloading it, but there's a separate folder called "validation" next to the training folder which I'm assuming holds a different set of images, so I'll use it as a test set.
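
A quick look at what's in that folder (the validation_path name here is just for illustration):

# count the files in each class sub-folder of the hold-out set
validation_path = Path("~/data/datasets/images/horse-or-human/validation/").expanduser()
for sub_folder in ("horses", "humans"):
    image_count = len(list((validation_path/sub_folder).iterdir()))
    print(f"{sub_folder}: {image_count} images")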

model = load_model(str(best_model))
target_size = (300, 300)
def predict(model, filename):
    """Loads an image file and returns the model's sigmoid output"""
    # OpenCV reads the image in as BGR with three channels (no alpha)
    loaded = cv2.imread(str(filename))
    # resize to the shape the model was trained on and scale the pixels to 0 - 1
    x = cv2.resize(loaded, target_size)/255
    # add a batch dimension
    x = numpy.reshape(x, (1, 300, 300, 3))
    return model.predict(x)
path = Path("~/data/datasets/images/horse-or-human/validation/").expanduser()
correct = 0
# flow_from_directory labeled the classes alphabetically (horses=0, humans=1),
# so a sigmoid output above 0.5 means the model is predicting "human"
for index, filename in enumerate((path/"horses").iterdir()):
    prediction = predict(model, filename)
    correct += 0 if prediction[0] > 0.5 else 1
print(f"Fraction of horses correctly classified: {correct/(index + 1):.2f}")
Fraction of horses correctly classified: 0.99
correct = 0
for index, filename in enumerate((path/"humans").iterdir()):
    prediction = predict(model, filename)
    correct += 1 if prediction[0] > 0.5 else 0
print(f"Fraction of humans correctly classified: {correct/(index + 1):.2f}")
Fraction of humans correctly classified: 1.00

The test images are slightly different from the training images in that the subjects sit on plain white backgrounds rather than the simulated backgrounds used for training.