import matplotlib
matplotlib.use("Agg")
from sklearn.preprocessing import LabelBinarizer
from pyimagesearch.nn.conv import MiniGoogLeNet
from pyimagesearch.callbacks import TrainingMonitor
from keras.preprocessing.image import ImageDataGenerator
from keras.callbacks import LearningRateScheduler
from keras.optimizers import SGD
from keras.datasets import cifar10
import numpy as np
import argparse
import os
# training length and starting learning rate for the schedule below
NUM_EPOCHS = 70
INIT_LR = 5e-3

def poly_decay(epoch):
    """Polynomial learning-rate schedule (power=1.0, i.e. linear decay).

    Starts at INIT_LR and decays to 0 by epoch NUM_EPOCHS.

    :param epoch: zero-based index of the current epoch
    :return: learning rate to use for this epoch
    """
    decay_power = 1.0
    fraction_done = epoch / float(NUM_EPOCHS)
    return INIT_LR * (1 - fraction_done) ** decay_power
# parse command-line arguments
parser = argparse.ArgumentParser()
parser.add_argument("-m", "--model", required=True,
    help="path to output model")
parser.add_argument("-o", "--output", required=True,
    help="path to output directory (logs, plots, etc.)")
cli_args = vars(parser.parse_args())

# load CIFAR-10 and convert the pixel data to floats
print("[INFO] loading CIFAR-10 data...")
((trainData, trainLabels), (testData, testLabels)) = cifar10.load_data()
trainData = trainData.astype("float")
testData = testData.astype("float")

# zero-center both splits using the per-pixel mean of the training set
pixelMean = np.mean(trainData, axis=0)
trainData -= pixelMean
testData -= pixelMean

# one-hot encode the integer class labels
binarizer = LabelBinarizer()
trainLabels = binarizer.fit_transform(trainLabels)
testLabels = binarizer.transform(testLabels)

# on-the-fly data augmentation: small shifts plus horizontal mirroring
augmenter = ImageDataGenerator(width_shift_range=0.1, height_shift_range=0.1,
    horizontal_flip=True, fill_mode="nearest")

# monitor training progress (plot + JSON log keyed by PID) and apply
# the polynomial learning-rate decay each epoch
figPath = os.path.sep.join([cli_args["output"], "{}.png".format(os.getpid())])
jsonPath = os.path.sep.join([cli_args["output"], "{}.json".format(os.getpid())])
callbacks = [
    TrainingMonitor(figPath, jsonPath=jsonPath),
    LearningRateScheduler(poly_decay),
]

# build and compile the network with SGD + momentum
print("[INFO] compiling model...")
optimizer = SGD(lr=INIT_LR, momentum=0.9)
model = MiniGoogLeNet.build(width=32, height=32, depth=3, classes=10)
model.compile(loss="categorical_crossentropy", optimizer=optimizer,
    metrics=["accuracy"])

# train on augmented batches of 64, validating on the (un-augmented) test set
print("[INFO] training network...")
model.fit_generator(aug.flow(trainData, trainLabels, batch_size=64) if False else augmenter.flow(trainData, trainLabels, batch_size=64),
    validation_data=(testData, testLabels), steps_per_epoch=len(trainData) // 64,
    epochs=NUM_EPOCHS, callbacks=callbacks, verbose=1)

# persist the trained weights/architecture to disk
print("[INFO] serializing network...")
model.save(cli_args["model"])
MiniGoogLeNet 的代码有点问题,请参考下面这个正确的版本。
# import the necessary packages
from tensorflow.keras.layers import BatchNormalization
from tensorflow.keras.layers import Conv2D
from tensorflow.keras.layers import AveragePooling2D
from tensorflow.keras.layers import MaxPooling2D
from tensorflow.keras.layers import Activation
from tensorflow.keras.layers import Dropout
from tensorflow.keras.layers import Dense
from tensorflow.keras.layers import Flatten
from tensorflow.keras.layers import Input
from tensorflow.keras.models import Model
from tensorflow.keras.layers import concatenate
from tensorflow.keras import backend as K
class MiniGoogLeNet:
    """Miniaturized GoogLeNet-style CNN for small (e.g. 32x32 CIFAR) images."""

    @staticmethod
    def conv_module(x, K, kX, kY, stride, chanDim, padding="same"):
        """Apply a CONV => BN => RELU block and return the output tensor.

        K is the number of filters; (kX, kY) is the kernel size; chanDim is
        the batch-normalization axis (channel dimension).
        """
        out = Conv2D(K, (kX, kY), strides=stride, padding=padding)(x)
        out = BatchNormalization(axis=chanDim)(out)
        return Activation("relu")(out)

    @staticmethod
    def inception_module(x, numK1x1, numK3x3, chanDim):
        """Run parallel 1x1 and 3x3 conv modules, concatenated channel-wise."""
        branch_1x1 = MiniGoogLeNet.conv_module(x, numK1x1, 1, 1, (1, 1), chanDim)
        branch_3x3 = MiniGoogLeNet.conv_module(x, numK3x3, 3, 3, (1, 1), chanDim)
        return concatenate([branch_1x1, branch_3x3], axis=chanDim)

    @staticmethod
    def downsample_module(x, K, chanDim):
        """Halve spatial size via a strided conv branch and a max-pool branch,
        concatenated along the channel dimension."""
        conv_branch = MiniGoogLeNet.conv_module(
            x, K, 3, 3, (2, 2), chanDim, padding="valid")
        pool_branch = MaxPooling2D((3, 3), strides=(2, 2))(x)
        return concatenate([conv_branch, pool_branch], axis=chanDim)

    @staticmethod
    def build(width, height, depth, classes):
        """Assemble the network and return the (uncompiled) Keras Model.

        :param width: input image width in pixels
        :param height: input image height in pixels
        :param depth: number of input channels
        :param classes: number of output classes for the softmax head
        :return: a keras Model named "googlenet"
        """
        # pick input shape / channel axis based on the backend's data format
        if K.image_data_format() == "channels_first":
            inputShape = (depth, height, width)
            chanDim = 1
        else:
            inputShape = (height, width, depth)
            chanDim = -1

        inputs = Input(shape=inputShape)
        net = MiniGoogLeNet.conv_module(inputs, 96, 3, 3, (1, 1), chanDim)

        # stage 1: two inception modules, then downsample
        for n1x1, n3x3 in ((32, 32), (32, 48)):
            net = MiniGoogLeNet.inception_module(net, n1x1, n3x3, chanDim)
        net = MiniGoogLeNet.downsample_module(net, 80, chanDim)

        # stage 2: four inception modules, then downsample
        for n1x1, n3x3 in ((112, 48), (96, 64), (80, 80), (48, 96)):
            net = MiniGoogLeNet.inception_module(net, n1x1, n3x3, chanDim)
        net = MiniGoogLeNet.downsample_module(net, 96, chanDim)

        # stage 3: two inception modules, global average pool, dropout
        # NOTE: the 7x7 average pool assumes a 32x32 input (32 -> 15 -> 7)
        for n1x1, n3x3 in ((176, 160), (176, 160)):
            net = MiniGoogLeNet.inception_module(net, n1x1, n3x3, chanDim)
        net = AveragePooling2D((7, 7))(net)
        net = Dropout(0.5)(net)

        # softmax classifier head
        net = Flatten()(net)
        net = Dense(classes)(net)
        net = Activation("softmax")(net)

        return Model(inputs, net, name="googlenet")
MiniGoogLeNet 是函数还是类?报 NoneType 错误,多半是因为 model 为 None。
有可能是Keras版本问题,你确定你的环境和书上的一致吗?
你的报错信息表示MiniGoogLeNet.build函数返回的model为None,你得检查一下MiniGoogLeNet.build函数是否正常执行,其返回的模型是否正常可用。比如执行一下model.summary()看能否正常输出模型结构,或者print(model)。
最好去看一下MiniGoogLeNet.build函数的源码