The error message is as follows:
Traceback (most recent call last):
  File "E:\Graduation project\python project\train.py", line 88, in <module>
    history = dnet.fit(
  File "E:\Anaconda\lib\site-packages\keras\utils\traceback_utils.py", line 67, in error_handler
    raise e.with_traceback(filtered_tb) from None
  File "E:\Anaconda\lib\site-packages\tensorflow\python\framework\func_graph.py", line 1147, in autograph_handler
    raise e.ag_error_metadata.to_exception(e)
ValueError: in user code:

    File "E:\Anaconda\lib\site-packages\keras\engine\training.py", line 1021, in train_function *
        return step_function(self, iterator)
    File "E:\Anaconda\lib\site-packages\keras\engine\training.py", line 1010, in step_function **
        outputs = model.distribute_strategy.run(run_step, args=(data,))
    File "E:\Anaconda\lib\site-packages\keras\engine\training.py", line 1000, in run_step **
        outputs = model.train_step(data)
    File "E:\Anaconda\lib\site-packages\keras\engine\training.py", line 860, in train_step
        loss = self.compute_loss(x, y, y_pred, sample_weight)
    File "E:\Anaconda\lib\site-packages\keras\engine\training.py", line 918, in compute_loss
        return self.compiled_loss(
    File "E:\Anaconda\lib\site-packages\keras\engine\compile_utils.py", line 201, in __call__
        loss_value = loss_obj(y_t, y_p, sample_weight=sw)
    File "E:\Anaconda\lib\site-packages\keras\losses.py", line 141, in __call__
        losses = call_fn(y_true, y_pred)
    File "E:\Anaconda\lib\site-packages\keras\losses.py", line 245, in call **
        return ag_fn(y_true, y_pred, **self._fn_kwargs)
    File "E:\Anaconda\lib\site-packages\keras\losses.py", line 1789, in categorical_crossentropy
        return backend.categorical_crossentropy(
    File "E:\Anaconda\lib\site-packages\keras\backend.py", line 5083, in categorical_crossentropy
        target.shape.assert_is_compatible_with(output.shape)

    ValueError: Shapes (16, 4) and (16, 3) are incompatible
The relevant code is as follows:
import tensorflow.compat.v2 as tf
import numpy as np
import cv2
import matplotlib.pyplot as plt
from skimage import io
import os
import pickle
import sys
"""Preparing our Data"""
DATA_PATH = r'E:\Graduation project\python project'  # Path to folder containing data (raw string so the backslashes are kept literally)
shape_to_label = {'rock':np.array([1.,0.,0.,0.]),'paper':np.array([0.,1.,0.,0.]),'scissor':np.array([0.,0.,1.,0.]),'ok':np.array([0.,0.,0.,1.])}
arr_to_shape = {np.argmax(shape_to_label[x]):x for x in shape_to_label.keys()}
imgData = list()
labels = list()
for dr in os.listdir(DATA_PATH):
    if dr not in ['rock', 'paper', 'scissor']:
        continue
    print(dr)
    lb = shape_to_label[dr]
    i = 0
    for pic in os.listdir(os.path.join(DATA_PATH, dr)):
        path = os.path.join(DATA_PATH, dr, pic)
        img = cv2.imread(path)
        imgData.append([img, lb])
        imgData.append([cv2.flip(img, 1), lb])  # horizontally flipped image
        imgData.append([cv2.resize(img[50:250, 50:250], (300, 300)), lb])  # zoom: crop in and resize
        i += 3
    print(i)
np.random.shuffle(imgData)
imgData,labels = zip(*imgData)
imgData = np.array(imgData)
labels = np.array(labels)
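# Note added for clarity (not in the original script): shape_to_label has four
# entries, so every one-hot label is a length-4 vector and labels ends up with
# shape (N, 4) at this point, even though only the rock/paper/scissor folders
# are read. A quick hypothetical check would be:
# print(labels.shape)  # expected to print (N, 4)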
"""Model"""
from keras.models import Sequential,load_model
from keras.layers import Dense,MaxPool2D,Dropout,Flatten,Conv2D,GlobalAveragePooling2D,Activation
from keras.callbacks import ModelCheckpoint, EarlyStopping
from keras.optimizers import adam_v2
from keras.applications.densenet import DenseNet121
"""DenseNet"""
densenet = DenseNet121(include_top=False, weights='imagenet', classes=3,input_shape=(300,300,3))
densenet.trainable=True
def genericModel(base):
    model = Sequential()
    model.add(base)
    model.add(MaxPool2D())
    model.add(Flatten())
    model.add(Dense(3, activation='softmax'))
    model.compile(optimizer=adam_v2.Adam(), loss='categorical_crossentropy', metrics=['acc'])
    return model
dnet = genericModel(densenet)
checkpoint = ModelCheckpoint(
    'model.h5',
    monitor='val_acc',
    verbose=1,
    save_best_only=True,
    save_weights_only=True,
    mode='auto'
)
es = EarlyStopping(patience = 3)
history = dnet.fit(
    x=imgData,
    y=labels,
    batch_size=16,
    epochs=8,
    callbacks=[checkpoint, es],
    validation_split=0.2
)
dnet.save_weights('model.h5')
with open("model.json", "w") as json_file:
json_file.write(dnet.to_json())
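For completeness, a minimal sketch (not part of the original script) of how the two saved files could be loaded back later, assuming the same Keras installation as above:

from keras.models import model_from_json

# Rebuild the architecture from model.json, then restore the trained weights
with open("model.json") as json_file:
    restored = model_from_json(json_file.read())
restored.load_weights("model.h5")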
Reference: https://github.com/HOD101s/RockPaperScissor-AI-
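For reference, the ValueError above ("Shapes (16, 4) and (16, 3) are incompatible") matches the fact that shape_to_label produces 4-entry one-hot labels (it still contains 'ok') while the model ends in Dense(3, activation='softmax'). A minimal sketch of one way to make the two sides consistent, assuming only rock/paper/scissor are meant to be trained:

import numpy as np

# Hedged sketch, not the original author's fix: keep the label width equal to
# the width of the output layer. Variant A: train three classes only, so drop 'ok'.
shape_to_label = {
    'rock':    np.array([1., 0., 0.]),
    'paper':   np.array([0., 1., 0.]),
    'scissor': np.array([0., 0., 1.]),
}
# ...data loading and model definition as above; Dense(3, activation='softmax') now matches.

# Variant B: keep all four classes. Then 'ok' must also be added to the folder
# filter in the data loop, and the output layer widened to four units:
# model.add(Dense(4, activation='softmax'))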