Help with a generative adversarial network: ValueError: Unknown activation function:ReLU

from keras.models import Sequential
from keras.layers import Dense, Reshape, ReLU, LeakyReLU, Activation, BatchNormalization, UpSampling2D, Conv2D

def generator_model():
    model = Sequential()

    # Project the 100-dim noise vector to 1024 units
    model.add(Dense(input_dim=100, output_dim=1024))
    model.add(Activation('ReLU'))

    model.add(Dense(128 * 8 * 8))
    model.add(BatchNormalization(momentum=0.8))
    model.add(Activation('ReLU'))

    # Reshape to an 8x8 feature map with 128 channels, then upsample to 32x32
    model.add(Reshape((8, 8, 128), input_shape=(8 * 8 * 128,)))
    model.add(UpSampling2D(size=(4, 4)))

    model.add(Conv2D(64, (5, 5), padding='same'))
    model.add(Activation('ReLU'))

    # Upsample to 64x64 and output a 3-channel image in [-1, 1] via tanh
    model.add(UpSampling2D(size=(2, 2)))
    model.add(Conv2D(3, (5, 5), padding='same'))
    model.add(Activation('tanh'))

    model.summary()
    return model

ValueError: Unknown activation function:ReLU

How do I fix this?
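
For reference, a minimal sketch of the two usual fixes, assuming Keras 2.x: Keras resolves activation strings by their lowercase built-in names, so the string 'ReLU' is not found while 'relu' is; alternatively, the ReLU layer class that is already imported can be added to the model directly instead of an Activation layer.

# Minimal sketch, assuming Keras 2.x
from keras.models import Sequential
from keras.layers import Dense, Activation, ReLU

model = Sequential()
model.add(Dense(1024, input_dim=100))

# Option 1: pass the lowercase built-in name to Activation
model.add(Activation('relu'))

model.add(Dense(128 * 8 * 8))

# Option 2: add the ReLU layer class directly
model.add(ReLU())

model.summary()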