def Net():
    """Build a two-branch (2D-conv + 3D-conv) regression model.

    Branch 1 runs a Conv2D pipeline over a (50, 5, 3) input; branch 2 runs a
    Conv3D pipeline over a (50, 5, 5, 7) input. The two branch embeddings
    (each of width 2*n1) are concatenated and projected to `n1` linear
    outputs, suitable for regression.

    Returns:
        A Keras `Model` mapping [input1, input2] -> (n1,) output tensor.

    NOTE(review): relies on the module-level `n1` and on `Input`, `layers`,
    and `Model` being imported elsewhere in this file — confirm those imports.
    """
    # --- Branch 1: Conv2D pipeline over the (50, 5, 3) input ---
    input1 = Input(shape=(50, 5, 3))
    x2 = layers.Conv2D(filters=3, kernel_size=(5, 2), padding="valid")(input1)
    # Every argument of the original BatchNormalization call was its
    # documented default, so the explicit argument list is dropped.
    x2 = layers.BatchNormalization()(x2)
    x2 = layers.Activation('relu')(x2)
    x2 = layers.MaxPooling2D(pool_size=(2, 1), padding="valid")(x2)
    # Second conv has no activation/BN in the original; preserved as-is.
    x2 = layers.Conv2D(filters=3, kernel_size=(3, 2), padding="valid")(x2)
    x2 = layers.Flatten()(x2)
    x2 = layers.Dense(128, activation='relu')(x2)
    x2 = layers.Dropout(0.5)(x2)
    x2 = layers.Dense(2 * n1)(x2)
    # Fix: the original scaled with a bare tensor op (`0.3 * X2`), which
    # creates a node outside the Keras layer graph and can break model
    # save/load on some Keras versions. A Lambda layer computes the same
    # value while keeping the graph a pure layer graph.
    x2 = layers.Lambda(lambda t: 0.3 * t)(x2)

    # --- Branch 2: Conv3D pipeline over the (50, 5, 5, 7) input ---
    input2 = Input(shape=(50, 5, 5, 7))
    x3 = layers.Conv3D(filters=7, kernel_size=(15, 3, 3), padding="valid")(input2)
    x3 = layers.Activation('relu')(x3)
    x3 = layers.BatchNormalization()(x3)
    x3 = layers.Dropout(0.5)(x3)
    x3 = layers.Flatten()(x3)
    x3 = layers.Dense(128, activation='relu')(x3)
    x3 = layers.Dense(2 * n1, activation='relu')(x3)

    # Merge the two branch embeddings and project to the regression output
    # (linear activation: no non-linearity on the final Dense).
    merged = layers.concatenate([x2, x3])
    output = layers.Dense(n1)(merged)
    return Model([input1, input2], output)
######################
# Build the two-branch model and train it on the prepared arrays.
model = Net()
# NOTE(review): `validation_data` reuses the TRAINING set
# ([Train_x1, Train_x2], Train_y), so the reported val_loss will simply
# mirror the training loss and gives no generalization signal — presumably
# a held-out test/validation split was intended; confirm against the
# data-preparation code.
history = model.fit([Train_x1,Train_x2],Train_y, epochs=Epochs, batch_size=256, shuffle=True,verbose=1,
validation_data=([Train_x1,Train_x2],Train_y))
# 说明(原为裸文本,已改为注释以免破坏语法):
# 这是孪生结构。FeatureNetwork() 的功能和上面的相同;为方便选择,在
# ClassiFilerNet() 函数中加入了判断是否使用共享参数模型的功能:令
# reuse=True,便使用共享参数的模型。
# 关键之处在于只调用了一次 Model,也就是说只创建了一次模型;虽然有两个
# 输入,但其实使用的是同一个模型,因此权重是共享的。
# 只建立了一个 model,自然就是孪生的。
# 这是孪生网络,卷积层之间共享权值,可以用于回归预测。