This article shows how to initialize a Keras network's weights according to layer names. The idea is straightforward: give each layer a fixed name when building the model, then load pretrained weights with by_name=True so only the layers whose names match are initialized from the weights file.
Initializing a Keras network by layer name
from keras.models import Model
from keras.layers import (Input, Conv2D, MaxPooling2D, GlobalMaxPooling2D,
                          GlobalAveragePooling2D, BatchNormalization,
                          Concatenate, Dense, Dropout)
from keras.optimizers import Adam


def get_model(input_shape1=[75, 75, 3], input_shape2=[1], weights=None):
    bn_model = 0
    trainable = True
    # kernel_regularizer = regularizers.l2(1e-4)
    kernel_regularizer = None
    activation = 'relu'

    img_input = Input(shape=input_shape1)
    angle_input = Input(shape=input_shape2)

    # Block 1
    x = Conv2D(64, (3, 3), activation=activation, padding='same',
               trainable=trainable, kernel_regularizer=kernel_regularizer,
               name='block1_conv1')(img_input)
    x = Conv2D(64, (3, 3), activation=activation, padding='same',
               trainable=trainable, kernel_regularizer=kernel_regularizer,
               name='block1_conv2')(x)
    x = MaxPooling2D((2, 2), strides=(2, 2), name='block1_pool')(x)

    # Block 2
    x = Conv2D(128, (3, 3), activation=activation, padding='same',
               trainable=trainable, kernel_regularizer=kernel_regularizer,
               name='block2_conv1')(x)
    x = Conv2D(128, (3, 3), activation=activation, padding='same',
               trainable=trainable, kernel_regularizer=kernel_regularizer,
               name='block2_conv2')(x)
    x = MaxPooling2D((2, 2), strides=(2, 2), name='block2_pool')(x)

    # Block 3
    x = Conv2D(256, (3, 3), activation=activation, padding='same',
               trainable=trainable, kernel_regularizer=kernel_regularizer,
               name='block3_conv1')(x)
    x = Conv2D(256, (3, 3), activation=activation, padding='same',
               trainable=trainable, kernel_regularizer=kernel_regularizer,
               name='block3_conv2')(x)
    x = Conv2D(256, (3, 3), activation=activation, padding='same',
               trainable=trainable, kernel_regularizer=kernel_regularizer,
               name='block3_conv3')(x)
    x = MaxPooling2D((2, 2), strides=(2, 2), name='block3_pool')(x)

    # Block 4
    x = Conv2D(512, (3, 3), activation=activation, padding='same',
               trainable=trainable, kernel_regularizer=kernel_regularizer,
               name='block4_conv1')(x)
    x = Conv2D(512, (3, 3), activation=activation, padding='same',
               trainable=trainable, kernel_regularizer=kernel_regularizer,
               name='block4_conv2')(x)
    x = Conv2D(512, (3, 3), activation=activation, padding='same',
               trainable=trainable, kernel_regularizer=kernel_regularizer,
               name='block4_conv3')(x)
    x = MaxPooling2D((2, 2), strides=(2, 2), name='block4_pool')(x)

    # Block 5
    x = Conv2D(512, (3, 3), activation=activation, padding='same',
               trainable=trainable, kernel_regularizer=kernel_regularizer,
               name='block5_conv1')(x)
    x = Conv2D(512, (3, 3), activation=activation, padding='same',
               trainable=trainable, kernel_regularizer=kernel_regularizer,
               name='block5_conv2')(x)
    x = Conv2D(512, (3, 3), activation=activation, padding='same',
               trainable=trainable, kernel_regularizer=kernel_regularizer,
               name='block5_conv3')(x)
    x = MaxPooling2D((2, 2), strides=(2, 2), name='block5_pool')(x)

    # Two pooling branches over the image features, plus the angle input
    branch_1 = GlobalMaxPooling2D()(x)
    # branch_1 = BatchNormalization(momentum=bn_model)(branch_1)
    branch_2 = GlobalAveragePooling2D()(x)
    # branch_2 = BatchNormalization(momentum=bn_model)(branch_2)
    branch_3 = BatchNormalization(momentum=bn_model)(angle_input)

    x = Concatenate()([branch_1, branch_2, branch_3])
    x = Dense(1024, activation=activation, kernel_regularizer=kernel_regularizer)(x)
    # x = Dropout(0.5)(x)
    x = Dense(1024, activation=activation, kernel_regularizer=kernel_regularizer)(x)
    x = Dropout(0.6)(x)
    output = Dense(1, activation='sigmoid')(x)

    model = Model([img_input, angle_input], output)
    optimizer = Adam(lr=1e-5, beta_1=0.9, beta_2=0.999, epsilon=1e-8, decay=0.0)
    model.compile(loss='binary_crossentropy', optimizer=optimizer, metrics=['accuracy'])

    if weights is not None:
        # Set by_name=True so weights are matched to layers by their names
        model.load_weights(weights, by_name=True)
        # layer_weights = h5py.File(weights, 'r')
        # for idx in range(len(model.layers)):
        #     model.set_weights()
    print('have prepared the model.')
    return model
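For reference, here is a minimal usage sketch; it is not part of the original post and the weights file name is only an assumed example. Because the convolutional layers above reuse VGG16's layer names ('block1_conv1' through 'block5_conv3'), load_weights(..., by_name=True) copies the matching pretrained weights into those layers and leaves every layer whose name is absent from the file (the BatchNormalization and the Dense head here) at its fresh initialization.

# Usage sketch; the weights path below is an assumption, not from the original article.
# Keras' pretrained VGG16 "notop" weights use the same layer names as the
# convolutional blocks defined above, so by_name=True fills block1_conv1 ...
# block5_conv3 from the file and skips every other layer.
model = get_model(weights='vgg16_weights_tf_dim_ordering_tf_kernels_notop.h5')
model.summary()

If you only want to train the new head, you can additionally set trainable = False inside get_model so the named convolutional blocks keep the loaded weights unchanged during training.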