Keras:使用MobileNet定位图像中的特征点

2024-03-28 14:43:14 发布

您现在位置:Python中文网/ 问答频道 /正文

我有一个自定义图像集,我正在尝试定位图像中的4个特征点,每个点的值是一对(x, y)坐标。我已经试过一些基础的CNN模型,效果不错。我现在的目标是改用MobileNet。

我在直接使用Keras内置的MobileNet代码时遇到了问题……所以我用相应的层手动复现了它的结构。内置模型似乎是面向分类任务的,而我只是想回归4个(x, y)坐标(共8个输出值)。我已经尽力尝试了不同的输出层,但到目前为止损失相当大,训练效果很差。

我哪里出错了?MobileNet被宣传为既能用于目标检测也能用于分类,我觉得自己可能因为没能直接使用内置的MobileNet模型而遗漏了什么。

我的实现(棘手的部分似乎在结尾的输出层):

def _separable_block(model, pointwise_filters, strides):
    """Append one MobileNet v1 depthwise-separable block to *model*.

    Structure: ZeroPadding -> 3x3 DepthwiseConv (valid) -> BN -> ReLU
    -> 1x1 Conv (pointwise, `pointwise_filters` channels) -> BN -> ReLU.

    NOTE(review): Keras's own MobileNet pads asymmetrically with
    ((0, 1), (0, 1)) before stride-2 depthwise convs; the symmetric
    (1, 1) padding used here yields the same output sizes for 224x224
    input but is not byte-identical to the reference implementation.
    """
    model.add(ZeroPadding2D(padding=(1, 1)))
    model.add(DepthwiseConv2D((3, 3), padding='valid',
                              depth_multiplier=1, strides=strides,
                              use_bias=False))
    model.add(BatchNormalization())
    model.add(Activation('relu'))
    model.add(Conv2D(pointwise_filters, (1, 1), strides=(1, 1),
                     padding='same', use_bias=False))
    model.add(BatchNormalization())
    model.add(Activation('relu'))


model = Sequential()

# Stem: standard 3x3 stride-2 conv, as in MobileNet v1 (alpha = 1.0).
model.add(Conv2D(32, (3, 3), strides=(2, 2), padding='same',
                 use_bias=False, input_shape=(224, 224, 3)))
model.add(BatchNormalization())
model.add(Activation('relu'))

# The 13 depthwise-separable blocks of MobileNet v1,
# (pointwise filters, depthwise strides) per block:
_BLOCK_CONFIG = [
    (64,   (1, 1)),   # block 1
    (128,  (2, 2)),   # block 2
    (128,  (1, 1)),   # block 3
    (256,  (2, 2)),   # block 4
    (256,  (1, 1)),   # block 5
    (512,  (2, 2)),   # block 6
    (512,  (1, 1)),   # blocks 7-11
    (512,  (1, 1)),
    (512,  (1, 1)),
    (512,  (1, 1)),
    (512,  (1, 1)),
    (1024, (2, 2)),   # block 12
    (1024, (1, 1)),   # block 13
]
for filters, strides in _BLOCK_CONFIG:
    _separable_block(model, filters, strides)

# Regression head for 4 keypoints = 8 (x, y) values.
#
# BUG FIX: the original head ended in Activation('softmax'). Softmax
# forces the 8 outputs to form a probability distribution (all values
# non-negative and summing to 1), which makes regressing raw pixel
# coordinates impossible and explains the terrible loss. Coordinate
# regression needs a *linear* output, trained with a regression loss,
# e.g.  model.compile(optimizer=..., loss='mse').
model.add(GlobalAveragePooling2D())
model.add(Reshape((1, 1, 1024)))
# NOTE(review): Dropout(1e-3) drops only 0.1% of units — effectively a
# no-op (copied from the Keras MobileNet default). Raise it (e.g. 0.5)
# if regularization is actually wanted.
model.add(Dropout(1e-3))
model.add(Conv2D(8, (1, 1), padding='same'))   # 1x1 conv on (1,1,1024) == Dense(8)
model.add(Activation('linear'))                # linear, NOT softmax, for regression
model.add(Reshape((8,)))

# Alternative head the author reported works better for 8 keypoints —
# note it also ends with a *linear* Dense(8), consistent with the fix:
# model.add(Flatten())
# model.add(Dense(2000))
# model.add(Activation('relu'))
# model.add(Dropout(0.5))
# model.add(Dense(2000))
# model.add(Activation('relu'))
# model.add(Dense(8))

Tags: addfalsemodeluseblockactivationrelusame