
How to Implement Multiple Classification Networks in Keras

Not sure how to implement multiple classification networks in Keras? It is actually not hard to work out. This article walks through the approach step by step; hopefully you will get something out of it.


Keras is probably the simplest deep learning framework out there, and getting started with it is very easy.

This is a quick write-up of implementing several classification networks in Keras, such as AlexNet, VGG, and ResNet.

The Kaggle Dogs vs. Cats data is used as the dataset.

The original AlexNet uses LRN (Local Response Normalization), which Keras does not provide as a built-in layer, so BatchNormalization is used here as a substitute.
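If you do want true LRN, one option is to wrap the TensorFlow op in a Lambda layer. This is only a sketch, assuming a TensorFlow backend; it is not part of the code below, which sticks with BatchNormalization:

# Sketch: true LRN via a Lambda layer (assumes the TensorFlow backend;
# hyperparameters approximate the AlexNet paper: n=5 -> depth_radius=2,
# k=2, alpha=1e-4, beta=0.75).
import tensorflow as tf
from keras.layers import Lambda

lrn = Lambda(lambda x: tf.nn.local_response_normalization(
    x, depth_radius=2, bias=2.0, alpha=1e-4, beta=0.75))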

First, create a model.py file that holds the AlexNet and VGG models; they can then be imported directly (a usage sketch follows the code).

# coding=utf-8
from keras.models import Sequential, Model
from keras.layers import Dense, Dropout, Flatten
from keras.layers import Conv2D, MaxPooling2D, BatchNormalization
from keras.layers.advanced_activations import PReLU


def keras_batchnormalization_relu(layer):
    """Apply BatchNormalization followed by a PReLU activation."""
    BN = BatchNormalization()(layer)
    ac = PReLU()(BN)
    return ac
 
def AlexNet(resize=227, classes=2):
    model = Sequential()
    # Stage 1: conv -> BN (substitute for LRN) -> max pool
    model.add(Conv2D(filters=96, kernel_size=(11, 11),
                     strides=(4, 4), padding='valid',
                     input_shape=(resize, resize, 3),
                     activation='relu'))
    model.add(BatchNormalization())
    model.add(MaxPooling2D(pool_size=(3, 3),
                           strides=(2, 2),
                           padding='valid'))
    # Stage 2
    model.add(Conv2D(filters=256, kernel_size=(5, 5),
                     strides=(1, 1), padding='same',
                     activation='relu'))
    model.add(BatchNormalization())
    model.add(MaxPooling2D(pool_size=(3, 3),
                           strides=(2, 2),
                           padding='valid'))
    # Stage 3: three stacked 3x3 convolutions
    model.add(Conv2D(filters=384, kernel_size=(3, 3),
                     strides=(1, 1), padding='same',
                     activation='relu'))
    model.add(Conv2D(filters=384, kernel_size=(3, 3),
                     strides=(1, 1), padding='same',
                     activation='relu'))
    model.add(Conv2D(filters=256, kernel_size=(3, 3),
                     strides=(1, 1), padding='same',
                     activation='relu'))
    model.add(MaxPooling2D(pool_size=(3, 3),
                           strides=(2, 2), padding='valid'))
    # Stage 4: fully connected classifier
    model.add(Flatten())
    model.add(Dense(4096, activation='relu'))
    model.add(Dropout(0.5))

    model.add(Dense(4096, activation='relu'))
    model.add(Dropout(0.5))

    model.add(Dense(1000, activation='relu'))
    model.add(Dropout(0.5))

    # Output layer
    model.add(Dense(classes, activation='softmax'))

    return model
 
def AlexNet2(inputs, classes=2, prob=0.5):
    """
    A hand-written variant trying the other (functional) Keras style.
    :param inputs: input tensor
    :param classes: number of classes
    :param prob: dropout probability
    :return: the model
    """
    print("input shape:", inputs.shape)

    conv1 = Conv2D(filters=96, kernel_size=(11, 11), strides=(4, 4), padding='valid')(inputs)
    conv1 = keras_batchnormalization_relu(conv1)
    print("conv1 shape:", conv1.shape)
    pool1 = MaxPooling2D(pool_size=(3, 3), strides=(2, 2))(conv1)
    print("pool1 shape:", pool1.shape)

    conv2 = Conv2D(filters=256, kernel_size=(5, 5), padding='same')(pool1)
    conv2 = keras_batchnormalization_relu(conv2)
    print("conv2 shape:", conv2.shape)
    pool2 = MaxPooling2D(pool_size=(3, 3), strides=(2, 2))(conv2)
    print("pool2 shape:", pool2.shape)

    conv3 = Conv2D(filters=384, kernel_size=(3, 3), padding='same')(pool2)
    conv3 = PReLU()(conv3)
    print("conv3 shape:", conv3.shape)

    conv4 = Conv2D(filters=384, kernel_size=(3, 3), padding='same')(conv3)
    conv4 = PReLU()(conv4)
    print("conv4 shape:", conv4.shape)

    conv5 = Conv2D(filters=256, kernel_size=(3, 3), padding='same')(conv4)
    conv5 = PReLU()(conv5)
    print("conv5 shape:", conv5.shape)

    pool3 = MaxPooling2D(pool_size=(3, 3), strides=(2, 2))(conv5)
    print("pool3 shape:", pool3.shape)

    dense1 = Flatten()(pool3)
    dense1 = Dense(4096, activation='relu')(dense1)
    print("dense1 shape:", dense1.shape)
    dense1 = Dropout(prob)(dense1)

    dense2 = Dense(4096, activation='relu')(dense1)
    print("dense2 shape:", dense2.shape)
    dense2 = Dropout(prob)(dense2)

    predict = Dense(classes, activation='softmax')(dense2)

    model = Model(inputs=inputs, outputs=predict)
    return model
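# Note: unlike AlexNet(), AlexNet2() uses the functional API, so it expects a
# Keras tensor rather than an image size. A minimal usage sketch (assumed, not
# from the original):
#   from keras.layers import Input
#   inputs = Input(shape=(227, 227, 3))
#   model = AlexNet2(inputs, classes=2)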
 
def vgg13(resize=224, classes=2, prob=0.5):
    # VGG-13: five blocks of two 3x3 convolutions each, then three dense layers
    model = Sequential()
    model.add(Conv2D(64, (3, 3), strides=(1, 1), input_shape=(resize, resize, 3), padding='same', activation='relu',
                     kernel_initializer='uniform'))
    model.add(Conv2D(64, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Conv2D(128, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
    model.add(Conv2D(128, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Conv2D(256, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
    model.add(Conv2D(256, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
    model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
    model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Flatten())
    model.add(Dense(4096, activation='relu'))
    model.add(Dropout(prob))
    model.add(Dense(4096, activation='relu'))
    model.add(Dropout(prob))
    model.add(Dense(classes, activation='softmax'))
    return model
 
def vgg16(resize=224, classes=2, prob=0.5):
    # VGG-16: thirteen 3x3 convolutions in five blocks, then three dense layers
    model = Sequential()
    model.add(Conv2D(64, (3, 3), strides=(1, 1), input_shape=(resize, resize, 3), padding='same', activation='relu',
                     kernel_initializer='uniform'))
    model.add(Conv2D(64, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Conv2D(128, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
    model.add(Conv2D(128, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Conv2D(256, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
    model.add(Conv2D(256, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
    model.add(Conv2D(256, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
    model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
    model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
    model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
    model.add(Conv2D(512, (3, 3), strides=(1, 1), padding='same', activation='relu', kernel_initializer='uniform'))
    model.add(MaxPooling2D(pool_size=(2, 2)))
    model.add(Flatten())
    model.add(Dense(4096, activation='relu'))
    model.add(Dropout(prob))
    model.add(Dense(4096, activation='relu'))
    model.add(Dropout(prob))
    model.add(Dense(classes, activation='softmax'))
    return model
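With model.py in place, training looks like any other Keras workflow. Here is a minimal sketch, assuming the code above is saved as model.py and the Dogs vs. Cats images are sorted into per-class subfolders (the data/train path and folder layout are hypothetical):

# Minimal training sketch (assumptions: code above saved as model.py,
# images arranged as data/train/cat/ and data/train/dog/).
from keras.preprocessing.image import ImageDataGenerator
from keras.optimizers import SGD
from model import vgg16

model = vgg16(resize=224, classes=2)
model.compile(optimizer=SGD(lr=0.01, momentum=0.9),
              loss='categorical_crossentropy',
              metrics=['accuracy'])

train_gen = ImageDataGenerator(rescale=1. / 255).flow_from_directory(
    'data/train',            # hypothetical directory with one subfolder per class
    target_size=(224, 224),
    batch_size=32,
    class_mode='categorical')

model.fit_generator(train_gen, steps_per_epoch=len(train_gen), epochs=10)

Swap in AlexNet(resize=227, ...) the same way; just match target_size to the model's expected input size.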
