from keras.models import Sequential
from keras.layers import Dense, Activation, Flatten, Conv2D, MaxPooling2D, BatchNormalization

# AlexNet with batch normalization in Keras
# Input images are 224x224 RGB, i.e. input_shape=(224, 224, 3).

model = Sequential()

# Conv block 1: 64 filters of 11x11. The original Keras 0.x call was
# Convolution2D(64, 3, 11, 11, border_mode='full'); 'full' padding has no
# modern equivalent, so 'same' is used here as the closest substitute.
model.add(Conv2D(64, (11, 11), padding='same', input_shape=(224, 224, 3)))
model.add(BatchNormalization())
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(3, 3)))

# Conv block 2: 128 filters of 7x7.
model.add(Conv2D(128, (7, 7), padding='same'))
model.add(BatchNormalization())
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(3, 3)))

# Conv block 3: 192 filters of 3x3. The original passed a 128-channel shape
# to BatchNormalization here, mismatching the 192 filters; letting the layer
# infer its input shape fixes that.
model.add(Conv2D(192, (3, 3), padding='same'))
model.add(BatchNormalization())
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(3, 3)))

# Conv block 4: 256 filters of 3x3 (the original's 128-channel
# BatchNormalization shape likewise mismatched the 256 filters).
model.add(Conv2D(256, (3, 3), padding='same'))
model.add(BatchNormalization())
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=(3, 3)))

# Classifier head: two 4096-unit hidden layers and a 1000-way softmax.
# Modern Dense layers infer their input size, so the explicit
# Dense(12*12*256, 4096) input dimension from the original is unnecessary.
model.add(Flatten())
model.add(Dense(4096, kernel_initializer='random_normal'))
model.add(BatchNormalization())
model.add(Activation('relu'))
model.add(Dense(4096, kernel_initializer='random_normal'))
model.add(BatchNormalization())
model.add(Activation('relu'))
model.add(Dense(1000, kernel_initializer='random_normal'))
model.add(BatchNormalization())
model.add(Activation('softmax'))
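
# The original gist stops at the model definition. What follows is a minimal
# usage sketch, not part of the original: the optimizer, loss, and dummy data
# are illustrative assumptions only.
import numpy as np

model.compile(optimizer='sgd', loss='categorical_crossentropy', metrics=['accuracy'])
model.summary()

# Sanity-check the forward pass on a random batch of two 224x224 RGB images.
dummy_images = np.random.rand(2, 224, 224, 3).astype('float32')
predictions = model.predict(dummy_images)
print(predictions.shape)  # expected: (2, 1000)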