
Commit 2072e89

committed May 12, 2017
updated
1 parent 14fdf37 commit 2072e89

1 file changed: +13 -13 lines changed
 

kerasTUT/6-CNN_example.py

+13 -13
@@ -37,29 +37,29 @@
 
 # Conv layer 1 output shape (32, 28, 28)
 model.add(Convolution2D(
-    nb_filter=32,
-    nb_row=5,
-    nb_col=5,
-    border_mode='same',     # Padding method
-    dim_ordering='th',      # if use tensorflow, to set the input dimension order to theano ("th") style, but you can change it.
-    input_shape=(1,         # channels
-                 28, 28,)   # height & width
+    batch_input_shape=(64, 1, 28, 28),
+    filters=32,
+    kernel_size=5,
+    strides=1,
+    padding='same',         # Padding method
+    data_format='channels_first',
 ))
 model.add(Activation('relu'))
 
 # Pooling layer 1 (max pooling) output shape (32, 14, 14)
 model.add(MaxPooling2D(
-    pool_size=(2, 2),
-    strides=(2, 2),
-    border_mode='same',     # Padding method
+    pool_size=2,
+    strides=2,
+    padding='same',         # Padding method
+    data_format='channels_first',
 ))
 
 # Conv layer 2 output shape (64, 14, 14)
-model.add(Convolution2D(64, 5, 5, border_mode='same'))
+model.add(Convolution2D(64, 5, strides=1, padding='same', data_format='channels_first'))
 model.add(Activation('relu'))
 
 # Pooling layer 2 (max pooling) output shape (64, 7, 7)
-model.add(MaxPooling2D(pool_size=(2, 2), border_mode='same'))
+model.add(MaxPooling2D(2, 2, 'same', data_format='channels_first'))
 
 # Fully connected layer 1 input shape (64 * 7 * 7) = (3136), output shape (1024)
 model.add(Flatten())
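
For reference, a minimal standalone sketch of the same first conv/pool block written directly against the Keras 2 argument names (filters, kernel_size, padding, data_format). Conv2D is the Keras 2 name for Convolution2D, and model.summary() is only used here to confirm the (32, 28, 28) and (32, 14, 14) output shapes noted in the comments; this is an illustration under those assumptions, not part of the commit.

# Sketch only: rebuild the first conv + pool block with Keras 2 argument names
# and print the layer output shapes. Assumes a Keras 2.x installation.
from keras.models import Sequential
from keras.layers import Conv2D, MaxPooling2D, Activation

model = Sequential()
model.add(Conv2D(
    batch_input_shape=(64, 1, 28, 28),   # fixed batch of 64, 1 channel, 28x28 pixels
    filters=32,                          # Keras 1: nb_filter
    kernel_size=5,                       # Keras 1: nb_row / nb_col
    strides=1,
    padding='same',                      # Keras 1: border_mode
    data_format='channels_first',        # Keras 1: dim_ordering='th'
))
model.add(Activation('relu'))
model.add(MaxPooling2D(pool_size=2, strides=2, padding='same',
                       data_format='channels_first'))

# Expect (64, 32, 28, 28) after the conv layer and (64, 32, 14, 14) after the pool.
model.summary()
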
@@ -80,7 +80,7 @@
 
 print('Training ------------')
 # Another way to train the model
-model.fit(X_train, y_train, epochs=1, batch_size=32,)
+model.fit(X_train, y_train, epochs=1, batch_size=64,)
 
 print('\nTesting ------------')
 # Evaluate the model with the metrics we defined earlier
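
Presumably the batch_size was raised from 32 to 64 so that the training batches line up with the batch dimension fixed by batch_input_shape=(64, 1, 28, 28) in the first layer; that is an inference from the diff, not something stated in the commit message. A sketch of the matching compile/fit/evaluate calls, assuming X_train, y_train, X_test, y_test are the channels-first MNIST arrays and one-hot labels prepared earlier in 6-CNN_example.py:

# Sketch only: X_train, y_train, X_test, y_test are assumed to come from the
# MNIST preprocessing earlier in the tutorial (channels_first, one-hot labels).
model.compile(optimizer='adam',
              loss='categorical_crossentropy',
              metrics=['accuracy'])

# batch_size matches the fixed batch dimension in batch_input_shape=(64, ...)
model.fit(X_train, y_train, epochs=1, batch_size=64)

loss, accuracy = model.evaluate(X_test, y_test, batch_size=64)
print('\ntest loss:', loss)
print('test accuracy:', accuracy)
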
