Merge pull request #118 from lvapeab/master
Written some docs/fixed some code flaws
lvapeab authored Jul 17, 2017

Verified: this commit was created on GitHub.com and signed with GitHub's verified signature.
2 parents 29e4ea0 + c3db88d commit 306fd4a
Showing 3 changed files with 145 additions and 48 deletions.
4 changes: 1 addition & 3 deletions keras_wrapper/cnn_model.py
@@ -3350,7 +3350,7 @@ def conv_layer(self, x, nb_filter, nb_row, nb_col, dim_ordering,
                                 dim_ordering=dim_ordering)(x)

         if padding:
-            for i in range(padding):
+            for _ in range(padding):
                 x = ZeroPadding2D(padding=(1, 1), dim_ordering=dim_ordering)(x)

         return x
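For reference, the loop touched here simply stacks ZeroPadding2D layers, each adding one pixel of zeros on every side. A minimal, self-contained sketch of the same idea (not part of the commit), assuming the Keras 1.x-style API this wrapper targets; the input shape and padding value are illustrative:

# Illustrative sketch of the padding loop above; not part of the commit.
from keras.layers import Input, ZeroPadding2D

x = Input(shape=(3, 32, 32))   # dim_ordering='th': (channels, width, height)
padding = 2                    # illustrative value; each pass pads 1 pixel per side
for _ in range(padding):
    x = ZeroPadding2D(padding=(1, 1), dim_ordering='th')(x)
# Each iteration grows every spatial dimension by 2, so 32x32 becomes 36x36 here.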
@@ -3367,8 +3367,6 @@ def GoogLeNet_FunctionalAPI(self, nOutput, input):
         CONCAT_AXIS = 1
         NB_CLASS = nOutput  # number of classes (default 1000)
         DROPOUT = 0.4
-        WEIGHT_DECAY = 0.0005  # L2 regularization factor
-        USE_BN = True  # whether to use batch normalization
         # Theano - 'th' (channels, width, height)
         # Tensorflow - 'tf' (width, height, channels)
         DIM_ORDERING = 'th'
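The DIM_ORDERING setting kept above controls the tensor layout the model expects. An illustrative sketch (not part of the commit), assuming the Keras 1.x Input layer:

# Illustrative only; not part of the commit.
from keras.layers import Input

img_th = Input(shape=(3, 224, 224))   # 'th' (Theano): (channels, width, height)
img_tf = Input(shape=(224, 224, 3))   # 'tf' (TensorFlow): (width, height, channels)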
5 changes: 5 additions & 0 deletions keras_wrapper/extra/regularize.py
@@ -19,6 +19,11 @@ def Regularize(layer, params,
     :param params: Params specifying the regularizations to apply
     :param shared_layers: Boolean indicating if we want to get the used layers for applying to a shared-layers model.
     :param name: Name prepended to regularizer layer
+    :param apply_noise: If False, noise won't be applied, independently of params
+    :param apply_dropout: If False, dropout won't be applied, independently of params
+    :param apply_prelu: If False, prelu won't be applied, independently of params
+    :param apply_batch_normalization: If False, batch normalization won't be applied, independently of params
+    :param apply_l2: If False, l2 normalization won't be applied, independently of params
     :return: Regularized layer
     """
     shared_layers_list = []
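The five added lines document per-call switches for Regularize. A hypothetical sketch (not the library's actual implementation) of how such flags typically gate the regularizers requested in params; the dictionary keys and defaults below are invented for illustration:

# Hypothetical sketch; param keys and defaults are invented for illustration.
from keras.layers.core import Dropout
from keras.layers.noise import GaussianNoise
from keras.layers.normalization import BatchNormalization
from keras.layers.advanced_activations import PReLU

def regularize_sketch(layer, params, apply_noise=True, apply_dropout=True,
                      apply_prelu=True, apply_batch_normalization=True):
    # Each block runs only when params asks for it AND its apply_* flag is True.
    if apply_noise and params.get('USE_NOISE', False):
        layer = GaussianNoise(params.get('NOISE_AMOUNT', 0.01))(layer)
    if apply_dropout and params.get('USE_DROPOUT', False):
        layer = Dropout(params.get('DROPOUT_P', 0.5))(layer)
    if apply_batch_normalization and params.get('USE_BATCH_NORMALIZATION', False):
        layer = BatchNormalization()(layer)
    if apply_prelu and params.get('USE_PRELU', False):
        layer = PReLU()(layer)
    return layer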
