Python keras.regularizers module, L1L2 example source code
We extracted the following 17 code examples from open-source Python projects to illustrate how to use keras.regularizers.L1L2.
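Before the project examples, here is a minimal, self-contained sketch of the regularizer itself (our own illustration, not drawn from the projects below): L1L2 applies both an L1 and an L2 penalty to a layer's weights, and reduces to plain L1 or plain L2 when one coefficient is zero.

# Minimal sketch (our illustration): attach an L1L2 penalty to a layer.
from keras.models import Sequential
from keras.layers import Dense
from keras.regularizers import L1L2

model = Sequential([
    Dense(64, activation='relu', input_shape=(100,),
          kernel_regularizer=L1L2(l1=1e-5, l2=1e-4)),
    Dense(1, activation='sigmoid'),
])
model.compile(optimizer='adam', loss='binary_crossentropy')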
def __call__(self, inputs):
    x = self._merge_inputs(inputs)

    shape = getattr(x, '_keras_shape')
    replicate_model = self._replicate_model(kl.Input(shape=shape[2:]))
    x = kl.TimeDistributed(replicate_model)(x)

    kernel_regularizer = kr.L1L2(l1=self.l1_decay, l2=self.l2_decay)
    x = kl.Bidirectional(kl.GRU(128, kernel_regularizer=kernel_regularizer,
                                return_sequences=True),
                         merge_mode='concat')(x)

    kernel_regularizer = kr.L1L2(l1=self.l1_decay, l2=self.l2_decay)
    gru = kl.GRU(256, kernel_regularizer=kernel_regularizer)
    x = kl.Bidirectional(gru)(x)
    x = kl.Dropout(self.dropout)(x)

    return self._build(inputs, x)
def __call__(self, inputs):
    x = inputs[0]

    kernel_regularizer = kr.L1L2(l1=self.l1_decay, l2=self.l2_decay)
    x = kl.Conv1D(128, 11,
                  kernel_initializer=self.init,
                  kernel_regularizer=kernel_regularizer)(x)
    x = kl.Activation('relu')(x)
    x = kl.MaxPooling1D(4)(x)

    x = kl.Flatten()(x)

    kernel_regularizer = kr.L1L2(l1=self.l1_decay, l2=self.l2_decay)
    x = kl.Dense(self.nb_hidden,
                 kernel_initializer=self.init,
                 kernel_regularizer=kernel_regularizer)(x)
    x = kl.Activation('relu')(x)
    x = kl.Dropout(self.dropout)(x)
    return self._build(inputs, x)

def __call__(self, inputs):
    x = inputs[0]
    kernel_regularizer = kr.L1L2(l1=self.l1_decay, l2=self.l2_decay)
    x = kl.Conv1D(128, 11, kernel_regularizer=kernel_regularizer)(x)
    x = kl.Activation('relu')(x)
    x = kl.MaxPooling1D(4)(x)
    kernel_regularizer = kr.L1L2(l1=self.l1_decay, l2=self.l2_decay)
    x = kl.Conv1D(256, 7, kernel_regularizer=kernel_regularizer)(x)
    x = kl.Activation('relu')(x)
    x = kl.MaxPooling1D(4)(x)
    kernel_regularizer = kr.L1L2(l1=self.l1_decay, l2=self.l2_decay)
    gru = kl.recurrent.GRU(256, kernel_regularizer=kernel_regularizer)
    x = gru(x)
    return self._build(inputs, x)
def BiGRU(X_train, y_train, X_test, y_test, gru_units, dense_units,
          input_shape, batch_size, epochs, drop_out, patience):
    model = Sequential()
    reg = L1L2(l1=0.2, l2=0.2)
    model.add(Bidirectional(GRU(units=gru_units, dropout=drop_out,
                                activation='relu',
                                recurrent_regularizer=reg,
                                return_sequences=True),
                            input_shape=input_shape,
                            merge_mode="concat"))
    model.add(BatchNormalization())
    model.add(TimeDistributed(Dense(dense_units, activation='relu')))
    model.add(BatchNormalization())
    model.add(Bidirectional(GRU(units=gru_units, recurrent_regularizer=reg,
                                return_sequences=True),
                            merge_mode="concat"))
    model.add(BatchNormalization())
    model.add(Dense(units=1))
    model.add(GlobalAveragePooling1D())
    print(model.summary())

    early_stopping = EarlyStopping(monitor="val_loss", patience=patience)
    model.compile(loss='mse', optimizer='adam')
    history_callback = model.fit(X_train, y_train, batch_size=batch_size,
                                 epochs=epochs, verbose=2,
                                 callbacks=[early_stopping],
                                 validation_data=(X_test, y_test),
                                 shuffle=True)
    return model, history_callback
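A hedged usage sketch (the shapes and hyperparameters here are our own, purely illustrative): the model consumes 3-D input of shape (samples, timesteps, features), and the final Dense(1) followed by GlobalAveragePooling1D emits one value per sequence.

# Hypothetical call (our shapes): 100 sequences of 50 timesteps x 8 features.
import numpy as np
X_train = np.random.rand(100, 50, 8)
y_train = np.random.rand(100, 1)
X_test = np.random.rand(20, 50, 8)
y_test = np.random.rand(20, 1)
model, history = BiGRU(X_train, y_train, X_test, y_test,
                       gru_units=32, dense_units=16, input_shape=(50, 8),
                       batch_size=16, epochs=5, drop_out=0.1, patience=2)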
def l1l2(l1_weight=0, l2_weight=0):
    # Version shim: Keras 2 exposes the class L1L2, Keras 1 the function l1l2.
    if keras_2:
        from keras.regularizers import L1L2
        return L1L2(l1_weight, l2_weight)
    else:
        from keras.regularizers import l1l2
        return l1l2(l1_weight, l2_weight)
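The keras_2 flag is defined elsewhere in that project; one plausible way to derive it (our assumption, not the project's actual code) is from the installed version string:

# Our assumption of how keras_2 could be set; the project defines it elsewhere.
import keras
keras_2 = int(keras.__version__.split('.')[0]) >= 2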
def __call__(self, inputs):
    x = inputs[0]

    kernel_regularizer = kr.L1L2(l1=self.l1_decay, l2=self.l2_decay)
    x = kl.Conv1D(512, 3,
                  name='conv1',
                  kernel_initializer=self.init,
                  kernel_regularizer=kernel_regularizer)(x)
    x = kl.BatchNormalization(name='bn1')(x)
    x = kl.Activation('relu', name='act1')(x)
    x = kl.MaxPooling1D(2, name='pool1')(x)

    # 124
    x = self._res_unit(x, [32, 32, 128], stage=1, block=1, stride=2)
    x = self._res_unit(x, [32, 32, 128], stage=1, block=2)
    x = self._res_unit(x, [32, 32, 128], stage=1, block=3)

    # 64
    x = self._res_unit(x, [64, 64, 256], stage=2, block=1, stride=2)
    x = self._res_unit(x, [64, 64, 256], stage=2, block=2)
    x = self._res_unit(x, [64, 64, 256], stage=2, block=3)

    # 32
    x = self._res_unit(x, [128, 128, 512], stage=3, block=1, stride=2)
    x = self._res_unit(x, [128, 128, 512], stage=3, block=2)
    x = self._res_unit(x, [128, 128, 512], stage=3, block=3)

    # 16
    x = self._res_unit(x, [256, 256, 1024], stage=4, block=1, stride=2)

    x = kl.GlobalAveragePooling1D()(x)
    x = kl.Dropout(self.dropout)(x)

    return self._build(inputs, x)
def _res_unit(self, inputs, nb_filter, size=3, stride=1, stage=1, block=1):
    name = '%02d-%02d/' % (stage, block)
    id_name = '%sid_' % (name)
    res_name = '%sres_' % (name)

    # Residual branch
    x = kl.BatchNormalization(name=res_name + 'bn1')(inputs)
    x = kl.Activation('relu', name=res_name + 'act1')(x)
    kernel_regularizer = kr.L1L2(l1=self.l1_decay, l2=self.l2_decay)
    x = kl.Conv1D(nb_filter, size,
                  name=res_name + 'conv1',
                  padding='same',
                  strides=stride,
                  kernel_regularizer=kernel_regularizer)(x)

    x = kl.BatchNormalization(name=res_name + 'bn2')(x)
    x = kl.Activation('relu', name=res_name + 'act2')(x)
    kernel_regularizer = kr.L1L2(l1=self.l1_decay, l2=self.l2_decay)
    x = kl.Conv1D(nb_filter, size,
                  name=res_name + 'conv2',
                  padding='same',
                  kernel_regularizer=kernel_regularizer)(x)

    # Identity branch
    if nb_filter != inputs._keras_shape[-1] or stride > 1:
        kernel_regularizer = kr.L1L2(l1=self.l1_decay, l2=self.l2_decay)
        identity = kl.Conv1D(nb_filter, 1,
                             name=id_name + 'conv1',
                             padding='same',
                             strides=stride,
                             kernel_initializer=self.init,
                             kernel_regularizer=kernel_regularizer)(inputs)
    else:
        identity = inputs

    x = kl.add([identity, x], name=name + 'merge')
    return x
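Such a residual unit can be smoke-tested outside its class by stubbing the attributes it reads (a hypothetical sketch: we assume _res_unit is importable as a plain function, and the stubbed attribute values are our own):

# Hypothetical smoke test: stub self.init / self.l1_decay / self.l2_decay.
import types
import keras.layers as kl
import keras.models as km

stub = types.SimpleNamespace(init='glorot_uniform', l1_decay=0.0, l2_decay=1e-4)
inp = kl.Input(shape=(128, 64))
out = _res_unit(stub, inp, nb_filter=64, size=3, stride=1)
km.Model(inp, out).summary()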
def __call__(self, inputs):
    x = inputs[0]
    # Residual stack whose units use atrous (dilated) convolutions.
    x = self._res_unit(x, [32, 32, 128], stage=1, block=1, stride=2)
    x = self._res_unit(x, [32, 32, 128], atrous=2, stage=1, block=2)
    x = self._res_unit(x, [32, 32, 128], atrous=4, stage=1, block=3)
    # 64
    x = self._res_unit(x, [64, 64, 256], stage=2, block=1, stride=2)
    # 32
    x = self._res_unit(x, [128, 128, 512], stage=3, block=1, stride=2)
    x = kl.GlobalAveragePooling1D()(x)
    x = kl.Dropout(self.dropout)(x)
    return self._build(inputs, x)
def _replicate_model(self, input):
    kernel_regularizer = kr.L1L2(l1=self.l1_decay, l2=self.l2_decay)
    x = kl.Dense(512, kernel_initializer=self.init,
                 kernel_regularizer=kernel_regularizer)(input)
    x = kl.Activation('relu')(x)
    return km.Model(input, x)

def _replicate_model(self, input):
    kernel_regularizer = kr.L1L2(l1=self.l1_decay, l2=self.l2_decay)
    x = kl.Dense(256,
                 kernel_regularizer=kernel_regularizer)(input)
    x = kl.Activation(self.act_replicate)(x)
    return km.Model(input, x)
def denseNet(input_dim):
    base_model = densenet.DenseNet(input_shape=(input_dim, input_dim, 3),
                                   classes=17, dropout_rate=0.2,
                                   weights=None, include_top=False)
    x = Dense(17, activation='softmax',
              kernel_regularizer=regularizers.L1L2(l2=1e-4),
              bias_regularizer=regularizers.L1L2(l2=1e-4))(base_model.output)
    model = Model(inputs=base_model.input, outputs=x)

    # Load model
    weights_file = "../weights/DenseNet-40-12CIFAR10-tf.h5"
    if os.path.exists(weights_file):
        model.load_weights(weights_file)
        print("Model loaded.")

    return model
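As an aside, L1L2 with only l2 set produces the same penalty as the regularizers.l2 shortcut; a quick equivalence check (our sketch):

# Quick check (our sketch): L1L2(l2=1e-4) matches the l2() shortcut.
import keras.backend as K
from keras import regularizers

w = K.variable([[0.5, -0.5]])
print(K.eval(regularizers.L1L2(l2=1e-4)(w)))
print(K.eval(regularizers.l2(1e-4)(w)))  # same value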
def _res_unit(self, inputs, nb_filter, size=3, stride=1, stage=1, block=1):
    name = '%02d-%02d/' % (stage, block)
    id_name = '%sid_' % (name)
    res_name = '%sres_' % (name)

    # Residual branch

    # 1x1 down-sample conv
    x = kl.BatchNormalization(name=res_name + 'bn1')(inputs)
    x = kl.Activation('relu', name=res_name + 'act1')(x)
    kernel_regularizer = kr.L1L2(l1=self.l1_decay, l2=self.l2_decay)
    x = kl.Conv1D(nb_filter[0], 1,
                  name=res_name + 'conv1',
                  strides=stride,
                  kernel_initializer=self.init,
                  kernel_regularizer=kernel_regularizer)(x)

    # LxL conv
    x = kl.BatchNormalization(name=res_name + 'bn2')(x)
    x = kl.Activation('relu', name=res_name + 'act2')(x)
    kernel_regularizer = kr.L1L2(l1=self.l1_decay, l2=self.l2_decay)
    x = kl.Conv1D(nb_filter[1], size,
                  name=res_name + 'conv2',
                  padding='same',
                  kernel_initializer=self.init,
                  kernel_regularizer=kernel_regularizer)(x)

    # 1x1 up-sample conv
    x = kl.BatchNormalization(name=res_name + 'bn3')(x)
    x = kl.Activation('relu', name=res_name + 'act3')(x)
    kernel_regularizer = kr.L1L2(l1=self.l1_decay, l2=self.l2_decay)
    x = kl.Conv1D(nb_filter[2], 1,
                  name=res_name + 'conv3',
                  kernel_initializer=self.init,
                  kernel_regularizer=kernel_regularizer)(x)

    # Identity branch
    if nb_filter[-1] != inputs._keras_shape[-1] or stride > 1:
        kernel_regularizer = kr.L1L2(l1=self.l1_decay, l2=self.l2_decay)
        identity = kl.Conv1D(nb_filter[2], 1,
                             name=id_name + 'conv1',
                             strides=stride,
                             kernel_initializer=self.init,
                             kernel_regularizer=kernel_regularizer)(inputs)
    else:
        identity = inputs

    x = kl.add([identity, x], name=name + 'merge')
    return x
def _res_unit(self, inputs, nb_filter, size=3, stride=1, atrous=1,
              stage=1, block=1):
    # Same bottleneck layout as the _res_unit above, but the LxL conv is dilated.
    name = '%02d-%02d/' % (stage, block)
    res_name = '%sres_' % (name)
    reg = kr.L1L2(l1=self.l1_decay, l2=self.l2_decay)
    x = kl.BatchNormalization(name=res_name + 'bn1')(inputs)
    x = kl.Activation('relu', name=res_name + 'act1')(x)
    x = kl.Conv1D(nb_filter[0], 1, strides=stride, name=res_name + 'conv1',
                  kernel_initializer=self.init, kernel_regularizer=reg)(x)
    x = kl.BatchNormalization(name=res_name + 'bn2')(x)
    x = kl.Activation('relu', name=res_name + 'act2')(x)
    x = kl.Conv1D(nb_filter[1], size, dilation_rate=atrous, padding='same',
                  name=res_name + 'conv2', kernel_initializer=self.init,
                  kernel_regularizer=reg)(x)
    # 1x1 up-sample conv
    x = kl.BatchNormalization(name=res_name + 'bn3')(x)
    x = kl.Activation('relu', name=res_name + 'act3')(x)
    x = kl.Conv1D(nb_filter[2], 1, name=res_name + 'conv3',
                  kernel_initializer=self.init, kernel_regularizer=reg)(x)
    if nb_filter[-1] != inputs._keras_shape[-1] or stride > 1:
        identity = kl.Conv1D(nb_filter[2], 1, strides=stride,
                             name='%sid_conv1' % name,
                             kernel_regularizer=reg)(inputs)
    else:
        identity = inputs
    x = kl.add([identity, x], name=name + 'merge')
    return x