training.models: use asymmetric zero padding instead of lambda layer

Robert Sachunsky 2026-02-08 01:10:56 +01:00
parent ee4bffd81d
commit 7b7ef041ec


@@ -69,16 +69,9 @@ def mlp(x, hidden_units, dropout_rate):
     return x
 
 def one_side_pad(x):
-    # rs: fixme: lambda layers are problematic for de/serialization!
-    # - can we use ZeroPadding1D instead of ZeroPadding2D+Lambda?
-    x = ZeroPadding2D((1, 1), data_format=IMAGE_ORDERING)(x)
-    if IMAGE_ORDERING == 'channels_first':
-        x = Lambda(lambda x: x[:, :, :-1, :-1])(x)
-    elif IMAGE_ORDERING == 'channels_last':
-        x = Lambda(lambda x: x[:, :-1, :-1, :])(x)
+    x = ZeroPadding2D(((1, 0), (1, 0)), data_format=IMAGE_ORDERING)(x)
     return x
 
 def identity_block(input_tensor, kernel_size, filters, stage, block):
     """The identity block is the block that has no conv layer at shortcut.
 
     # Arguments
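
For context, a minimal sketch (not part of the commit; it assumes channels_last ordering and a standard TensorFlow/Keras install) showing that the new asymmetric ZeroPadding2D is equivalent to the old symmetric padding followed by a Lambda crop:

    import numpy as np
    import tensorflow as tf
    from tensorflow.keras.layers import ZeroPadding2D, Lambda

    x = tf.random.uniform((1, 4, 4, 3))  # dummy NHWC input

    # old approach: pad 1 on all four sides, then crop the trailing row/column
    old = ZeroPadding2D((1, 1), data_format='channels_last')(x)
    old = Lambda(lambda t: t[:, :-1, :-1, :])(old)

    # new approach: pad 1 only on the top and left, no Lambda needed
    new = ZeroPadding2D(((1, 0), (1, 0)), data_format='channels_last')(x)

    assert old.shape == new.shape == (1, 5, 5, 3)
    assert np.allclose(old.numpy(), new.numpy())

Because the Lambda is gone, the padding behaviour is captured entirely by a serializable built-in layer, which avoids the de/serialization problems the removed fixme comment pointed out.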