class fc_JLT(layers.Layer):
def __init__(self, y, units, activation, sketchrate):
super(fc_JLT, self).__init__()
self.input_dim = y.get_shape()[-1]
self.activation = activation
self.sketchrate = sketchrate
self.w = self.add_weight(
name = 'w',
shape = (self.input_dim, units),
initializer = 'random_normal',
trainable = True,
)
self.b = self.add_weight(
name='b',
shape = (units, ),
initializer='zeros',
trainable = True,
)
def call(self, inputs):
if self.sketchrate == 1:
return tf.matmul(inputs, self.w) + self.b
else:
S = generate_JLTMatrix(self.input_dim, self.sketchrate)
P1 = tf.matmul(inputs, S)
print(P1.get_shape,P2.get_shape)
P2 = tf.matmul(S.T,self.w)
return tf.matmul(P1, P2) + self.b
def get_config(self):
config = super().get_config().copy()
return config
I wrote a custom layer in Keras as a modified version for a fully connected layer with extra parameters. The only difference is I multiply the weight matrix with a JLTMatrix. However I only want this multiplication happened in training procedure but not in prediction(or evaluation) procedure(which means in training forward propagation is WSS^Tx+b but in prediction Wx+b), I know that for built-in layer like dropout or batch normalization you can set parameter training=true to toggle that but I am new to Keras and IDK how could I enable that for the custom layer.
question from:
https://stackoverflow.com/questions/65599088/toggle-traning-true-parameter-in-keras-for-custom-layer 与恶龙缠斗过久,自身亦成为恶龙;凝视深渊过久,深渊将回以凝视…