I want to create a custom layer that takes an internal tensor and a custom dot function in __init__, so that for a given batch it computes the dot function over all possible pairs formed between the batch rows and the internal tensor rows.
If I were to use the natural inner product, I could simply write tf.matmul(inputs, self.internal_tensor, transpose_b=True), but I want to be able to plug in other kernel functions.
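For instance, with the plain inner product the whole call would reduce to a single matmul producing the (batch, n_internal) matrix of pairwise dot products, roughly like this sketch (class name is only illustrative):

import tensorflow as tf
from tensorflow.keras.layers import Layer

class InnerProductLayer(Layer):
    def __init__(self, internal_tensor, **kwargs):
        super().__init__(**kwargs)
        self.internal_tensor = tf.convert_to_tensor(internal_tensor, tf.float32)

    def call(self, inputs, **kwargs):
        # (batch, d) x (n_internal, d)^T -> (batch, n_internal) pairwise inner products
        return tf.matmul(inputs, self.internal_tensor, transpose_b=True)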
MWE:
import numpy as np
import tensorflow as tf
from tensorflow.keras.layers import Layer


class CustomLayer(Layer):
    def __init__(self, internal_tensor, kernel, **kwargs):
        super().__init__(**kwargs)
        # Variable with an unspecified first dimension (number of internal rows)
        self.internal_tensor = tf.Variable(
            0.,
            shape=tf.TensorShape((None, 10)),
            validate_shape=False,
            name='internal_tensor',
        )
        self.internal_tensor.assign(internal_tensor)
        self.kernel = kernel

    @tf.function
    def call(self, inputs, **kwargs):
        return self.kernel([
            # emulate np.repeat, since tf.repeat is not available here
            tf.reshape(tf.tile(inputs, [1, self.internal_tensor.shape[0]]),
                       [-1, inputs.shape[1]]),
            tf.tile(self.internal_tensor, [inputs.shape[0], 1]),
        ])


custom_layer = CustomLayer(
    internal_tensor=tf.convert_to_tensor(np.random.rand(30, 10), tf.float32),
    kernel=lambda inputs: inputs[0] + inputs[1],
)

x = np.random.rand(15, 10).astype(np.float32)
custom_layer(x)
# TypeError: Failed to convert object of type <class 'list'> to Tensor. Contents: [1, None]. Consider casting elements to a supported type.
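As far as I can tell, the [1, None] comes from the static shapes: during the tf.function trace, self.internal_tensor.shape[0] is None (the variable was declared with shape (None, 10)) and inputs.shape[0] is None as well, so tf.tile receives a Python list containing None. A quick standalone check (just a sketch, independent of the layer above):

@tf.function(input_signature=[tf.TensorSpec([None, 10], tf.float32)])
def check(x):
    print(x.shape[0])         # None: static batch size is unknown at trace time
    tf.print(tf.shape(x)[0])  # 15 at run time: the dynamic shape is available
    return x

check(tf.zeros((15, 10), tf.float32))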
For the sake of clarity, here is the target working layer in NumPy:
class NumpyLayer:
    def __init__(self, internal_tensor, kernel):
        self.internal_tensor = internal_tensor
        self.kernel = kernel

    def __call__(self, inputs):
        return self.kernel([
            np.repeat(inputs, len(self.internal_tensor), axis=0),
            np.tile(self.internal_tensor, (len(inputs), 1)),
        ])


internal_tensor = np.random.rand(30, 10).astype(np.float32)
numpy_layer = NumpyLayer(
    internal_tensor=internal_tensor,
    kernel=lambda inputs: inputs[0] + inputs[1],
)
numpy_layer(x)  # shape (15 * 30, 10): one row per (input row, internal row) pair
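For reference, here is a sketch of what I would expect the dynamic-shape version to look like in TensorFlow, relying on tf.shape and tf.repeat instead of the static .shape attributes (this assumes tf.repeat is available, i.e. TF >= 2.1, and I have not verified it is the right approach):

class PairwiseKernelLayer(Layer):
    def __init__(self, internal_tensor, kernel, **kwargs):
        super().__init__(**kwargs)
        self.internal_tensor = tf.Variable(
            0., shape=tf.TensorShape((None, 10)), validate_shape=False,
            name='internal_tensor')
        self.internal_tensor.assign(internal_tensor)
        self.kernel = kernel

    @tf.function
    def call(self, inputs, **kwargs):
        n_internal = tf.shape(self.internal_tensor)[0]  # dynamic, unlike .shape[0]
        n_batch = tf.shape(inputs)[0]
        return self.kernel([
            tf.repeat(inputs, repeats=n_internal, axis=0),  # repeat each input row
            tf.tile(self.internal_tensor, [n_batch, 1]),    # tile the internal tensor
        ])

pairwise_layer = PairwiseKernelLayer(
    internal_tensor=tf.convert_to_tensor(np.random.rand(30, 10), tf.float32),
    kernel=lambda inputs: inputs[0] + inputs[1],
)
pairwise_layer(x)  # expected shape: (15 * 30, 10)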