8

I'm trying to access the predictions of a model's intermediate layers during training using a custom callback. The following stripped-down version of the actual code demonstrates the issue.

import tensorflow as tf
import numpy as np

class Model(tf.keras.Model):
    def __init__(self, input_shape=None, name="cus_model", **kwargs):
        super(Model, self).__init__(name=name, **kwargs)
        
    def build(self, input_shape):
        self.dense1 = tf.keras.layers.Dense(input_shape=input_shape, units=32)
        
    def call(self, input_tensor):
        return self.dense1(input_tensor)

class CustomCallback(tf.keras.callbacks.Callback):
    def on_epoch_end(self, epoch, logs=None):
        get_output = tf.keras.backend.function(
            inputs = self.model.layers[0].input,
            outputs = self.model.layers[0].output
        )
        print("Layer output: ",get_output.outputs)

X = np.ones((8,16))
y = np.sum(X, axis=1)

model = Model()
model.compile(optimizer='adam',loss='mean_squared_error', metrics='accuracy')
model.fit(X,y, epochs=8, callbacks=[CustomCallback()])

The callback is written as suggested in this answer. I'm getting the following error:

<ipython-input-3-635fd53dbffc> in on_epoch_end(self, epoch, logs)
     12     def on_epoch_end(self, epoch, logs=None):
     13         get_output = tf.keras.backend.function(
---> 14             inputs = self.model.layers[0].input,
     15             outputs = self.model.layers[0].output
     16         )
...
AttributeError: Layer dense is not connected, no input to return.

What's causing this? How do I resolve it?

MasterJEET
    The functional approach suggested by @Marco Cerliani seems to work fine, but I would like to know what the issue is with the subclassing approach presented in the question. – MasterJEET Jun 30 '20 at 20:14

3 Answers

1

I have no problem running this:

import tensorflow as tf
import numpy as np

X = np.ones((8,16))
y = np.sum(X, axis=1)


class CustomCallback(tf.keras.callbacks.Callback):
    def on_epoch_end(self, epoch, logs=None):
        get_output = tf.keras.backend.function(
            inputs = self.model.layers[0].input,
            outputs = self.model.layers[1].output # return output of first dense
        )
        print("\nLayer output: ", get_output(X))


inp = tf.keras.layers.Input((16,))
dense1 = tf.keras.layers.Dense(units=32)(inp)
dense2 = tf.keras.layers.Dense(units=20)(dense1)
model = tf.keras.Model(inp, dense2)

model.compile(optimizer='adam',loss='mean_squared_error', metrics='accuracy')
model.fit(X,y, epochs=8, callbacks=[CustomCallback()])
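
For reference, another way to read intermediate activations from a Functional model is to build a sub-model that maps the model's input to the layer output you care about, and call it eagerly instead of going through tf.keras.backend.function. A minimal sketch against the model above (the layer index 1 and the reuse of the global X are assumptions carried over from this answer):

class IntermediateOutputCallback(tf.keras.callbacks.Callback):
    def on_epoch_end(self, epoch, logs=None):
        # Sub-model that reuses the trained layers up to the first Dense layer
        intermediate = tf.keras.Model(
            inputs=self.model.input,
            outputs=self.model.layers[1].output
        )
        print("\nLayer output: ", intermediate(X, training=False))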
Marco Cerliani
1

I also cannot get self.model.layers[0].input because of the same error, but maybe you can call the function defined in the Model directly, like this:

class Model(tf.keras.Model):
    def __init__(self, input_shape=None, name="cus_model", **kwargs):
        super(Model, self).__init__(name=name, **kwargs)
        if not input_shape:
            input_shape = (16,)  # match the feature dimension of X below
        self.dense1 = tf.keras.layers.Dense(input_shape=input_shape, units=32)
        self.dev_dataset = np.ones((8,16))

    def call(self, input_tensor):
        return self.dense1(input_tensor)


class CustomCallback(tf.keras.callbacks.Callback):
    def on_epoch_end(self, epoch, logs=None):
        # print the intermediate output instead of discarding it
        print("Layer output: ", self.model.call(self.model.dev_dataset))


X = np.ones((8,16))
y = np.sum(X, axis=1)

model = Model()
model.compile(optimizer='adam',loss='mean_squared_error', metrics='accuracy')
model.fit(X,y, epochs=1, callbacks=[CustomCallback()])
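
Along the same lines, since the layer is stored as an attribute on the subclassed model, the callback can also call that layer directly to get its activation eagerly; a minimal sketch under that assumption (dense1 and dev_dataset are the attribute names from the code above):

class LayerOutputCallback(tf.keras.callbacks.Callback):
    def on_epoch_end(self, epoch, logs=None):
        # Call the layer attribute directly; no symbolic input/output needed
        out = self.model.dense1(self.model.dev_dataset)
        print("Layer output: ", out.numpy())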
Crystina
0

I guess there is some difference between the Functional API and subclassing tf.keras.Model. Rewriting the Dense layer with add_weight was also necessary. At least, it is working now. If anyone knows why, please explain.

import tensorflow as tf
import numpy as np

class Model(tf.keras.Model):
    def __init__(self, inputs=None, name="cus_model", **kwargs):
        super(Model, self).__init__(name=name, **kwargs)
        self.inputs= inputs
        self.output_dim = 8

    def build(self, input_shape):
        #tf.keras.layers.Dense(input_shape=input_shape, units=32)
        self.dense1 = self.add_weight(name='weight_vector', shape=(input_shape[1], self.output_dim),
                                      initializer='glorot_uniform', trainable=True,
                                      regularizer=None)

    def call(self, input_tensor):
        return tf.matmul(self.dense1, input_tensor)

class CustomCallback(tf.keras.callbacks.Callback):
    def on_epoch_end(self, epoch, logs=None):
        get_output = tf.keras.backend.function(
            inputs = self.model.layers[0].input,
            outputs = self.model.layers[0].output
        )
        print("Layer output: ",get_output.outputs)

X = np.ones((10,8,16))
y = np.sum(X, axis=1)

print(X.shape,y.shape)

inp = tf.keras.layers.Input((8, 16))
model = Model(inp)
# model.compile(optimizer='adam',loss='mean_squared_error', metrics='accuracy')
# model.fit(X,y, epochs=8, callbacks=[CustomCallback()])

optimizer = tf.keras.optimizers.Adam(learning_rate=0.001)
loss_fn = tf.keras.losses.BinaryCrossentropy(from_logits=False)

for i in range(X.shape[0]):
    with tf.GradientTape() as tape:
        out = model(X[i, :])
        label = tf.cast(y[i], dtype=tf.float32)
        loss = loss_fn(label, out)
    # gradient computation and the update step belong outside the tape context
    grads = tape.gradient(loss, model.trainable_weights)
    optimizer.apply_gradients(zip(grads, model.trainable_weights))
    print("step: ", i, "loss:", loss.numpy())
MeadowMuffins