
I want to add custom ROC and AUC metrics to my Keras model. I tried to adapt the solution provided here, but the code gives me an error.

The PerformanceVisualizationCallback plots the ROC-AUC curve.

import os

import matplotlib.pyplot as plt
import sklearn.metrics
import tensorflow as tf
from keras.callbacks import Callback
from sklearn.metrics import roc_auc_score
from sklearn.svm import SVC

Callback:

class PerformanceVisualizationCallback(Callback):
    def __init__(self, model, validation_data, dat_dir):
        super().__init__()
        self.model = model
        self.X = X_train          # training data picked up from the enclosing scope
        self.y = y_train
        self.validation_data = validation_data

        os.makedirs(dat_dir, exist_ok=True)
        self.dat_dir = dat_dir

    def on_epoch_end(self, epoch, logs=None):
        # ROC AUC on the training and the validation set
        y_pred_train = self.model.predict(self.X)
        roc_train = roc_auc_score(self.y, y_pred_train)
        y_pred_val = self.model.predict(self.validation_data[0])
        roc_val = roc_auc_score(self.validation_data[1], y_pred_val)
        
        #y_pred = tf.constant(self.model.predict(self.validation_data[0])).numpy()
        #y_true = self.validation_data[1]             
        #y_pred_class = tf.math.argmax(y_pred, axis=1).numpy()
        
        # Fit an auxiliary SVM on the validation features, labelled 0..19 via linspace
        clf = SVC(random_state=0)
        clf.fit(tf.constant(self.validation_data[0]).numpy(),
                tf.cast(tf.linspace(0, 19, 20, name='linspace', axis=0), dtype=tf.int64).numpy())
        predictions = clf.predict(tf.constant(self.validation_data[0]).numpy())
        
        # Confusion matrix of the true validation classes vs. the SVM predictions
        y_true_val = tf.math.argmax(self.validation_data[1], axis=1).numpy()
        cm = sklearn.metrics.confusion_matrix(y_true_val, predictions, labels=clf.classes_)
        disp = sklearn.metrics.ConfusionMatrixDisplay(confusion_matrix=cm, display_labels=clf.classes_)
        disp.plot()
        disp.figure_.savefig(os.path.join(self.dat_dir, f'confusion_matrix_epoch_{epoch}'))
        plt.show()

        clf = SVC(random_state=0)
        clf.fit(tf.constant(self.validation_data[0]).numpy(), tf.linspace(0, 19, 20, name='linspace', axis=0).numpy())
        
        
        fpr, tpr, thresholds = sklearn.metrics.roc_curve([0, 0, 1, 1], [0, list_auc_roc[0], list_auc_roc[1], list_auc_roc[2]])
        auc_roc = sklearn.metrics.auc(fpr, tpr)
        display = sklearn.metrics.RocCurveDisplay(fpr=fpr, tpr=tpr, roc_auc=auc_roc, estimator_name='example estimator')
        display.plot()
        display.figure_.savefig(os.path.join(self.dat_dir, f'roc_curve_epoch_{epoch}'))
        plt.show()
            
        # Scatter of true vs. predicted validation values
        fig, ax = plt.subplots(figsize=(8, 4))
        ax.scatter(self.validation_data[1], y_pred_val, alpha=0.6, color='#FF0000', lw=1, ec='black')
        fig.savefig(os.path.join(self.dat_dir, 'dl_scatterplot'))
        plt.show()
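
For comparison, a stripped-down callback that only tracks the validation ROC AUC each epoch could look like the sketch below. It assumes integer class labels and softmax probability outputs; the RocAucCallback name, the auc_history list and the file names are placeholders rather than part of the code above:

import os

import matplotlib.pyplot as plt
import numpy as np
from keras.callbacks import Callback
from sklearn.metrics import roc_auc_score


class RocAucCallback(Callback):
    """Sketch: log one-vs-rest ROC AUC on held-out data after every epoch."""

    def __init__(self, validation_data, dat_dir):
        super().__init__()
        self.X_val, self.y_val = validation_data  # y_val: integer class labels
        os.makedirs(dat_dir, exist_ok=True)
        self.dat_dir = dat_dir
        self.auc_history = []

    def on_epoch_end(self, epoch, logs=None):
        # self.model is attached by Keras when the callback is passed to fit()
        y_score = self.model.predict(self.X_val, verbose=0)
        self.auc_history.append(roc_auc_score(self.y_val, y_score, multi_class='ovr'))

        fig, ax = plt.subplots(figsize=(6, 4))
        ax.plot(np.arange(1, len(self.auc_history) + 1), self.auc_history, marker='o')
        ax.set_xlabel('epoch')
        ax.set_ylabel('validation ROC AUC (one-vs-rest)')
        fig.savefig(os.path.join(self.dat_dir, f'roc_auc_epoch_{epoch}.png'))
        plt.close(fig)

This keeps the per-epoch work to a single predict() call on the validation set instead of fitting a separate SVM inside the callback.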

Build the model:

from keras import layers
from keras.layers import Dense
from keras.losses import SparseCategoricalCrossentropy
from keras.models import Sequential
from keras.optimizers import Adam


def model_builder(hp):

  model = Sequential()

  for i in range(hp.Int("num_layers", 1, 50)):
      model.add(
          layers.Dense(
              # Tune number of units separately.
              units=hp.Int(f"units_{i}", min_value=1, max_value=200, step=5),
              # activation=hp.Choice("activation", ["relu", "tanh"])
              activation=hp.Choice(
                  "activation",
                  ["relu", "tanh", "sigmoid", "softmax", "softplus",
                   "softsign", "selu", "elu", "exponential"],
              ),
          )
      )

  model.add(Dense(4, kernel_initializer='normal', activation='linear'))

  if hp.Boolean("dropout"):
      model.add(layers.Dropout(rate=0.1))
  model.add(layers.Dense(10, activation="softmax"))  # output layer


  # Tune the learning rate for the optimizer
  # Choose an optimal value from 0.01, 0.001, or 0.0001
  hp_learning_rate = hp.Choice('learning_rate', values=[1e-2, 1e-3, 1e-4])

  # Performance visualization callback
  performance_viz_cbk = PerformanceVisualizationCallback(
      model=model,
      validation_data=(X_val, y_val),
      dat_dir=r'c:\performance_charts')

  model.compile(optimizer=Adam(learning_rate=hp_learning_rate),
                loss=SparseCategoricalCrossentropy(from_logits=True),
                metrics=['auc_roc'])
  
  return model
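
The metrics=['auc_roc'] string in compile() is what the traceback below complains about: Keras only resolves metric strings it already knows. One built-in alternative is to pass a tf.keras.metrics.AUC object and give it a display name. This is only a minimal sketch, assuming a binary sigmoid head and a placeholder input size, since the built-in AUC metric does not apply directly to a 10-way softmax with sparse integer labels:

import tensorflow as tf

model = tf.keras.Sequential([
    tf.keras.layers.Dense(16, activation='relu', input_shape=(20,)),  # placeholder input size
    tf.keras.layers.Dense(1, activation='sigmoid'),
])
model.compile(
    optimizer='adam',
    loss='binary_crossentropy',
    # Passing the metric object registers it under the chosen name, so the
    # training logs contain 'auc_roc' and 'val_auc_roc' entries.
    metrics=[tf.keras.metrics.AUC(name='auc_roc')],
)

Whatever name is chosen here is the name the tuner objective and the EarlyStopping monitor would have to refer to.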

Search for optimal hyperparameters:

import keras_tuner as kt
from keras.callbacks import EarlyStopping

tuner = kt.Hyperband(model_builder,
                     objective=kt.Objective('val_auc', direction='max'),
                     max_epochs=200,
                     factor=3,
                     directory='my_dir',
                     overwrite=True, 
                     project_name='intro_to_kt')

stop_early = EarlyStopping(monitor='auc_roc', patience=5)

tuner.search(X_train, y_train, epochs=10, validation_split=0.2, callbacks=[stop_early])
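
Even if a metric named 'auc_roc' existed on the model, the three names above would still disagree: the tuner objective is 'val_auc' while EarlyStopping monitors 'auc_roc'. Purely as an illustration, assuming a compiled metric named 'auc_roc' (so its validation log key is 'val_auc_roc'), consistent naming would look like:

import keras_tuner as kt
from keras.callbacks import EarlyStopping

tuner = kt.Hyperband(model_builder,
                     objective=kt.Objective('val_auc_roc', direction='max'),
                     max_epochs=200,
                     factor=3,
                     directory='my_dir',
                     overwrite=True,
                     project_name='intro_to_kt')

# Monitor the same validation log key the tuner optimizes, and maximize it.
stop_early = EarlyStopping(monitor='val_auc_roc', mode='max', patience=5)

tuner.search(X_train, y_train, epochs=10, validation_split=0.2, callbacks=[stop_early])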

Traceback:

---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
/tmp/ipykernel_11015/3831715835.py in <module>
----> 1 tuner.search(X_train, y_train, epochs=10, validation_split=0.2, callbacks=[stop_early])

/opt/conda/lib/python3.7/site-packages/keras_tuner/engine/base_tuner.py in search(self, *fit_args, **fit_kwargs)
    181 
    182             self.on_trial_begin(trial)
--> 183             results = self.run_trial(trial, *fit_args, **fit_kwargs)
    184             # `results` is None indicates user updated oracle in `run_trial()`.
    185             if results is None:

/opt/conda/lib/python3.7/site-packages/keras_tuner/tuners/hyperband.py in run_trial(self, trial, *fit_args, **fit_kwargs)
    382             fit_kwargs["epochs"] = hp.values["tuner/epochs"]
    383             fit_kwargs["initial_epoch"] = hp.values["tuner/initial_epoch"]
--> 384         return super(Hyperband, self).run_trial(trial, *fit_args, **fit_kwargs)
    385 
    386     def _build_model(self, hp):

/opt/conda/lib/python3.7/site-packages/keras_tuner/engine/tuner.py in run_trial(self, trial, *args, **kwargs)
    293             callbacks.append(model_checkpoint)
    294             copied_kwargs["callbacks"] = callbacks
--> 295             obj_value = self._build_and_fit_model(trial, *args, **copied_kwargs)
    296 
    297             histories.append(obj_value)

/opt/conda/lib/python3.7/site-packages/keras_tuner/engine/tuner.py in _build_and_fit_model(self, trial, *args, **kwargs)
    220         hp = trial.hyperparameters
    221         model = self._try_build(hp)
--> 222         results = self.hypermodel.fit(hp, model, *args, **kwargs)
    223         tuner_utils.validate_trial_results(
    224             results, self.oracle.objective, "HyperModel.fit()"

/opt/conda/lib/python3.7/site-packages/keras_tuner/engine/hypermodel.py in fit(self, hp, model, *args, **kwargs)
    138             If return a float, it should be the `objective` value.
    139         """
--> 140         return model.fit(*args, **kwargs)
    141 
    142 

/opt/conda/lib/python3.7/site-packages/keras/utils/traceback_utils.py in error_handler(*args, **kwargs)
     68             # To get the full stack trace, call:
     69             # `tf.debugging.disable_traceback_filtering()`
---> 70             raise e.with_traceback(filtered_tb) from None
     71         finally:
     72             del filtered_tb

/opt/conda/lib/python3.7/site-packages/keras/engine/training.py in tf__train_function(iterator)
     13                 try:
     14                     do_return = True
---> 15                     retval_ = ag__.converted_call(ag__.ld(step_function), (ag__.ld(self), ag__.ld(iterator)), None, fscope)
     16                 except:
     17                     do_return = False

ValueError: in user code:

    File "/opt/conda/lib/python3.7/site-packages/keras/engine/training.py", line 1160, in train_function  *
        return step_function(self, iterator)
    File "/opt/conda/lib/python3.7/site-packages/keras/engine/training.py", line 1146, in step_function  **
        outputs = model.distribute_strategy.run(run_step, args=(data,))
    File "/opt/conda/lib/python3.7/site-packages/keras/engine/training.py", line 1135, in run_step  **
        outputs = model.train_step(data)
    File "/opt/conda/lib/python3.7/site-packages/keras/engine/training.py", line 998, in train_step
        return self.compute_metrics(x, y, y_pred, sample_weight)
    File "/opt/conda/lib/python3.7/site-packages/keras/engine/training.py", line 1092, in compute_metrics
        self.compiled_metrics.update_state(y, y_pred, sample_weight)
    File "/opt/conda/lib/python3.7/site-packages/keras/engine/compile_utils.py", line 577, in update_state
        self.build(y_pred, y_true)
    File "/opt/conda/lib/python3.7/site-packages/keras/engine/compile_utils.py", line 484, in build
        y_pred, self._get_metric_objects, self._metrics, y_true, y_pred
    File "/opt/conda/lib/python3.7/site-packages/keras/engine/compile_utils.py", line 631, in _get_metric_objects
        return [self._get_metric_object(m, y_t, y_p) for m in metrics]
    File "/opt/conda/lib/python3.7/site-packages/keras/engine/compile_utils.py", line 631, in <listcomp>
        return [self._get_metric_object(m, y_t, y_p) for m in metrics]
    File "/opt/conda/lib/python3.7/site-packages/keras/engine/compile_utils.py", line 650, in _get_metric_object
        metric_obj = metrics_mod.get(metric)
    File "/opt/conda/lib/python3.7/site-packages/keras/metrics/__init__.py", line 181, in get
        return deserialize(str(identifier))
    File "/opt/conda/lib/python3.7/site-packages/keras/metrics/__init__.py", line 140, in deserialize
        printable_module_name="metric function",
    File "/opt/conda/lib/python3.7/site-packages/keras/utils/generic_utils.py", line 770, in deserialize_keras_object
        f"Unknown {printable_module_name}: {object_name}. Please "

    ValueError: Unknown metric function: auc_roc. Please ensure this object is passed to the `custom_objects` argument. See https://www.tensorflow.org/guide/keras/save_and_serialize#registering_the_custom_object for details.
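
The last frame (metrics_mod.get(metric)) is where the lookup fails: the string is resolved against Keras's registry of built-in and custom-registered objects, and 'auc_roc' is in neither. The same lookup can be reproduced outside of compile(); the snippet below only illustrates the mechanism:

import tensorflow as tf

# The built-in AUC metric resolves from its registered class name...
print(tf.keras.metrics.get('AUC'))   # an AUC metric instance

# ...but 'auc_roc' is not registered anywhere, which is exactly the
# ValueError raised inside compile_utils above.
try:
    tf.keras.metrics.get('auc_roc')
except ValueError as err:
    print(err)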