
I am trying to select a subset of the trainable variables, namely the ones in the first and second hidden layers, so I can perform some operations on their gradients (for example, clipping them). I use get_collection with a scope name passed in its scope argument, but for some reason TensorFlow does not find any variables to optimize. Below is a reproducible example that replicates my error. I suspect it has to do with how I pass the scope to get_collection:

import tensorflow as tf
from tensorflow.contrib.layers import fully_connected

X = tf.placeholder(dtype=tf.float32, shape=(None, 50), name='X')
Y = tf.placeholder(dtype=tf.float32, shape=(None), name='Y')

with tf.name_scope('DNN') as scope:
    hidden1 = fully_connected(X, 20, scope='hidden1')
    hidden2 = fully_connected(hidden1, 10, scope='hidden2')
    hidden3 = fully_connected(hidden2, 5, scope='hidden3')
    output = fully_connected(hidden3, 1, activation_fn=None, scope='outs')
    logits = tf.nn.sigmoid(output, name='logists_out')

with tf.name_scope('loss') as scope:
    loss = tf.reduce_mean(Y - logits)

learning_rate = 0.01
with tf.name_scope('train') as scope:
    optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)
    train_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope='[12]')
    grads_and_vars = optimizer.compute_gradients(loss, var_list=train_vars)
    clipped_grads = [(tf.clip_by_value(grad, -1, 1), var) for grad, var in grads_and_vars]
    train_op = optimizer.apply_gradients(clipped_grads)


---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-6-a8007f96c949> in <module>()
     16     optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)
     17     train_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope= '[12]')
---> 18     grads_and_vars = optimizer.compute_gradients(loss, var_list = train_vars)
     19     clipped_grads = [(tf.clip_by_value(grad,-1,1), var) for grad, var in grads_and_vars]
     20     train_op = optimizer.apply_gradients(clipped_grads)

/home/diego/Documents/python27/ML/local/lib/python2.7/site-packages/tensorflow/python/training/optimizer.pyc in compute_gradients(self, loss, var_list, gate_gradients, aggregation_method, colocate_gradients_with_ops, grad_loss)
    406     processors = [_get_processor(v) for v in var_list]
    407     if not var_list:
--> 408       raise ValueError("No variables to optimize.")
    409     var_refs = [p.target() for p in processors]
    410     grads = gradients.gradients(

ValueError: No variables to optimize.

Note: I get the same error even if I use tf.layers.dense instead of fully_connected from tf.contrib.layers:

with tf.name_scope('DNN') as scope:
    hidden1 = tf.layers.dense(X, 20, name='hidden1', activation=tf.nn.relu)
    hidden2 = tf.layers.dense(hidden1, 10, name='hidden2', activation=tf.nn.relu)
    hidden3 = tf.layers.dense(hidden2, 5, name='hidden3', activation=tf.nn.relu)
    output = tf.layers.dense(hidden3, 1, activation=None, name='outs')
    logits = tf.nn.sigmoid(output, name='logists_out')

with tf.name_scope('loss') as scope:
    #loss = tf.reduce_mean(Y - logits)
    xentropy = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=Y, logits=logits)
    loss = tf.reduce_mean(xentropy, name="loss")

learning_rate = 0.01
with tf.name_scope('train') as scope:
    optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)
    train_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope='[12]')
    grads_and_vars = optimizer.compute_gradients(loss, var_list=train_vars)
    clipped_grads = [(tf.clip_by_value(grad, -1, 1), var) for grad, var in grads_and_vars]
    train_op = optimizer.apply_gradients(clipped_grads)

---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-37-c30495bc394c> in <module>()
     19     optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)
     20     train_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope= '[12]')
---> 21     grads_and_vars = optimizer.compute_gradients(loss, var_list = train_vars)
     22     clipped_grads = [(tf.clip_by_value(grad,-1,1), var) for grad, var in grads_and_vars]
     23     train_op = optimizer.apply_gradients(clipped_grads)

/home/diego/Documents/python27/ML/local/lib/python2.7/site-packages/tensorflow/python/training/optimizer.pyc in compute_gradients(self, loss, var_list, gate_gradients, aggregation_method, colocate_gradients_with_ops, grad_loss)
    406     processors = [_get_processor(v) for v in var_list]
    407     if not var_list:
--> 408       raise ValueError("No variables to optimize.")
    409     var_refs = [p.target() for p in processors]
    410     grads = gradients.gradients(

ValueError: No variables to optimize.
dleal
  • Why do you define a scope = '[12]' for train_vars? – J.E.K Jan 24 '18 at 21:48
  • Because I just want to train the variables in hidden layers 1 and 2, although maybe I should have written `hidden[12]`, which is the name pattern of the hidden layers – dleal Jan 24 '18 at 22:40

1 Answer


If you write

print train_vars

you will see that this collection is indeed empty. You can list all trainable variables with

print tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)
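
With the graph from your question, that should print something along these lines (all eight variables, including those from hidden3 and outs; shapes follow the layer sizes):

[<tf.Variable 'hidden1/weights:0' shape=(50, 20) dtype=float32_ref>,
 <tf.Variable 'hidden1/biases:0' shape=(20,) dtype=float32_ref>,
 <tf.Variable 'hidden2/weights:0' shape=(20, 10) dtype=float32_ref>,
 <tf.Variable 'hidden2/biases:0' shape=(10,) dtype=float32_ref>,
 <tf.Variable 'hidden3/weights:0' shape=(10, 5) dtype=float32_ref>,
 <tf.Variable 'hidden3/biases:0' shape=(5,) dtype=float32_ref>,
 <tf.Variable 'outs/weights:0' shape=(5, 1) dtype=float32_ref>,
 <tf.Variable 'outs/biases:0' shape=(1,) dtype=float32_ref>]

None of these names starts with '[12]': the scope argument of get_collection is matched against the beginning of each variable name with re.match, which is why your pattern returns an empty list.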

To get a subset, I suggest one of the following:

filter_vars = ['hidden1', 'hidden2']
train_vars = []
for var in tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES):
    for filter_var in filter_vars:
        if filter_var in var.name:
            train_vars.append(var)
# print train_vars

or

train_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope="hidden1")
train_vars += tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope="hidden2")
# print train_vars

Both give you:

[<tf.Variable 'hidden1/weights:0' shape=(50, 20) dtype=float32_ref>, 
 <tf.Variable 'hidden1/biases:0' shape=(20,) dtype=float32_ref>, 
 <tf.Variable 'hidden2/weights:0' shape=(20, 10) dtype=float32_ref>, 
 <tf.Variable 'hidden2/biases:0' shape=(10,) dtype=float32_ref>]
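
And because get_collection applies the scope argument as a regular expression via re.match, you can also pass the pattern in a single call; it just has to cover the full name prefix, hidden[12] rather than [12]:

train_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope='hidden[12]')
# print train_vars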

But my favorite approach is to do the regex match explicitly:

import re
regex_pattern = 'hidden[12]'
train_vars = []
for var in tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES):
    if re.match(regex_pattern, var.op.name):
        train_vars.append(var)
print train_vars
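
Equivalently, as a list comprehension:

train_vars = [var for var in tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)
              if re.match(regex_pattern, var.op.name)]

Keep in mind that re.match anchors the pattern at the start of the name; use re.search instead if the pattern may occur anywhere inside the variable name.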
Patwie
  • Thank you, this was what I suspected. I should have typed `hidden[12]` instead of just `[12]`. Good answer – dleal Jan 25 '18 at 19:18