I am trying to select a subset of the trainable variables (the ones in the first and second hidden layers) in order to perform some operations on their gradients (for example, gradient clipping), using get_collection together with a scope name passed through its scope argument. However, for some reason, TensorFlow does not find any variables to optimize. Below I copy a reproducible example that replicates the error. I think it may have to do with how I pass the scope to get_collection:
import tensorflow as tf
from tensorflow.contrib.layers import fully_connected

X = tf.placeholder(dtype=tf.float32, shape=(None, 50), name='X')
Y = tf.placeholder(dtype=tf.float32, shape=(None), name='Y')

with tf.name_scope('DNN') as scope:
    hidden1 = fully_connected(X, 20, scope='hidden1')
    hidden2 = fully_connected(hidden1, 10, scope='hidden2')
    hidden3 = fully_connected(hidden2, 5, scope='hidden3')
    output = fully_connected(hidden3, 1, activation_fn=None, scope='outs')
    logits = tf.nn.sigmoid(output, name='logists_out')

with tf.name_scope('loss') as scope:
    loss = tf.reduce_mean(Y - logits)

learning_rate = 0.01
with tf.name_scope('train') as scope:
    optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)
    train_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope='[12]')
    grads_and_vars = optimizer.compute_gradients(loss, var_list=train_vars)
    clipped_grads = [(tf.clip_by_value(grad, -1, 1), var) for grad, var in grads_and_vars]
    train_op = optimizer.apply_gradients(clipped_grads)
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-6-a8007f96c949> in <module>()
16 optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)
17 train_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope= '[12]')
---> 18 grads_and_vars = optimizer.compute_gradients(loss, var_list = train_vars)
19 clipped_grads = [(tf.clip_by_value(grad,-1,1), var) for grad, var in grads_and_vars]
20 train_op = optimizer.apply_gradients(clipped_grads)
/home/diego/Documents/python27/ML/local/lib/python2.7/site-packages/tensorflow/python/training/optimizer.pyc in compute_gradients(self, loss, var_list, gate_gradients, aggregation_method, colocate_gradients_with_ops, grad_loss)
406 processors = [_get_processor(v) for v in var_list]
407 if not var_list:
--> 408 raise ValueError("No variables to optimize.")
409 var_refs = [p.target() for p in processors]
410 grads = gradients.gradients(
ValueError: No variables to optimize.
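If it helps to debug, the variable names can be listed like this (a minimal check, run after building the graph above; tf.trainable_variables() returns the contents of the TRAINABLE_VARIABLES collection):

# Minimal check: print the full name of every trainable variable.
# get_collection's `scope` argument is used as a regex that re.match
# anchors at the *start* of these names.
for v in tf.trainable_variables():
    print(v.name)
# For the graph above (fully_connected creates 'weights'/'biases'),
# the output should be something like: hidden1/weights:0,
# hidden1/biases:0, hidden2/weights:0, ... -- nothing starts with
# '1' or '2', which may be why '[12]' matches nothing.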
Note: I get the same error even if I use dense from tf.layers rather than fully_connected from tf.contrib.layers:
with tf.name_scope('DNN') as scope:
    hidden1 = tf.layers.dense(X, 20, name='hidden1', activation=tf.nn.relu)
    hidden2 = tf.layers.dense(hidden1, 10, name='hidden2', activation=tf.nn.relu)
    hidden3 = tf.layers.dense(hidden2, 5, name='hidden3', activation=tf.nn.relu)
    output = tf.layers.dense(hidden3, 1, activation=None, name='outs')
    logits = tf.nn.sigmoid(output, name='logists_out')

with tf.name_scope('loss') as scope:
    # loss = tf.reduce_mean(Y - logits)
    xentropy = tf.nn.sparse_softmax_cross_entropy_with_logits(labels=Y, logits=logits)
    loss = tf.reduce_mean(xentropy, name="loss")

learning_rate = 0.01
with tf.name_scope('train') as scope:
    optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)
    train_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope='[12]')
    grads_and_vars = optimizer.compute_gradients(loss, var_list=train_vars)
    clipped_grads = [(tf.clip_by_value(grad, -1, 1), var) for grad, var in grads_and_vars]
    train_op = optimizer.apply_gradients(clipped_grads)
---------------------------------------------------------------------------
ValueError Traceback (most recent call last)
<ipython-input-37-c30495bc394c> in <module>()
19 optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)
20 train_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES, scope= '[12]')
---> 21 grads_and_vars = optimizer.compute_gradients(loss, var_list = train_vars)
22 clipped_grads = [(tf.clip_by_value(grad,-1,1), var) for grad, var in grads_and_vars]
23 train_op = optimizer.apply_gradients(clipped_grads)
/home/diego/Documents/python27/ML/local/lib/python2.7/site-packages/tensorflow/python/training/optimizer.pyc in compute_gradients(self, loss, var_list, gate_gradients, aggregation_method, colocate_gradients_with_ops, grad_loss)
406 processors = [_get_processor(v) for v in var_list]
407 if not var_list:
--> 408 raise ValueError("No variables to optimize.")
409 var_refs = [p.target() for p in processors]
410 grads = gradients.gradients(
ValueError: No variables to optimize.
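My current guess is that scope has to match the variable names from their beginning (re.match semantics), so the layer-name prefix would need to be included; a sketch of what I mean, assuming that reading of get_collection is right:

# Guess: anchor the regex at the layer names instead of '[12]' alone.
# With tf.layers.dense the variables are named 'hidden1/kernel:0',
# 'hidden1/bias:0', 'hidden2/kernel:0', ...
train_vars = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES,
                               scope='hidden[12]')

But I am not sure whether this is the intended way to use the scope argument.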