I'm trying to split my neural network model from a restore() function that sets a random half of the weights to zero. Here's the model code: http://pastebin.com/TqN6kkeb (it works properly).
And here's the function:
from __future__ import print_function
import tensorflow as tf
tf.GraphKeys.VARIABLES = tf.GraphKeys.GLOBAL_VARIABLES
import random
from LogReg import accuracy
from LogReg import W
from LogReg import x,y
# Import MNIST data
from tensorflow.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("/tmp/data/", one_hot=True)
def restore(model_file):

    with tf.Session() as sess:
        # load the saved graph and the trained weights
        new_saver = tf.train.import_meta_graph(model_file + ".meta")
        new_saver.restore(sess, model_file)

        with tf.variable_scope("foo", reuse=True):
            temp_var = tf.get_variable("W")
            size_2a = tf.get_variable("b")

        size_2 = tf.shape(size_2a).eval()[0]
        size_1 = tf.shape(temp_var).eval()[0]

        # mask of ones with the same shape as W
        ones_mask = tf.Variable(tf.ones([size_1, size_2]))

        # pick a random half of the row indices to zero out
        arg = random.sample(xrange(size_1), size_1/2)
        index_num = tf.convert_to_tensor(arg, dtype=tf.int32)
        print("om", ones_mask)
        print("index", index_num)
        print(W)

        zeroes = tf.zeros([size_1/2, size_2])
        update = tf.scatter_update(ones_mask, index_num, zeroes)  # <- the traceback points here
        print(update)

        assign_op = W.assign(tf.mul(W, update))
        sess.run(update)
        sess.run(assign_op)

        init_op = tf.global_variables_initializer()
        sess.run(init_op)

        new_saver.save(sess, model_file)

        print("Accuracy_new:", accuracy.eval({x: mnist.test.images, y: mnist.test.labels}))

restore('./MyModel2')
The problems are:

1) It keeps raising FailedPreconditionError (see above for traceback): Attempting to use uninitialized value Variable on this line:

update = tf.scatter_update(ones_mask, index_num, zeroes)

no matter what I do. I have read these topics: Prettytensor: Attempting to use uninitialized value and Update a subset of weights in TensorFlow (and many others), but the advice there didn't fix my bug. And I don't understand what the problem with initialization is, given that I do run tf.global_variables_initializer().
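To make sure I'm describing the ordering correctly, here is a stripped-down sketch of the same pattern (the 4x2 shape and the indices [0, 2] are made up, purely for illustration): a fresh mask variable is created, scatter_update is run on it, and the initializer only runs afterwards, which as far as I can tell fails the same way:

import tensorflow as tf

with tf.Session() as sess:
    # hypothetical 4x2 mask, created the same way as ones_mask above
    mask = tf.Variable(tf.ones([4, 2]))
    update = tf.scatter_update(mask, [0, 2], tf.zeros([2, 2]))
    sess.run(update)  # FailedPreconditionError: Attempting to use uninitialized value
    sess.run(tf.global_variables_initializer())  # the initializer only runs after the line that fails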
2) All of the weights seem to get set to zero instead of just half of them, and I can't understand why.
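For reference, this is what I expect to happen, shown on a toy example (W_toy and the 4x2 shape are made up, just stand-ins for W and ones_mask): only the rows picked by scatter_update should end up zeroed, and the rest should keep their values:

import tensorflow as tf

# toy stand-ins for W and ones_mask, only to show the expected result
W_toy = tf.Variable(tf.fill([4, 2], 5.0))
mask = tf.Variable(tf.ones([4, 2]))
masked = tf.scatter_update(mask, [0, 2], tf.zeros([2, 2]))

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    print(sess.run(W_toy * masked))
    # expected: rows 0 and 2 become zero, rows 1 and 3 stay 5.0,
    # but in my real code every weight ends up as zero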
Please help, I'm really stuck.