I am trying to generate synthetic data with a TensorFlow autoencoder that is very close to the given original data. However, the autoencoder is not learning during the training phase: the cost function generally does not decrease, and the synthetic data is unrelated to the original data. My code is given below:
x = tf.placeholder("float", [None, COLUMN])
# Weights and biases to hidden layer
Wh = tf.Variable(tf.random_uniform((COLUMN, UNITS_OF_HIDDEN_LAYER), -1.0 / mpmath.sqrt(COLUMN), 1.0 / mpmath.sqrt(COLUMN)))
bh = tf.Variable(tf.zeros([UNITS_OF_HIDDEN_LAYER]))
h = tf.nn.sigmoid(tf.matmul(x, Wh) + bh)
# Weights and biases to output layer
Wo = tf.transpose(Wh) # tied weights
bo = tf.Variable(tf.zeros([COLUMN]))
y = tf.nn.sigmoid(tf.matmul(h, Wo) + bo)
# Objective functions
cross_entropy = tf.reduce_mean(tf.pow(x - y, 2))
optimizer = tf.train.RMSPropOptimizer(LEARNING_RATE).minimize(cross_entropy)
init = tf.global_variables_initializer()
sess = tf.Session()
sess.run(init)
train_number, _ = x_train.shape
for j in range(TRAINING_EPOCHS):
sample = np.random.randint(train_number, size=BATCH_SIZE)
batch_xs = x_train[sample][:]
_, cost = sess.run([optimizer, cross_entropy], feed_dict={x: batch_xs})
print("COST: ", cost)
encodedTensor = tf.nn.sigmoid(tf.add(tf.matmul(x_train, Wh), bh))
encodedData = sess.run(encodedTensor)
decodedTensor = tf.nn.sigmoid(tf.add(tf.matmul(encodedData, Wo), bo))
decodedData = sess.run(decodedTensor)
return decodedData