Extracting values from a TensorFlow Variable

Question:

I'm new to Python and TensorFlow and I'm having trouble getting values out of my NN after the training phase.

import tensorflow as tf
import numpy as np
import input_data

mnist = input_data.read_data_sets("/tmp/data/", one_hot = True)

n_nodes_hl1 = 50
n_nodes_hl2 = 50

n_classes = 10
batch_size = 128

x = tf.placeholder('float',[None, 784])
y = tf.placeholder('float')

def neural_network_model(data):

    hidden_1_layer = {'weights': tf.Variable(tf.random_normal([784,n_nodes_hl1]),name='weights1'),
                      'biases': tf.Variable(tf.random_normal([n_nodes_hl1]),name='biases1')}
    hidden_2_layer = {'weights': tf.Variable(tf.random_normal([n_nodes_hl1, n_nodes_hl2]),name='weights2'),
                      'biases': tf.Variable(tf.random_normal([n_nodes_hl2]),name='biases2')}
    output_layer =   {'weights': tf.Variable(tf.random_normal([n_nodes_hl2, n_classes]),name='weights3'),
                      'biases': tf.Variable(tf.random_normal([n_classes]),name='biases3')}

    l1 = tf.add(tf.matmul(data, hidden_1_layer['weights']) , hidden_1_layer['biases'])
    l1 = tf.nn.relu(l1)

    l2 = tf.add(tf.matmul(l1, hidden_2_layer['weights']) , hidden_2_layer['biases'])
    l2 = tf.nn.relu(l2)

    output = tf.add(tf.matmul(l2, output_layer['weights']) , output_layer['biases'])

    return output


def train_neural_network(x):
    prediction = neural_network_model(x)
    cost = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits_v2(logits=prediction,labels=y))
    optimizer = tf.train.AdamOptimizer().minimize(cost)

    hm_epochs = 100
    init = tf.group(tf.global_variables_initializer(), tf.local_variables_initializer() )
    with tf.Session() as sess:
        sess.run(init)
        for epoch in range(hm_epochs):
            epoch_loss = 0
            for _ in range(int(mnist.train.num_examples / batch_size)):
                ep_x, ep_y = mnist.train.next_batch(batch_size)
                _, c = sess.run([optimizer, cost], feed_dict = {x: ep_x, y: ep_y})
                epoch_loss += c
            print('Epoch', epoch+1, 'completed out of', hm_epochs, 'loss:',epoch_loss)


        correct = tf.equal(tf.argmax(prediction,1), tf.argmax(y,1))
        accuracy = tf.reduce_mean(tf.cast(correct, 'float'))
        print('Accuracy:', accuracy.eval({x:mnist.test.images, y: mnist.test.labels}))


train_neural_network(x)

I tried to extract weights from layer 1 using:

    w = tf.get_variable('weights1',shape=[784,50])
    b = tf.get_variable('biases1',shape=[50,])
    myWeights, myBiases = sess.run([w,b])

but this throws the error Attempting to use uninitialized value weights1_1.

Is this because my Variables are stored in a dict, 'hidden_1_layer'?

I'm not yet comfortable with Python and TensorFlow data types, so I'm totally confused!

Answers:

When you write

w = tf.get_variable('weights1',shape=[784,50])
b = tf.get_variable('biases1',shape=[50,])

you are defining two new variables:

  1. weights1 becomes weights1_1
  2. biases1 becomes biases1_1

Because variables named weights1 and biases1 already exist in the graph, TensorFlow adds the _<counter> suffix for you to avoid a naming collision.
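
For example, a minimal sketch of this suffixing behavior (graph mode, as in the question; the variable contents are placeholders):

v1 = tf.Variable(tf.zeros([2]), name='weights1')
v2 = tf.Variable(tf.zeros([2]), name='weights1')  # name already taken

print(v1.name)  # weights1:0
print(v2.name)  # weights1_1:0 -- TensorFlow appended _1 to avoid the collision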

If you want to create a reference to an already existing variable, you have to become familiar with the concept of variable scope.

In short, you have to make it explicit that you want to reuse a certain variable, which you can do with tf.variable_scope and its reuse parameter.

scope_name = ""  # default (top-level) scope
with tf.variable_scope(scope_name, reuse=True):
    w = tf.get_variable('weights1',shape=[784,50])
    b = tf.get_variable('biases1',shape=[50,])
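
One caveat worth noting: tf.get_variable with reuse=True can only find variables that were originally created through tf.get_variable. Variables built directly with tf.Variable, as in the question's model, are not registered in the variable store. A minimal sketch of the reuse pattern, assuming the layer is built with tf.get_variable inside a named scope:

with tf.variable_scope('model'):
    w = tf.get_variable('weights1', shape=[784, 50])  # created through the variable store

with tf.variable_scope('model', reuse=True):
    w_again = tf.get_variable('weights1', shape=[784, 50])  # looks up the existing variable

assert w is w_again  # both names refer to the same underlying Variable
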
Answered By: nessuno

Use the following code:

# run these inside the session where the variables were initialized and trained
tensor_1 = tf.get_default_graph().get_tensor_by_name("weights1:0")
tensor_2 = tf.get_default_graph().get_tensor_by_name("biases1:0")
np_arrays = sess.run([tensor_1, tensor_2])

There are also other ways to store the variables for later use or analysis. Please specify your purpose for extracting the weights and biases, and comment if further discussion is needed.
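
For instance, a common way to persist every variable for later analysis is tf.train.Saver (a minimal sketch; the checkpoint path is hypothetical):

saver = tf.train.Saver()  # tracks all variables in the graph by default

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    # ... training ...
    saver.save(sess, '/tmp/model.ckpt')  # hypothetical path

# later: restore and read the trained values back as NumPy arrays
with tf.Session() as sess:
    saver.restore(sess, '/tmp/model.ckpt')
    weights1 = sess.run(tf.get_default_graph().get_tensor_by_name('weights1:0'))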

Answered By: Shyam Swaroop

To get at the trained values, you can also use a custom callback method:

class CustomCallback(tf.keras.callbacks.Callback):
    def __init__(self, val_dir):
        super().__init__()
        # writer for the evaluation summaries
        self.val_writer = tf.summary.create_file_writer(val_dir)

    def on_epoch_end(self, epoch, logs=None):
        logs = logs or {}
        # the trained values are available as NumPy arrays at every epoch boundary
        print('weights: ' + str(self.model.get_weights()))

        # log each epoch metric against the optimizer's iteration counter
        if self.model.optimizer and hasattr(self.model.optimizer, 'iterations'):
            with self.val_writer.as_default():
                for name, value in logs.items():
                    tf.summary.scalar(
                        'evaluation_' + name + '_vs_iterations',
                        value,
                        step=self.model.optimizer.iterations.read_value(),
                    )

        # stop training once the accuracy target is reached
        if logs.get('accuracy') is not None and logs['accuracy'] > 0.90:
            self.model.stop_training = True

        # look up variables under the 'Value' scope by name
        # (requires compat.v1 variable-scope semantics)
        with tf.compat.v1.variable_scope('Value', reuse=tf.compat.v1.AUTO_REUSE):
            w1 = tf.compat.v1.get_variable('w2', shape=[256])
            b1 = tf.compat.v1.get_variable('b2', shape=[256,])

            print('w1: ' + str(w1))
            print('b1: ' + str(b1))

custom_callback = CustomCallback(val_dir)

history = model_highscores.fit(batched_features, epochs=99,
                               validation_data=dataset.shuffle(len(list_image)),
                               callbacks=[custom_callback])
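
As a lighter-weight alternative, get_weights() on an individual layer returns that layer's kernel and bias as NumPy arrays directly (a short sketch; the layer name 'dense_1' is hypothetical):

layer = model_highscores.get_layer('dense_1')  # hypothetical layer name
kernel, bias = layer.get_weights()
print(kernel.shape, bias.shape)
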
Answered By: Jirayu Kaewprateep