import tensorflow as tf
import tensorflow.keras as ks
def scope_error_test():
    input_holder = tf.placeholder(dtype=tf.float32,
                                  shape=(None, 368, 368, 3),
                                  name='input')
    with tf.variable_scope("scope_1"):
        with tf.variable_scope("scope_2"):
            conv1 = ks.layers.Conv2D(kernel_size=7,
                                     filters=64,
                                     strides=2,
                                     padding='same',
                                     activation=tf.nn.relu,
                                     name='conv1')(input_holder)
            pool1 = ks.layers.MaxPool2D(pool_size=3, padding='same',
                                        strides=2,
                                        name='pool1')(conv1)
    print(pool1.get_shape().as_list())
with tf.Session() as sess:
    scope_error_test()
    sess.run(tf.global_variables_initializer())
    print(tf.global_variables())
    with tf.variable_scope('', reuse=True):
        for variable in tf.global_variables():
            var_name = variable.name.split(':')[0]
            # ValueError is raised here for the Keras-created variables
            var_tf = tf.get_variable(var_name)

In this example code, I want to understand why I get the following error:

ValueError: Variable scope_1/scope_2/conv1/kernel does not exist, or was not created with tf.get_variable(). Did you mean to set reuse=tf.AUTO_REUSE in VarScope?
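
For comparison, here is a minimal sketch (not part of my original script; the scope and variable names "demo" and "w" are just for illustration): a variable created directly through tf.get_variable can be looked up the same way without any error, so I suspect the Keras layers register their variables differently.

import tensorflow as tf

with tf.variable_scope("demo"):
    # Created through tf.get_variable, so it is registered
    # in the variable scope's internal variable store.
    w = tf.get_variable("w", shape=(3, 3), dtype=tf.float32)

with tf.variable_scope("", reuse=True):
    # Succeeds: lookup by full name finds the registered variable.
    w_again = tf.get_variable("demo/w")

print(w is w_again)  # True: the same Variable object is returned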

Note: when I used tensorflow.contrib.layers instead of tensorflow.keras.layers, it worked correctly! Could this be a bug?
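
For reference, this is roughly what the working contrib version looked like (a sketch reconstructed from memory; argument names follow tf.contrib.layers, which names the kernel variable "weights" rather than "kernel"):

import tensorflow as tf

def scope_ok_test():
    input_holder = tf.placeholder(dtype=tf.float32,
                                  shape=(None, 368, 368, 3),
                                  name='input')
    with tf.variable_scope("scope_1"):
        with tf.variable_scope("scope_2"):
            # tf.contrib.layers creates its variables via tf.get_variable,
            # so the reuse lookup in the main script finds these.
            conv1 = tf.contrib.layers.conv2d(input_holder,
                                             num_outputs=64,
                                             kernel_size=7,
                                             stride=2,
                                             padding='SAME',
                                             activation_fn=tf.nn.relu,
                                             scope='conv1')
            pool1 = tf.contrib.layers.max_pool2d(conv1,
                                                 kernel_size=3,
                                                 stride=2,
                                                 padding='SAME',
                                                 scope='pool1')
    print(pool1.get_shape().as_list())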

I have tried TensorFlow 1.9.0 and TensorFlow 1.12.0.
