
Variable batch_normalization_1/moving_mean/biased already exists, disallowed error #1

Open
kktam opened this issue Apr 27, 2018 · 0 comments

kktam commented Apr 27, 2018

Hi,

I am running facenet with the following configuration in Anaconda when I encounter a "variable already exists, disallowed" error.

Anaconda: 1.6.12
numpy: 1.12.1
Tensorflow: 1.10
Keras: 2.1.6
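
The versions above can be double-checked inside the environment with something like:

import numpy, tensorflow, keras
print("numpy", numpy.__version__)
print("tensorflow", tensorflow.__version__)
print("keras", keras.__version__)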

Have you seen this issue? Which versions were you running when you last executed the test run?

The error occurred when running block 20:

# modelsqueeze, euclidean_distance and contrastive_loss are defined in earlier notebook blocks.
from keras.layers import Input, Flatten, Dense, Dropout, Lambda
from keras.models import Model
from keras.optimizers import Adam, SGD
from keras import backend as K

im_in = Input(shape=(200,200,4))
#wrong = Input(shape=(200,200,3))

x1 = modelsqueeze(im_in)
#x = Convolution2D(64, (5, 5), padding='valid', strides =(2,2))(x)

#x1 = MaxPooling2D(pool_size=(3, 3), strides=(2, 2))(x1)

"""
x1 = Convolution2D(256, (3,3), padding='valid', activation="relu")(x1)
x1 = Dropout(0.4)(x1)

x1 = MaxPooling2D(pool_size=(3, 3), strides=(1, 1))(x1)

x1 = Convolution2D(256, (3,3), padding='valid', activation="relu")(x1)
x1 = BatchNormalization()(x1)
x1 = Dropout(0.4)(x1)

x1 = Convolution2D(64, (1,1), padding='same', activation="relu")(x1)
x1 = BatchNormalization()(x1)
x1 = Dropout(0.4)(x1)
"""



x1 = Flatten()(x1)

x1 = Dense(512, activation="relu")(x1)
x1 = Dropout(0.2)(x1)
#x1 = BatchNormalization()(x1)
feat_x = Dense(128, activation="linear")(x1)
feat_x = Lambda(lambda  x: K.l2_normalize(x,axis=1))(feat_x)


model_top = Model(inputs = [im_in], outputs = feat_x)

model_top.summary()

im_in1 = Input(shape=(200,200,4))
im_in2 = Input(shape=(200,200,4))

feat_x1 = model_top(im_in1)
feat_x2 = model_top(im_in2)


lambda_merge = Lambda(euclidean_distance)([feat_x1, feat_x2])


model_final = Model(inputs = [im_in1, im_in2], outputs = lambda_merge)

model_final.summary()

adam = Adam(lr=0.001)

sgd = SGD(lr=0.001, momentum=0.9)

model_final.compile(optimizer=adam, loss=contrastive_loss)

The error is:

---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-20-d87010ad9d0a> in <module>()
      2 #wrong = Input(shape=(200,200,3))
      3 
----> 4 x1 = modelsqueeze(im_in)
      5 #x = Convolution2D(64, (5, 5), padding='valid', strides =(2,2))(x)
      6 

~/anaconda3/lib/python3.6/site-packages/keras/engine/topology.py in __call__(self, inputs, **kwargs)
    617 
    618             # Actually call the layer, collecting output(s), mask(s), and shape(s).
--> 619             output = self.call(inputs, **kwargs)
    620             output_mask = self.compute_mask(inputs, previous_mask)
    621 

~/anaconda3/lib/python3.6/site-packages/keras/engine/topology.py in call(self, inputs, mask)
   2083             return self._output_tensor_cache[cache_key]
   2084         else:
-> 2085             output_tensors, _, _ = self.run_internal_graph(inputs, masks)
   2086             return output_tensors
   2087 

~/anaconda3/lib/python3.6/site-packages/keras/engine/topology.py in run_internal_graph(self, inputs, masks)
   2233                                 if 'mask' not in kwargs:
   2234                                     kwargs['mask'] = computed_mask
-> 2235                             output_tensors = _to_list(layer.call(computed_tensor, **kwargs))
   2236                             output_masks = layer.compute_mask(computed_tensor,
   2237                                                               computed_mask)

~/anaconda3/lib/python3.6/site-packages/keras/layers/normalization.py in call(self, inputs, training)
    191         self.add_update([K.moving_average_update(self.moving_mean,
    192                                                  mean,
--> 193                                                  self.momentum),
    194                          K.moving_average_update(self.moving_variance,
    195                                                  variance,

~/anaconda3/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py in moving_average_update(x, value, momentum)
   1003     """
   1004     return moving_averages.assign_moving_average(
-> 1005         x, value, momentum, zero_debias=True)
   1006 
   1007 

~/anaconda3/lib/python3.6/site-packages/tensorflow/python/training/moving_averages.py in assign_moving_average(variable, value, decay, zero_debias, name)
     68         decay = math_ops.cast(decay, variable.dtype.base_dtype)
     69       if zero_debias:
---> 70         update_delta = _zero_debias(variable, value, decay)
     71       else:
     72         update_delta = (variable - value) * decay

~/anaconda3/lib/python3.6/site-packages/tensorflow/python/training/moving_averages.py in _zero_debias(unbiased_var, value, decay)
    178         local_step_initializer = init_ops.zeros_initializer()
    179       biased_var = variable_scope.get_variable(
--> 180           "biased", initializer=biased_initializer, trainable=False)
    181       local_step = variable_scope.get_variable(
    182           "local_step",

~/anaconda3/lib/python3.6/site-packages/tensorflow/python/ops/variable_scope.py in get_variable(name, shape, dtype, initializer, regularizer, trainable, collections, caching_device, partitioner, validate_shape, use_resource, custom_getter)
   1047       collections=collections, caching_device=caching_device,
   1048       partitioner=partitioner, validate_shape=validate_shape,
-> 1049       use_resource=use_resource, custom_getter=custom_getter)
   1050 get_variable_or_local_docstring = (
   1051     """%s

~/anaconda3/lib/python3.6/site-packages/tensorflow/python/ops/variable_scope.py in get_variable(self, var_store, name, shape, dtype, initializer, regularizer, trainable, collections, caching_device, partitioner, validate_shape, use_resource, custom_getter)
    946           collections=collections, caching_device=caching_device,
    947           partitioner=partitioner, validate_shape=validate_shape,
--> 948           use_resource=use_resource, custom_getter=custom_getter)
    949 
    950   def _get_partitioned_variable(self,

~/anaconda3/lib/python3.6/site-packages/tensorflow/python/ops/variable_scope.py in get_variable(self, name, shape, dtype, initializer, regularizer, reuse, trainable, collections, caching_device, partitioner, validate_shape, use_resource, custom_getter)
    354           reuse=reuse, trainable=trainable, collections=collections,
    355           caching_device=caching_device, partitioner=partitioner,
--> 356           validate_shape=validate_shape, use_resource=use_resource)
    357 
    358   def _get_partitioned_variable(

~/anaconda3/lib/python3.6/site-packages/tensorflow/python/ops/variable_scope.py in _true_getter(name, shape, dtype, initializer, regularizer, reuse, trainable, collections, caching_device, partitioner, validate_shape, use_resource)
    339           trainable=trainable, collections=collections,
    340           caching_device=caching_device, validate_shape=validate_shape,
--> 341           use_resource=use_resource)
    342 
    343     if custom_getter is not None:

~/anaconda3/lib/python3.6/site-packages/tensorflow/python/ops/variable_scope.py in _get_single_variable(self, name, shape, dtype, initializer, regularizer, partition_info, reuse, trainable, collections, caching_device, validate_shape, use_resource)
    651                          " Did you mean to set reuse=True in VarScope? "
    652                          "Originally defined at:\n\n%s" % (
--> 653                              name, "".join(traceback.format_list(tb))))
    654       found_var = self._vars[name]
    655       if not shape.is_compatible_with(found_var.get_shape()):

ValueError: Variable batch_normalization_1/moving_mean/biased already exists, disallowed. Did you mean to set reuse=True in VarScope? Originally defined at:

  File "/Users/xyz/anaconda3/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py", line 1005, in moving_average_update
    x, value, momentum, zero_debias=True)
  File "/Users/xyz/anaconda3/lib/python3.6/site-packages/keras/layers/normalization.py", line 193, in call
    self.momentum),
  File "/Users/xyz/anaconda3/lib/python3.6/site-packages/keras/engine/topology.py", line 619, in __call__
    output = self.call(inputs, **kwargs)
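
For what it's worth, this kind of error usually appears when the model-building block is executed more than once in the same session, so the batch_normalization_1 moving-average variables already exist in the default TensorFlow graph. As a rough workaround sketch (not verified against this exact setup), resetting the Keras session before re-running the block avoids the name collision:

from keras import backend as K

# Discard the current TensorFlow graph so previously created
# batch_normalization_*/moving_mean/biased variables cannot collide
# when the model-building block is run again.
K.clear_session()

# ...then re-run the blocks that build modelsqueeze, model_top and model_final.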