From a2613a1e2552a315e5c74b8841b9957a0bce23bc Mon Sep 17 00:00:00 2001 From: boscotsang Date: Wed, 21 Dec 2016 21:32:30 +0800 Subject: [PATCH] Fix batchnormlayer compatibility to TF12 Add compatibility to TF12 caused by the change of the ones_initializer API. --- tensorlayer/layers.py | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/tensorlayer/layers.py b/tensorlayer/layers.py index f99892fae..65bd6f072 100755 --- a/tensorlayer/layers.py +++ b/tensorlayer/layers.py @@ -1719,9 +1719,14 @@ def _get_variable(name, beta = _get_variable('beta', params_shape, initializer=beta_init) - gamma = _get_variable('gamma', - params_shape, - initializer=gamma_init) + try: # TF12 + gamma = _get_variable('gamma', + params_shape, + initializer=gamma_init()) + except: # TF11 + gamma = _get_variable('gamma', + params_shape, + initializer=gamma_init) # trainable=False means : it prevent TF from updating this variable # from the gradient, we have to update this from the mean computed