Commit 812329fa authored by Yuxin Wu

Properly set reuse in tf.layers to avoid some surprises

parent 18b19d6d
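Context for the change (my summary, not part of the commit): in TF 1.x, a tf.layers layer object does not automatically inherit the reuse flag of the variable scope it is built under, so rebuilding the same model inside a reusing scope (e.g. a second GPU tower) could fail or quietly create duplicate variables. Every hunk below forwards the scope's flag through the constructor's private _reuse argument so the layer honors it. A minimal sketch of the intended behavior, using tf.layers.Dense as a stand-in for the wrappers patched here:

    import tensorflow as tf

    def model(x):
        # Forward the enclosing scope's reuse flag, as this commit does.
        layer = tf.layers.Dense(units=10,
                                _reuse=tf.get_variable_scope().reuse)
        return layer.apply(x, scope=tf.get_variable_scope())

    x = tf.placeholder(tf.float32, [None, 4])
    with tf.variable_scope('net'):
        y1 = model(x)   # creates variables directly under 'net'
    with tf.variable_scope('net', reuse=True):
        y2 = model(x)   # reuses them instead of failing or duplicating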
@@ -9,6 +9,7 @@ import os
 from tensorpack import *
 from tensorpack.tfutils.summary import *
 from tensorpack.dataflow import dataset
+from tensorpack.utils.gpu import get_num_gpu
 """
 A small convnet model for Cifar10 or Cifar100 dataset.
@@ -145,7 +146,7 @@ if __name__ == '__main__':
     if args.load:
         config.session_init = SaverRestore(args.load)
-    nr_gpu = len(args.gpu.split(','))
-    trainer = QueueInputTrainer() if nr_gpu <= 1 \
-        else SyncMultiGPUTrainerParameterServer(nr_gpu)
+    num_gpu = get_num_gpu()
+    trainer = QueueInputTrainer() if num_gpu <= 1 \
+        else SyncMultiGPUTrainerParameterServer(num_gpu)
     launch_train_with_config(config, trainer)
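An aside on the example change (my reading, not stated in the commit): len(args.gpu.split(',')) raises an AttributeError when --gpu is omitted, whereas tensorpack's get_num_gpu() counts the GPUs actually visible to the process (e.g. through CUDA_VISIBLE_DEVICES), so the single-GPU default now works without the flag. The same selection logic as a self-contained sketch:

    from tensorpack import QueueInputTrainer, SyncMultiGPUTrainerParameterServer
    from tensorpack.utils.gpu import get_num_gpu

    num_gpu = get_num_gpu()  # GPUs visible to this process, not a parsed flag
    trainer = (QueueInputTrainer() if num_gpu <= 1
               else SyncMultiGPUTrainerParameterServer(num_gpu))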
@@ -110,7 +110,8 @@ def BatchNorm(inputs, axis=None, training=None, momentum=0.9, epsilon=1e-5,
             beta_initializer=beta_initializer,
             gamma_initializer=gamma_initializer,
             virtual_batch_size=virtual_batch_size,
-            fused=True
+            fused=True,
+            _reuse=tf.get_variable_scope().reuse
         )
     else:
         assert virtual_batch_size is None, "Feature not supported in this version of TF!"
@@ -120,7 +121,8 @@ def BatchNorm(inputs, axis=None, training=None, momentum=0.9, epsilon=1e-5,
             center=center, scale=scale,
             beta_initializer=beta_initializer,
             gamma_initializer=gamma_initializer,
-            fused=True
+            fused=True,
+            _reuse=tf.get_variable_scope().reuse
         )
     xn = layer.apply(inputs, training=training, scope=tf.get_variable_scope())
@@ -206,7 +208,8 @@ def BatchRenorm(x, rmax, dmax, momentum=0.9, epsilon=1e-5,
                 'dmax': dmax},
             renorm_momentum=0.99,
             gamma_initializer=gamma_initializer,
-            fused=False)
+            fused=False,
+            _reuse=tf.get_variable_scope().reuse)
     xn = layer.apply(x, training=ctx.is_training, scope=tf.get_variable_scope())
     if ctx.is_main_training_tower:
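For context (an assumption about the motivation, not part of the diff): tensorpack's multi-GPU trainers call the model-building function once per tower and enable variable reuse for every tower after the first, roughly as below. Forwarding _reuse ensures the tf.layers objects built inside honor that flag rather than trying to re-create tower 0's variables:

    import tensorflow as tf

    def build_towers(tower_fn, per_gpu_inputs):
        # tower_fn is a hypothetical model function built from the wrappers
        # patched in this commit (Conv2D, FullyConnected, BatchNorm, ...).
        outputs = []
        with tf.variable_scope(tf.get_variable_scope()):
            for i, inp in enumerate(per_gpu_inputs):
                with tf.device('/gpu:%d' % i):
                    outputs.append(tower_fn(inp))
                    # Every tower after the first must reuse tower 0's
                    # variables; a layer that ignores the flag may error
                    # out or silently create a duplicate set.
                    tf.get_variable_scope().reuse_variables()
        return outputs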
@@ -63,7 +63,8 @@ def Conv2D(
         bias_initializer=bias_initializer,
         kernel_regularizer=kernel_regularizer,
         bias_regularizer=bias_regularizer,
-        activity_regularizer=activity_regularizer)
+        activity_regularizer=activity_regularizer,
+        _reuse=tf.get_variable_scope().reuse)
     ret = layer.apply(inputs, scope=tf.get_variable_scope())
     ret = tf.identity(ret, name='output')
@@ -164,7 +165,8 @@ def Conv2DTranspose(
         bias_initializer=bias_initializer,
         kernel_regularizer=kernel_regularizer,
         bias_regularizer=bias_regularizer,
-        activity_regularizer=activity_regularizer)
+        activity_regularizer=activity_regularizer,
+        _reuse=tf.get_variable_scope().reuse)
     ret = layer.apply(inputs, scope=tf.get_variable_scope())
     ret = tf.identity(ret, name='output')
@@ -46,7 +46,8 @@ def FullyConnected(
         bias_initializer=bias_initializer,
         kernel_regularizer=kernel_regularizer,
         bias_regularizer=bias_regularizer,
-        activity_regularizer=activity_regularizer)
+        activity_regularizer=activity_regularizer,
+        _reuse=tf.get_variable_scope().reuse)
     ret = layer.apply(inputs, scope=tf.get_variable_scope())
     ret = tf.identity(ret, name='output')
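For comparison (not part of this commit): the functional tf.layers wrappers expose the same switch as a public reuse argument, which is the documented way to share variables with that API:

    import tensorflow as tf

    x = tf.placeholder(tf.float32, [None, 4])
    y1 = tf.layers.dense(x, 10, name='fc')               # creates fc/kernel, fc/bias
    y2 = tf.layers.dense(x, 10, name='fc', reuse=True)   # shares them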