Commit ab507874 authored by Yuxin Wu

Reenter the same name_scope when reusing a variable_scope

parent 79f4760a
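Background for the change below: in TF1, re-entering a captured `variable_scope` with `reuse=True` opens a *new* name scope (with a `_1`-style suffix), so ops built on the second call get different names even though the variables are shared. Re-entering `scope.original_name_scope` keeps op names stable. A minimal sketch of the behavior, not part of this commit; the scope name is illustrative:

```python
import tensorflow as tf  # TF1-style graph API

with tf.variable_scope('tower') as scope:
    tf.add(1., 2.)                      # op name: tower/Add

# Re-entering the variable scope alone opens a fresh name scope:
with tf.variable_scope(scope, reuse=True):
    tf.add(1., 2.)                      # op name: tower_1/Add

# Also re-entering the original name scope keeps names stable:
with tf.variable_scope(scope, reuse=True):
    with tf.name_scope(scope.original_name_scope):
        tf.add(1., 2.)                  # op name: tower/Add_1
```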
@@ -27,7 +27,8 @@ class MovingAverageSummary(Callback):
     def _setup_graph(self):
         ops = tf.get_collection(self._collection)
-        logger.info("Maintain moving averages of {} tensors.".format(len(ops)))
+        logger.info("Maintain moving average summary of {} tensors.".format(len(ops)))
         self.ema_op = tf.group(*ops, name='summary_moving_averages')
         self._fetch = tf.train.SessionRunArgs(fetches=self.ema_op)
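For context, `MovingAverageSummary` collects the EMA-maintain ops registered in a graph collection, groups them into one op, and fetches that op on every training step. A hedged sketch of the same pattern using a bare `tf.train.SessionRunHook`; the collection key and class name are illustrative, not tensorpack's:

```python
import tensorflow as tf

MOVING_SUMMARY_OPS = 'MOVING_SUMMARY_OPS'  # hypothetical collection key

class MaintainEMAHook(tf.train.SessionRunHook):
    """Minimal hook mirroring what the callback's _setup_graph/_fetch do."""
    def begin(self):
        ops = tf.get_collection(MOVING_SUMMARY_OPS)
        self._fetch = tf.train.SessionRunArgs(
            fetches=tf.group(*ops, name='summary_moving_averages'))

    def before_run(self, run_context):
        return self._fetch  # piggy-back the EMA updates on each session.run
```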
@@ -44,8 +44,10 @@ def auto_reuse_variable_scope(func):
         h = hash((tf.get_default_graph(), scope.name))
         # print("Entering " + scope.name + " reuse: " + str(h in used_scope))
         if h in used_scope:
+            ns = scope.original_name_scope
             with tf.variable_scope(scope, reuse=True):
-                return func(*args, **kwargs)
+                with tf.name_scope(ns):
+                    return func(*args, **kwargs)
         else:
             used_scope.add(h)
             return func(*args, **kwargs)
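Usage, for reference: the decorator reuses variables on repeated calls under the same variable scope, and after this change the repeated calls also emit their ops under the original name scope rather than a suffixed one. A sketch, assuming the decorator is importable from `tensorpack.tfutils.scope_utils`:

```python
import tensorflow as tf
from tensorpack.tfutils.scope_utils import auto_reuse_variable_scope

@auto_reuse_variable_scope
def linear(x):
    w = tf.get_variable('w', shape=[])   # created once, reused afterwards
    return tf.multiply(x, w, name='out')

with tf.variable_scope('tower'):
    y1 = linear(tf.constant(1.))   # creates tower/w; op named tower/out
with tf.variable_scope('tower'):
    y2 = linear(tf.constant(2.))   # reuses tower/w; op lands under tower/
                                   # (as tower/out_1), not under tower_1/
```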
@@ -166,8 +166,10 @@ def add_moving_summary(v, *args, **kwargs):
     # TODO will produce variable tower0/xxx?
     # TODO not saved under distributed
     # TODO use zero_debias
+    # TODO create EMA for each variable separately, so that the maintain ops
+    # have a decent name (rather than EMA)
     gs = get_global_step_var()
-    with tf.name_scope(None), tf.device(gs.device):
+    with tf.device(gs.device):
         averager = tf.train.ExponentialMovingAverage(
             decay, num_updates=gs, name='EMA')
         avg_maintain_op = averager.apply(v)
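The removed `tf.name_scope(None)` was a workaround to escape the (previously suffixed) caller name scope; with the scope fix above it is no longer needed. For reference, a minimal sketch of the EMA pattern this function builds on; the tracked tensor, decay value, and global-step helper are illustrative:

```python
import tensorflow as tf

cost = tf.identity(3.0, name='cost')            # tensor to track
gs = tf.train.get_or_create_global_step()
with tf.device(gs.device):                      # keep EMA state with global step
    averager = tf.train.ExponentialMovingAverage(
        decay=0.95, num_updates=gs, name='EMA')
    maintain_op = averager.apply([cost])        # run this op on every step
ema_value = averager.average(cost)              # variable holding the average
```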