Commit d4c5c4f4 authored by Yuxin Wu's avatar Yuxin Wu

more fixes on old usage of add_moving_summary (#805)

parent c84b6c24
...@@ -115,7 +115,7 @@ class Model(ModelDesc): ...@@ -115,7 +115,7 @@ class Model(ModelDesc):
add_param_summary(('.*/W', ['histogram'])) # monitor W add_param_summary(('.*/W', ['histogram'])) # monitor W
total_cost = tf.add_n(costs, name='cost') total_cost = tf.add_n(costs, name='cost')
add_moving_summary(*(costs + [wrong, total_cost])) add_moving_summary(wrong, total_cost, *costs)
return total_cost return total_cost
def optimizer(self): def optimizer(self):
......
...@@ -96,7 +96,7 @@ class Model(ModelDesc): ...@@ -96,7 +96,7 @@ class Model(ModelDesc):
loss3 = tf.reduce_mean(loss3, name='loss3') loss3 = tf.reduce_mean(loss3, name='loss3')
cost = tf.add_n([loss3, 0.3 * loss2, 0.3 * loss1], name='weighted_cost') cost = tf.add_n([loss3, 0.3 * loss2, 0.3 * loss1], name='weighted_cost')
add_moving_summary([cost, loss1, loss2, loss3]) add_moving_summary(cost, loss1, loss2, loss3)
def prediction_incorrect(logits, label, topk, name): def prediction_incorrect(logits, label, topk, name):
return tf.cast(tf.logical_not(tf.nn.in_top_k(logits, label, topk)), tf.float32, name=name) return tf.cast(tf.logical_not(tf.nn.in_top_k(logits, label, topk)), tf.float32, name=name)
......
...@@ -196,7 +196,6 @@ def add_param_summary(*summary_lists, **kwargs): ...@@ -196,7 +196,6 @@ def add_param_summary(*summary_lists, **kwargs):
add_tensor_summary(p, actions, name=name, collections=collections) add_tensor_summary(p, actions, name=name, collections=collections)
# TODO: collection for the summary op
def add_moving_summary(*args, **kwargs): def add_moving_summary(*args, **kwargs):
""" """
Summarize the moving average for scalar tensors. Summarize the moving average for scalar tensors.
...@@ -228,6 +227,10 @@ def add_moving_summary(*args, **kwargs): ...@@ -228,6 +227,10 @@ def add_moving_summary(*args, **kwargs):
logger.warn("add_moving_summary() called under reuse=True scope, ignored.") logger.warn("add_moving_summary() called under reuse=True scope, ignored.")
return [] return []
if len(args) == 1 and isinstance(args[0], (list, tuple)):
logger.warn("add_moving_summary() takes positional args instead of an iterable of tensors!")
args = args[0]
for x in args: for x in args:
assert isinstance(x, (tf.Tensor, tf.Variable)), x assert isinstance(x, (tf.Tensor, tf.Variable)), x
assert x.get_shape().ndims == 0, \ assert x.get_shape().ndims == 0, \
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment