Commit b93a9ed0 authored by Yuxin Wu

update docs about summaries

parent 88893e56
@@ -71,6 +71,10 @@ class RunUpdateOps(RunOp):
     _chief_only = False

     def __init__(self, collection=tf.GraphKeys.UPDATE_OPS):
+        """
+        Args:
+            collection (str): collection of ops to run. Defaults to ``tf.GraphKeys.UPDATE_OPS``
+        """
         name = 'UPDATE_OPS' if collection == tf.GraphKeys.UPDATE_OPS else collection

         def f():
......
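For context: the ops in ``tf.GraphKeys.UPDATE_OPS`` are typically the moving-statistics updates registered by batch normalization, and ``RunUpdateOps`` groups them and runs them together with every training step. Below is a minimal sketch of what that grouping amounts to, assuming TF1-style ``tf.layers`` (placeholder shape and names are illustrative, not from this commit):

import tensorflow as tf

x = tf.placeholder(tf.float32, [None, 32], name='x')
# tf.layers.batch_normalization registers its moving-mean/variance
# update ops in tf.GraphKeys.UPDATE_OPS rather than wiring them
# into the training op itself.
y = tf.layers.batch_normalization(x, training=True)

# RunUpdateOps effectively groups this collection into a single op
# and fetches it alongside every training step.
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
update_op = tf.group(*update_ops, name='update_ops')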
@@ -13,15 +13,19 @@ __all__ = ['MovingAverageSummary', 'MergeAllSummaries']

 class MovingAverageSummary(Callback):
-    """ Maintain the moving average of the tensors
-    in every step, and summarize them. Enabled by default.
+    """
+    This callback is enabled by default.
+    Maintain the moving average of summarized tensors in every step,
+    through ops added to the collection.
+    Note that it only maintains the EMAs; the actual summaries are written by other callbacks.
     """
     def __init__(self, collection=MOVING_SUMMARY_OPS_KEY):
         """
         Args:
             collection(str): the collection of EMA-maintaining ops.
-                The default would work with :func:`add_moving_summary()`,
-                but you can use some others.
+                The default value works with the tensors added by
+                :func:`tfutils.summary.add_moving_summary()`,
+                but other collections can be used as well.
         """
         self._collection = collection
@@ -29,7 +33,7 @@ class MovingAverageSummary(Callback):
         ops = tf.get_collection(self._collection)
         logger.info("Maintain moving average summary of {} tensors.".format(len(ops)))
-        self.ema_op = tf.group(*ops, name='summary_moving_averages')
+        self.ema_op = tf.group(*ops, name='maintain_moving_average_summary')
         self._fetch = tf.train.SessionRunArgs(fetches=self.ema_op)

     def _before_run(self, _):
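To illustrate the division of labor the new docstring describes: :func:`add_moving_summary` adds EMA-maintaining ops to the collection, ``MovingAverageSummary`` runs them every step, and a summary-writing callback such as ``MergeAllSummaries`` emits the results. A hedged sketch (the ``cost`` tensor is a stand-in, not from this commit):

import tensorflow as tf
from tensorpack.callbacks import MovingAverageSummary, MergeAllSummaries
from tensorpack.tfutils.summary import add_moving_summary

# A stand-in scalar; in a real model this would be the training cost.
cost = tf.reduce_mean(tf.random_normal([32]), name='cost')

# Registers an EMA-maintaining op for `cost` in MOVING_SUMMARY_OPS_KEY,
# plus a scalar summary of the smoothed value.
add_moving_summary(cost)

# MovingAverageSummary only runs the EMA ops each step;
# MergeAllSummaries is what actually writes summaries to the logs.
callbacks = [MovingAverageSummary(), MergeAllSummaries()]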
@@ -89,6 +93,7 @@ class MergeAllSummaries_RunWithOp(Callback):

 def MergeAllSummaries(period=0, run_alone=False, key=tf.GraphKeys.SUMMARIES):
     """
+    This callback is enabled by default.
     Evaluate all summaries by `tf.summary.merge_all`, and write to logs.

     Args:
@@ -101,6 +106,7 @@ def MergeAllSummaries(period=0, run_alone=False, key=tf.GraphKeys.SUMMARIES):
             For :class:`SimpleTrainer`, it needs to be False because summary may
             depend on inputs.
         key (str): the collection of summary tensors. Same as in `tf.summary.merge_all`.
+            Default is ``tf.GraphKeys.SUMMARIES``

     Returns:
         a Callback.
......
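As a usage sketch (parameter values are illustrative, and the exact period semantics are an assumption based on the surrounding docstring): with the default ``period=0`` the merged summaries are written once per epoch, while a positive ``period`` writes them every that many steps:

from tensorpack.callbacks import MergeAllSummaries

# Default: merged summaries are written once per epoch (assumed).
cb_epoch = MergeAllSummaries()

# Illustrative: write merged summaries every 100 steps instead.
cb_steps = MergeAllSummaries(period=100)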
@@ -19,8 +19,7 @@ from .tower import get_current_tower_context
 from .symbolic_functions import rms
 from .scope_utils import cached_name_scope

-__all__ = ['create_scalar_summary', 'create_image_summary',
-           'add_tensor_summary', 'add_param_summary',
+__all__ = ['add_tensor_summary', 'add_param_summary',
            'add_activation_summary', 'add_moving_summary']
@@ -97,9 +96,9 @@ def add_tensor_summary(x, types, name=None, collections=None,
         x (tf.Tensor): a tensor to summarize
         types (list[str]): can be scalar/histogram/sparsity/mean/rms
         name (str): summary name. Defaults to be the op name.
-        collections (str): same as in `tf.summary.scalar`.
-        main_tower_only (bool): Only run under main training tower. When
-            setting to True, calling this function under other TowerContext
+        collections (list[str]): collections of the summary ops.
+        main_tower_only (bool): Only run under main training tower. If
+            set to True, calling this function under other TowerContext
             has no effect.

     Examples:
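The ``Examples:`` block itself is elided from this hunk; an illustrative call (tensor and names are stand-ins, not taken from the original docs) might look like:

import tensorflow as tf
from tensorpack.tfutils.summary import add_tensor_summary

# A stand-in tensor; in a real model this would be some layer output.
activation = tf.nn.relu(tf.random_normal([32, 64]), name='act')

# Histogram, RMS and sparsity summaries of the tensor, named after
# its op by default.
add_tensor_summary(activation, ['histogram', 'rms', 'sparsity'])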
@@ -141,6 +140,7 @@ def add_activation_summary(x, name=None, collections=None):
     Args:
         x (tf.Tensor): the tensor to summary.
         name (str): if is None, use x.name.
+        collections (list[str]): collections of the summary ops.
     """
     ctx = get_current_tower_context()
     if ctx is not None and not ctx.is_main_training_tower:
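For illustration, a hedged sketch of calling it on a layer output during model construction (the activation is a stand-in; as the code above shows, under a non-main training tower the call is a no-op):

import tensorflow as tf
from tensorpack.tfutils.summary import add_activation_summary

# A stand-in activation; in a real model this is a nonlinearity output.
act = tf.nn.relu(tf.random_normal([32, 64]), name='fc0_output')
add_activation_summary(act)   # summarizes statistics of this activation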
@@ -164,7 +164,7 @@ def add_param_summary(*summary_lists, **kwargs):
     Args:
         summary_lists (list): each is (regex, [list of summary type]).
             Summary type is defined in :func:`add_tensor_summary`.
-        kwargs: only ``collections`` is allowed.
+        collections (list[str]): collections of the summary ops.

     Examples:
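The elided ``Examples:`` block pairs regexes with summary types; an illustrative call (the patterns are assumptions matching conventional tensorpack variable names such as ``conv1/W``) could be:

from tensorpack.tfutils.summary import add_param_summary

# Histogram + RMS summaries for every variable whose name matches .*/W,
# scalar summaries for every batch-norm gamma.
add_param_summary(
    ('.*/W', ['histogram', 'rms']),
    ('.*/gamma', ['scalar'])
)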
@@ -195,8 +195,7 @@ def add_moving_summary(*args, **kwargs):
 def add_moving_summary(*args, **kwargs):
     """
     Enable moving average summary for some tensors.
-    It's only effective in the main training tower, otherwise calling this
-    function is a no-op.
+    This function is a no-op if not called from the main training tower.

     Args:
         args: tensors to summary
......
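A short usage sketch showing that one call can register several scalars (the loss and metric tensors are stand-ins; in a real model this runs while building the main training tower):

import tensorflow as tf
from tensorpack.tfutils.summary import add_moving_summary

# Stand-ins; in a real model these come from the loss and the metric.
per_example_loss = tf.random_normal([32])
correct = tf.cast(tf.random_uniform([32]) > 0.5, tf.float32)

cost = tf.reduce_mean(per_example_loss, name='cost')
accuracy = tf.reduce_mean(correct, name='accuracy')

# Each tensor gets an EMA-maintaining op plus a scalar summary
# of the smoothed value.
add_moving_summary(cost, accuracy)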