Commit b93a9ed0 authored by Yuxin Wu

update docs about summaries

parent 88893e56
@@ -71,6 +71,10 @@ class RunUpdateOps(RunOp):
     _chief_only = False
 
     def __init__(self, collection=tf.GraphKeys.UPDATE_OPS):
+        """
+        Args:
+            collection (str): collection of ops to run. Defaults to ``tf.GraphKeys.UPDATE_OPS``
+        """
         name = 'UPDATE_OPS' if collection == tf.GraphKeys.UPDATE_OPS else collection
 
         def f():
...
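
For background on what this callback ends up running, a minimal TF 1.x graph-mode sketch (not part of the commit; the layer and names are illustrative) of how ops typically land in ``tf.GraphKeys.UPDATE_OPS``:

    import tensorflow as tf

    x = tf.placeholder(tf.float32, [None, 32])
    # Batch normalization registers its moving mean/variance update ops
    # in tf.GraphKeys.UPDATE_OPS; RunUpdateOps then runs that collection
    # along with every training step.
    y = tf.layers.batch_normalization(x, training=True)

    update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
    print(len(update_ops))  # the ops the callback would execute each step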
@@ -13,15 +13,19 @@ __all__ = ['MovingAverageSummary', 'MergeAllSummaries']
 
 class MovingAverageSummary(Callback):
-    """ Maintain the moving average of the tensors
-    in every step, and summarize them. Enabled by default.
+    """
+    This callback is enabled by default.
+    Maintain the moving average of summarized tensors in every step,
+    by ops added to the collection.
+    Note that it only maintains the EMAs, the actual summary should be done in other callbacks.
     """
 
     def __init__(self, collection=MOVING_SUMMARY_OPS_KEY):
         """
         Args:
             collection(str): the collection of EMA-maintaining ops.
-                The default would work with :func:`add_moving_summary()`,
-                but you can use some others.
+                The default value would work with
+                the tensors you added by :func:`tfutils.summary.add_moving_summary()`,
+                but you can use other collections as well.
         """
         self._collection = collection
@@ -29,7 +33,7 @@ class MovingAverageSummary(Callback):
         ops = tf.get_collection(self._collection)
         logger.info("Maintain moving average summary of {} tensors.".format(len(ops)))
-        self.ema_op = tf.group(*ops, name='summary_moving_averages')
+        self.ema_op = tf.group(*ops, name='maintain_moving_average_summary')
         self._fetch = tf.train.SessionRunArgs(fetches=self.ema_op)
 
     def _before_run(self, _):
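
To make the ``collection`` argument concrete, a small sketch (not from the commit; the collection name and tensor are hypothetical) of registering an EMA-maintaining op that this callback would then run every step:

    import tensorflow as tf
    from tensorpack.callbacks import MovingAverageSummary

    MY_EMA_OPS = 'my_ema_ops'  # hypothetical collection; the default is MOVING_SUMMARY_OPS_KEY
    x = tf.reduce_mean(tf.random_normal([16]), name='some_scalar')
    ema = tf.train.ExponentialMovingAverage(decay=0.99)
    tf.add_to_collection(MY_EMA_OPS, ema.apply([x]))  # an EMA-maintaining op

    # The callback groups and runs every op in the collection at each step;
    # writing the averaged values to the logs is left to other callbacks.
    cb = MovingAverageSummary(collection=MY_EMA_OPS)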
@@ -89,6 +93,7 @@ class MergeAllSummaries_RunWithOp(Callback):
 
 def MergeAllSummaries(period=0, run_alone=False, key=tf.GraphKeys.SUMMARIES):
     """
+    This callback is enabled by default.
     Evaluate all summaries by `tf.summary.merge_all`, and write to logs.
 
     Args:
@@ -101,6 +106,7 @@ def MergeAllSummaries(period=0, run_alone=False, key=tf.GraphKeys.SUMMARIES):
             For :class:`SimpleTrainer`, it needs to be False because summary may
             depend on inputs.
         key (str): the collection of summary tensors. Same as in `tf.summary.merge_all`.
+            Default is ``tf.GraphKeys.SUMMARIES``
 
     Returns:
         a Callback.
...
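
A brief usage sketch (the value is illustrative, not from the commit); the callback is normally added automatically, but constructing it yourself lets you tune how summaries are written:

    from tensorpack.callbacks import MergeAllSummaries

    # Write merged summaries on a step-based schedule as well; see the full
    # docstring for the exact semantics of ``period`` and ``run_alone``.
    summary_cb = MergeAllSummaries(period=100)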
@@ -19,8 +19,7 @@ from .tower import get_current_tower_context
 from .symbolic_functions import rms
 from .scope_utils import cached_name_scope
 
-__all__ = ['create_scalar_summary', 'create_image_summary',
-           'add_tensor_summary', 'add_param_summary',
+__all__ = ['add_tensor_summary', 'add_param_summary',
            'add_activation_summary', 'add_moving_summary']
@@ -97,9 +96,9 @@ def add_tensor_summary(x, types, name=None, collections=None,
         x (tf.Tensor): a tensor to summarize
         types (list[str]): can be scalar/histogram/sparsity/mean/rms
         name (str): summary name. Defaults to be the op name.
-        collections (str): same as in `tf.summary.scalar`.
-        main_tower_only (bool): Only run under main training tower. When
-            setting to True, calling this function under other TowerContext
+        collections (list[str]): collections of the summary ops.
+        main_tower_only (bool): Only run under main training tower. If
+            set to True, calling this function under other TowerContext
             has no effect.
 
     Examples:
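
The docstring's own example is collapsed in this view; a self-contained sketch of a call consistent with the argument list above (the tensor is illustrative, not the elided example):

    import tensorflow as tf
    from tensorpack.tfutils.summary import add_tensor_summary

    x = tf.random_normal([64, 128], name='some_tensor')  # illustrative tensor
    # Add several kinds of summaries for the same tensor at once.
    add_tensor_summary(x, ['histogram', 'rms', 'sparsity'])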
@@ -141,6 +140,7 @@ def add_activation_summary(x, name=None, collections=None):
     Args:
         x (tf.Tensor): the tensor to summary.
         name (str): if is None, use x.name.
+        collections (list[str]): collections of the summary ops.
     """
     ctx = get_current_tower_context()
     if ctx is not None and not ctx.is_main_training_tower:
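
For completeness, an illustrative call (the activation is hypothetical, not from the commit):

    import tensorflow as tf
    from tensorpack.tfutils.summary import add_activation_summary

    act = tf.nn.relu(tf.random_normal([32, 256]), name='fc1/output')  # illustrative
    add_activation_summary(act)  # summarize standard statistics of the activation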
@@ -164,7 +164,7 @@ def add_param_summary(*summary_lists, **kwargs):
     Args:
         summary_lists (list): each is (regex, [list of summary type]).
             Summary type is defined in :func:`add_tensor_summary`.
-        kwargs: only ``collections`` is allowed.
+        collections (list[str]): collections of the summary ops.
 
     Examples:
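
The concrete example is again collapsed here; a hedged sketch of a typical call (the regex and summary types are illustrative), meant to sit inside graph-building code:

    from tensorpack.tfutils.summary import add_param_summary

    # Summarize all trainable variables whose name matches the regex.
    add_param_summary(('.*/W', ['histogram', 'rms']))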
@@ -195,8 +195,7 @@ def add_param_summary(*summary_lists, **kwargs):
 def add_moving_summary(*args, **kwargs):
     """
     Enable moving average summary for some tensors.
-    It's only effective in the main training tower, otherwise calling this
-    function is a no-op.
+    This function is a no-op if not calling from main training tower.
 
     Args:
         args: tensors to summary
...
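
A minimal usage sketch (the tensor and its name are illustrative); in real code the call sits inside the model's graph-building function, under the main training tower:

    import tensorflow as tf
    from tensorpack.tfutils.summary import add_moving_summary

    cost = tf.reduce_mean(tf.random_normal([8]), name='total_cost')  # illustrative
    add_moving_summary(cost)  # its EMA is then maintained by MovingAverageSummary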