Shashank Suhas / seminar-breakout / Commits

Commit b93a9ed0, authored Aug 17, 2017 by Yuxin Wu (parent 88893e56)

update docs about summaries

Showing 3 changed files with 22 additions and 13 deletions:

    tensorpack/callbacks/graph.py      +4   -0
    tensorpack/callbacks/summary.py    +11  -5
    tensorpack/tfutils/summary.py      +7   -8
tensorpack/callbacks/graph.py

@@ -71,6 +71,10 @@ class RunUpdateOps(RunOp):
     _chief_only = False
 
     def __init__(self, collection=tf.GraphKeys.UPDATE_OPS):
+        """
+        Args:
+            collection (str): collection of ops to run. Defaults to ``tf.GraphKeys.UPDATE_OPS``
+        """
         name = 'UPDATE_OPS' if collection == tf.GraphKeys.UPDATE_OPS else collection
 
         def f():
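
For illustration, a minimal usage sketch of the parameter documented above (not part of this commit; assumes tensorpack's callback API of this period):

    from tensorpack.callbacks.graph import RunUpdateOps

    # Run every op in tf.GraphKeys.UPDATE_OPS (the default collection),
    # e.g. batch-norm moving-average updates.
    cb = RunUpdateOps()

    # Or name any other op collection; 'my_update_ops' is a hypothetical name.
    cb_custom = RunUpdateOps(collection='my_update_ops')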
tensorpack/callbacks/summary.py

@@ -13,15 +13,19 @@ __all__ = ['MovingAverageSummary', 'MergeAllSummaries']
 class MovingAverageSummary(Callback):
-    """ Maintain the moving average of the tensors
-    in every step, and summarize them. Enabled by default.
+    """
+    This callback is enabled by default.
+    Maintain the moving average of summarized tensors in every step,
+    by ops added to the collection.
+    Note that it only maintains the EMAs, the actual summary should be done in other callbacks.
     """
     def __init__(self, collection=MOVING_SUMMARY_OPS_KEY):
         """
         Args:
             collection(str): the collection of EMA-maintaining ops.
-                The default would work with :func:`add_moving_summary()`,
-                but you can use some others.
+                The default value would work with
+                the tensors you added by :func:`tfutils.summary.add_moving_summary()`,
+                but you can use other collections as well.
         """
         self._collection = collection

@@ -29,7 +33,7 @@ class MovingAverageSummary(Callback):
         ops = tf.get_collection(self._collection)
         logger.info("Maintain moving average summary of {} tensors.".format(len(ops)))
-        self.ema_op = tf.group(*ops, name='summary_moving_averages')
+        self.ema_op = tf.group(*ops, name='maintain_moving_average_summary')
         self._fetch = tf.train.SessionRunArgs(fetches=self.ema_op)
 
     def _before_run(self, _):
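
For context, a sketch of how the callback described above is typically wired up (illustrative only, not part of this commit):

    from tensorpack.callbacks.summary import MovingAverageSummary

    # Runs the EMA-maintaining ops registered via add_moving_summary()
    # (the default MOVING_SUMMARY_OPS_KEY collection) once per step.
    # It does not write summaries itself; that is left to other callbacks
    # such as MergeAllSummaries below.
    cb = MovingAverageSummary()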
@@ -89,6 +93,7 @@ class MergeAllSummaries_RunWithOp(Callback):
 def MergeAllSummaries(period=0, run_alone=False, key=tf.GraphKeys.SUMMARIES):
     """
+    This callback is enabled by default.
     Evaluate all summaries by `tf.summary.merge_all`, and write to logs.
 
     Args:

@@ -101,6 +106,7 @@ def MergeAllSummaries(period=0, run_alone=False, key=tf.GraphKeys.SUMMARIES):
             For :class:`SimpleTrainer`, it needs to be False because summary may
             depend on inputs.
         key (str): the collection of summary tensors. Same as in `tf.summary.merge_all`.
+            Default is ``tf.GraphKeys.SUMMARIES``
 
     Returns:
         a Callback.
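
A hedged sketch of the arguments being documented (illustrative, not part of this commit):

    import tensorflow as tf
    from tensorpack.callbacks.summary import MergeAllSummaries

    # Merge and write all summaries in tf.GraphKeys.SUMMARIES (the default `key`,
    # same as tf.summary.merge_all); a positive `period` also writes them every
    # `period` steps rather than only at the callback's default trigger.
    cb = MergeAllSummaries(period=100, key=tf.GraphKeys.SUMMARIES)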
tensorpack/tfutils/summary.py

@@ -19,8 +19,7 @@ from .tower import get_current_tower_context
 from .symbolic_functions import rms
 from .scope_utils import cached_name_scope
 
-__all__ = ['create_scalar_summary', 'create_image_summary',
-           'add_tensor_summary', 'add_param_summary',
+__all__ = ['add_tensor_summary', 'add_param_summary',
            'add_activation_summary', 'add_moving_summary']
@@ -97,9 +96,9 @@ def add_tensor_summary(x, types, name=None, collections=None,
         x (tf.Tensor): a tensor to summarize
         types (list[str]): can be scalar/histogram/sparsity/mean/rms
         name (str): summary name. Defaults to be the op name.
-        collections (str): same as in `tf.summary.scalar`.
-        main_tower_only (bool): Only run under main training tower. When
-            setting to True, calling this function under other TowerContext
+        collections (list[str]): collections of the summary ops.
+        main_tower_only (bool): Only run under main training tower. If
+            set to True, calling this function under other TowerContext
             has no effect.
 
     Examples:
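
An illustrative call matching the arguments above (a sketch, not the repository's own example; the tensor and names are made up):

    import tensorflow as tf
    from tensorpack.tfutils.summary import add_tensor_summary

    x = tf.nn.relu(tf.random_normal([64, 128]), name='fc0/output')
    # Add histogram, rms and sparsity summaries of one tensor;
    # `collections` is now a list of collection names for the created summary ops.
    add_tensor_summary(x, ['histogram', 'rms', 'sparsity'], name='fc0-activation')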
@@ -141,6 +140,7 @@ def add_activation_summary(x, name=None, collections=None):
     Args:
         x (tf.Tensor): the tensor to summary.
         name (str): if is None, use x.name.
+        collections (list[str]): collections of the summary ops.
     """
     ctx = get_current_tower_context()
     if ctx is not None and not ctx.is_main_training_tower:
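
A one-line illustration of the newly documented ``collections`` argument, reusing the tensor ``x`` from the previous sketch (the collection name is hypothetical):

    from tensorpack.tfutils.summary import add_activation_summary

    # Put the activation summaries into a custom collection instead of the defaults.
    add_activation_summary(x, collections=['my_summaries'])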
@@ -164,7 +164,7 @@ def add_param_summary(*summary_lists, **kwargs):
     Args:
         summary_lists (list): each is (regex, [list of summary type]).
             Summary type is defined in :func:`add_tensor_summary`.
-        kwargs: only ``collections`` is allowed.
+        collections (list[str]): collections of the summary ops.
 
     Examples:
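
An illustrative call for the signature above (a sketch; the regexes depend on your variable names):

    from tensorpack.tfutils.summary import add_param_summary

    # Summarize trainable variables matched by regex, using the summary types
    # defined in add_tensor_summary; the patterns here are only examples.
    add_param_summary(
        ('.*/W', ['histogram', 'rms']),
        ('.*/gamma', ['scalar']),
    )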
@@ -195,8 +195,7 @@ def add_param_summary(*summary_lists, **kwargs):
 def add_moving_summary(*args, **kwargs):
     """
     Enable moving average summary for some tensors.
-    It's only effective in the main training tower, otherwise calling this
-    function is a no-op.
+    This function is a no-op if not calling from main training tower.
 
     Args:
         args: tensors to summary
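
A minimal sketch of the graph-side call (not part of this commit; tensor names are made up):

    import tensorflow as tf
    from tensorpack.tfutils.summary import add_moving_summary

    cost = tf.reduce_mean(tf.random_normal([32]), name='cost')
    accuracy = tf.reduce_mean(tf.random_uniform([32]), name='accuracy')
    # Registers EMA-maintaining ops for these scalars (MOVING_SUMMARY_OPS_KEY by
    # default), which the MovingAverageSummary callback then runs every step.
    # Outside the main training tower the call is a no-op, per the docstring above.
    add_moving_summary(cost, accuracy)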