Commit d46d3926, authored Feb 12, 2018 by Yuxin Wu
Parent: bba7a9f5

update docs

Showing 3 changed files with 20 additions and 15 deletions:

    examples/basics/mnist-tflayers.py    +11  -14
    tensorpack/callbacks/graph.py        +2   -0
    tensorpack/callbacks/trigger.py      +7   -1
examples/basics/mnist-tflayers.py

@@ -44,20 +44,17 @@ class Model(ModelDesc):
         image = image * 2 - 1   # center the pixels values at zero
-        # The context manager `argscope` sets the default option for all the layers under
-        # this context. Here we use 32 channel convolution with shape 3x3
-        with argscope(Conv2D, kernel_shape=3, nl=tf.nn.relu, out_channel=32):
-            l = tf.layers.conv2d(image, 32, 3, padding='same', activation=tf.nn.relu, name='conv0')
-            l = tf.layers.max_pooling2d(l, 2, 2, padding='valid')
-            l = tf.layers.conv2d(l, 32, 3, padding='same', activation=tf.nn.relu, name='conv1')
-            l = tf.layers.conv2d(l, 32, 3, padding='same', activation=tf.nn.relu, name='conv2')
-            l = tf.layers.max_pooling2d(l, 2, 2, padding='valid')
-            l = tf.layers.conv2d(l, 32, 3, padding='same', activation=tf.nn.relu, name='conv3')
-            l = tf.layers.flatten(l)
-            l = tf.layers.dense(l, 512, activation=tf.nn.relu, name='fc0')
-            l = tf.layers.dropout(l, rate=0.5,
-                                  training=get_current_tower_context().is_training)
+        l = tf.layers.conv2d(image, 32, 3, padding='same', activation=tf.nn.relu, name='conv0')
+        l = tf.layers.max_pooling2d(l, 2, 2, padding='valid')
+        l = tf.layers.conv2d(l, 32, 3, padding='same', activation=tf.nn.relu, name='conv1')
+        l = tf.layers.conv2d(l, 32, 3, padding='same', activation=tf.nn.relu, name='conv2')
+        l = tf.layers.max_pooling2d(l, 2, 2, padding='valid')
+        l = tf.layers.conv2d(l, 32, 3, padding='same', activation=tf.nn.relu, name='conv3')
+        l = tf.layers.flatten(l)
+        l = tf.layers.dense(l, 512, activation=tf.nn.relu, name='fc0')
+        l = tf.layers.dropout(l, rate=0.5,
+                              training=get_current_tower_context().is_training)
         logits = tf.layers.dense(l, 10, activation=tf.identity, name='fc1')
         tf.nn.softmax(logits, name='prob')  # a Bx10 with probabilities
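Note: the dropped `argscope(Conv2D, ...)` context manager only sets defaults for tensorpack's own layer wrappers, so it had no effect on the raw `tf.layers` calls kept above. As a rough sketch of how it is actually meant to be used (the `MaxPooling` wrapper and the `Conv2D('name', input)` call form are assumptions about tensorpack's API, along the lines of the companion mnist-convnet.py example, and are not part of this commit):

    # sketch only: argscope sets defaults for tensorpack layers such as Conv2D
    from tensorpack import argscope, Conv2D, MaxPooling

    with argscope(Conv2D, kernel_shape=3, nl=tf.nn.relu, out_channel=32):
        l = Conv2D('conv0', image)      # inherits kernel_shape / nl / out_channel defaults
        l = MaxPooling('pool0', l, 2)
        l = Conv2D('conv1', l)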
tensorpack/callbacks/graph.py

@@ -100,6 +100,8 @@ class ProcessTensors(Callback):
     """
     Fetch extra tensors **along with** each training step,
     and call some function over the values.
+    It uses `_{before,after}_run` method to inject `tf.train.SessionRunHooks`
+    to the session.
     You can use it to print tensors, save tensors to file, etc.

     Examples:
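The docstring's Examples block is collapsed in this view. A minimal usage sketch (the tensor names 'cost' and 'wd_cost' are placeholders, not from this commit):

    # sketch: fetch two named tensors with every training step and print them
    from tensorpack.callbacks import ProcessTensors

    cb = ProcessTensors(
        ['cost', 'wd_cost'],                          # tensor names to fetch each step
        lambda cost, wd: print(cost, wd, cost + wd))  # called with the fetched values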
tensorpack/callbacks/trigger.py

@@ -11,6 +11,9 @@ __all__ = ['PeriodicTrigger', 'PeriodicRunHooks', 'EnableCallbackIf']
 class PeriodicTrigger(ProxyCallback):
     """
     Schedule to trigger a callback every k global steps or every k epochs by its ``trigger()`` method.
+    Note that it does not touch other methods (``before/after_run``,
+    ``trigger_step``, etc).
     """

     _chief_only = False
@@ -99,7 +102,7 @@ class EnableCallbackIf(ProxyCallback):
         """
         Args:
             callback (Callback):
-            pred (self -> bool): a callable predicate
+            pred (self -> bool): a callable predicate. Has to be a pure function.
         """
         self._pred = pred
         super(EnableCallbackIf, self).__init__(callback)
@@ -134,3 +137,6 @@ class EnableCallbackIf(ProxyCallback):
     def _trigger_step(self):
         if self._pred(self):
             super(EnableCallbackIf, self)._trigger_step()
+
+    def __str__(self):
+        return "EnableCallbackIf-" + str(self.cb)
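For context, a rough usage sketch of the two wrappers touched here (`ModelSaver`, the `every_k_epochs` keyword, and the `epoch_num` property are assumptions about tensorpack's API, not part of this commit):

    # sketch: wrap callbacks so they only fire periodically or conditionally
    from tensorpack.callbacks import ModelSaver, PeriodicTrigger, EnableCallbackIf

    callbacks = [
        # call the wrapped callback's trigger() once every 5 epochs
        PeriodicTrigger(ModelSaver(), every_k_epochs=5),
        # enable the wrapped callback only after epoch 10; the predicate receives
        # the EnableCallbackIf instance itself and should be a pure function
        EnableCallbackIf(ModelSaver(), lambda cb: cb.epoch_num > 10),
    ]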