Commit d46d3926 authored by Yuxin Wu

update docs

parent bba7a9f5
@@ -44,20 +44,17 @@ class Model(ModelDesc):
         image = image * 2 - 1   # center the pixels values at zero
-        # The context manager `argscope` sets the default option for all the layers under
-        # this context. Here we use 32 channel convolution with shape 3x3
-        with argscope(Conv2D, kernel_shape=3, nl=tf.nn.relu, out_channel=32):
-            l = tf.layers.conv2d(image, 32, 3, padding='same', activation=tf.nn.relu, name='conv0')
-            l = tf.layers.max_pooling2d(l, 2, 2, padding='valid')
-            l = tf.layers.conv2d(l, 32, 3, padding='same', activation=tf.nn.relu, name='conv1')
-            l = tf.layers.conv2d(l, 32, 3, padding='same', activation=tf.nn.relu, name='conv2')
-            l = tf.layers.max_pooling2d(l, 2, 2, padding='valid')
-            l = tf.layers.conv2d(l, 32, 3, padding='same', activation=tf.nn.relu, name='conv3')
-            l = tf.layers.flatten(l)
-            l = tf.layers.dense(l, 512, activation=tf.nn.relu, name='fc0')
-            l = tf.layers.dropout(l, rate=0.5,
-                                  training=get_current_tower_context().is_training)
-            logits = tf.layers.dense(l, 10, activation=tf.identity, name='fc1')
+        l = tf.layers.conv2d(image, 32, 3, padding='same', activation=tf.nn.relu, name='conv0')
+        l = tf.layers.max_pooling2d(l, 2, 2, padding='valid')
+        l = tf.layers.conv2d(l, 32, 3, padding='same', activation=tf.nn.relu, name='conv1')
+        l = tf.layers.conv2d(l, 32, 3, padding='same', activation=tf.nn.relu, name='conv2')
+        l = tf.layers.max_pooling2d(l, 2, 2, padding='valid')
+        l = tf.layers.conv2d(l, 32, 3, padding='same', activation=tf.nn.relu, name='conv3')
+        l = tf.layers.flatten(l)
+        l = tf.layers.dense(l, 512, activation=tf.nn.relu, name='fc0')
+        l = tf.layers.dropout(l, rate=0.5,
+                              training=get_current_tower_context().is_training)
+        logits = tf.layers.dense(l, 10, activation=tf.identity, name='fc1')
         tf.nn.softmax(logits, name='prob')   # a Bx10 with probabilities
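The removed `argscope` block only affects tensorpack's own layer wrappers, which is presumably why it is dropped once the model is written with raw `tf.layers` calls. For comparison, a minimal sketch of how the same defaults would be applied with tensorpack layers; the name-first call convention (`Conv2D('conv0', image)`, `MaxPooling`, `FullyConnected`) follows tensorpack's classic examples and may differ across versions:

    # Sketch only: tensorpack layers pick up defaults from argscope;
    # tf.layers.* calls do not, so an argscope wrapped around them has no effect.
    from tensorpack import argscope, Conv2D, MaxPooling, FullyConnected

    with argscope(Conv2D, kernel_shape=3, nl=tf.nn.relu, out_channel=32):
        l = Conv2D('conv0', image)        # 3x3, 32 channels, ReLU taken from argscope
        l = MaxPooling('pool0', l, 2)
        l = Conv2D('conv1', l)
        l = Conv2D('conv2', l)
        l = MaxPooling('pool1', l, 2)
        l = Conv2D('conv3', l)
    l = FullyConnected('fc0', l, 512, nl=tf.nn.relu)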
@@ -100,6 +100,8 @@ class ProcessTensors(Callback):
     """
     Fetch extra tensors **along with** each training step,
     and call some function over the values.
+    It uses the `_{before,after}_run` methods to inject `tf.train.SessionRunHooks`
+    into the session.
     You can use it to print tensors, save tensors to file, etc.

     Examples:
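For context, the `{before,after}_run` pair follows TensorFlow's `tf.train.SessionRunHook` protocol: `before_run` returns extra fetches for the step and `after_run` receives their values. A rough, self-contained sketch of that mechanism; the `PrintTensorHook` class and the tensor name `'total_cost'` are made up for illustration, while `ProcessTensors` wraps the same idea in a tensorpack callback:

    import tensorflow as tf

    class PrintTensorHook(tf.train.SessionRunHook):
        """Fetch named tensors with each training step and print them (illustration only)."""
        def __init__(self, names):
            self._names = names

        def before_run(self, run_context):
            graph = tf.get_default_graph()
            fetches = [graph.get_tensor_by_name(n + ':0') for n in self._names]
            return tf.train.SessionRunArgs(fetches)   # extra fetches for this step

        def after_run(self, run_context, run_values):
            print(dict(zip(self._names, run_values.results)))

With the callback above, the equivalent is roughly `ProcessTensors(['total_cost'], lambda c: print(c))` added to the training callbacks.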
@@ -11,6 +11,9 @@ __all__ = ['PeriodicTrigger', 'PeriodicRunHooks', 'EnableCallbackIf']
 class PeriodicTrigger(ProxyCallback):
     """
     Schedule to trigger a callback every k global steps or every k epochs by its ``trigger()`` method.
+
+    Note that it does not touch other methods (``before/after_run``,
+    ``trigger_step``, etc).
     """

     _chief_only = False
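A small usage sketch of the behaviour described in the note above; the `every_k_steps`/`every_k_epochs` keyword names and `ModelSaver` are assumed from tensorpack's callback API:

    from tensorpack import ModelSaver
    from tensorpack.callbacks import PeriodicTrigger

    # Run ModelSaver's trigger() every 5000 steps instead of only at epoch ends.
    # PeriodicTrigger only reschedules trigger(): any before_run/after_run or
    # trigger_step logic of the wrapped callback still runs on every step.
    saver = PeriodicTrigger(ModelSaver(), every_k_steps=5000)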
@@ -99,7 +102,7 @@ class EnableCallbackIf(ProxyCallback):
         """
         Args:
             callback (Callback):
-            pred (self -> bool): a callable predicate
+            pred (self -> bool): a callable predicate. Has to be a pure function.
         """
         self._pred = pred
         super(EnableCallbackIf, self).__init__(callback)
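The pure-function requirement exists because the predicate is evaluated from several hook points (e.g. `_before_run` as well as `_trigger_step`, as in the hunk below), so any side effect would run more than once per step. A sketch, assuming the callback exposes `epoch_num` as tensorpack callbacks normally do:

    from tensorpack import ModelSaver
    from tensorpack.callbacks import EnableCallbackIf

    # Only save checkpoints after the 10th epoch. The lambda receives the
    # EnableCallbackIf instance itself; it reads state but modifies nothing,
    # so it satisfies the pure-function requirement.
    cb = EnableCallbackIf(ModelSaver(), lambda self: self.epoch_num > 10)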
@@ -134,3 +137,6 @@ class EnableCallbackIf(ProxyCallback):
     def _trigger_step(self):
         if self._pred(self):
             super(EnableCallbackIf, self)._trigger_step()
+
+    def __str__(self):
+        return "EnableCallbackIf-" + str(self.cb)