Commit a9563678 authored by Yuxin Wu's avatar Yuxin Wu

update decorator to work better with sphinx

parent 90dd3ef4
...@@ -31,4 +31,4 @@ class ILSVRCMeta(object): ...@@ -31,4 +31,4 @@ class ILSVRCMeta(object):
if __name__ == '__main__': if __name__ == '__main__':
meta = ILSVRCMeta() meta = ILSVRCMeta()
print meta.get_synset_words_1000() print(meta.get_synset_words_1000())
...@@ -5,7 +5,7 @@ ...@@ -5,7 +5,7 @@
import sys, os import sys, os
from scipy.misc import imsave from scipy.misc import imsave
from ..utils.utils import mkdir_p from ..utils.fs import mkdir_p
# TODO name_func to write label? # TODO name_func to write label?
def dump_dataset_images(ds, dirname, max_count=None, index=0): def dump_dataset_images(ds, dirname, max_count=None, index=0):
......
...@@ -23,38 +23,35 @@ def layer_register(summary_activation=False): ...@@ -23,38 +23,35 @@ def layer_register(summary_activation=False):
Can be overridden when creating the layer. Can be overridden when creating the layer.
""" """
def wrapper(func): def wrapper(func):
class WrapedObject(object): @wraps(func)
def __init__(self, func): def wrapped_func(*args, **kwargs):
self.f = func name = args[0]
assert isinstance(name, six.string_types), \
'name must be the first argument. Args: {}'.format(str(args))
args = args[1:]
@wraps(func) do_summary = kwargs.pop(
def __call__(self, *args, **kwargs): 'summary_activation', summary_activation)
name = args[0] inputs = args[0]
assert isinstance(name, six.string_types), \ with tf.variable_scope(name) as scope:
'name must be either the first argument. Args: {}'.format(str(args)) outputs = func(*args, **kwargs)
args = args[1:] if scope.name not in _layer_logged:
# log shape info and add activation
logger.info("{} input: {}".format(
scope.name, get_shape_str(inputs)))
logger.info("{} output: {}".format(
scope.name, get_shape_str(outputs)))
do_summary = kwargs.pop( if do_summary:
'summary_activation', summary_activation) if isinstance(outputs, list):
inputs = args[0] for x in outputs:
with tf.variable_scope(name) as scope: add_activation_summary(x, scope.name)
outputs = self.f(*args, **kwargs) else:
if scope.name not in _layer_logged: add_activation_summary(outputs, scope.name)
# log shape info and add activation _layer_logged.add(scope.name)
logger.info("{} input: {}".format( return outputs
scope.name, get_shape_str(inputs))) wrapped_func.f = func # attribute to access the underlying function object
logger.info("{} output: {}".format( return wrapped_func
scope.name, get_shape_str(outputs)))
if do_summary:
if isinstance(outputs, list):
for x in outputs:
add_activation_summary(x, scope.name)
else:
add_activation_summary(outputs, scope.name)
_layer_logged.add(scope.name)
return outputs
return WrapedObject(func)
return wrapper return wrapper
def shape2d(a): def shape2d(a):
......
...@@ -15,6 +15,9 @@ __all__ = ['FullyConnected'] ...@@ -15,6 +15,9 @@ __all__ = ['FullyConnected']
def FullyConnected(x, out_dim, def FullyConnected(x, out_dim,
W_init=None, b_init=None, W_init=None, b_init=None,
nl=tf.nn.relu, use_bias=True): nl=tf.nn.relu, use_bias=True):
"""
Fully-Connected layer
"""
x = batch_flatten(x) x = batch_flatten(x)
in_dim = x.get_shape().as_list()[1] in_dim = x.get_shape().as_list()[1]
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment