Commit 92748c90 authored by Yuxin Wu's avatar Yuxin Wu

misc cleanups

parent 999846b2
This diff is collapsed.
Casestudies
--------------------
.. toctree::
:maxdepth: 1
colorize
......@@ -359,7 +359,7 @@ def process_signature(app, what, name, obj, options, signature,
signature = re.sub('tensorflow', 'tf', signature)
# add scope name to layer signatures:
if hasattr(obj, 'use_scope') and hasattr(obj, 'symbolic_function'):
if hasattr(obj, 'use_scope'):
if obj.use_scope:
signature = signature[0] + 'variable_scope_name, ' + signature[1:]
elif obj.use_scope is None:
......
......@@ -88,7 +88,7 @@ class OnlineTensorboardExport(Callback):
canvas = cv2.resize(canvas[..., None] * 255, (0, 0), fx=10, fy=10)
self.trainer.monitors.put_image('filter_export', canvas)
# # you might also want to write these images to disk (as in the casestudy from the docs)
# # you might also want to write these images to disk
# cv2.imwrite("export/out%04i.jpg" % self.cc, canvas)
# self.cc += 1
......
......@@ -103,8 +103,8 @@ def layer_register(
if use_scope is False:
logger.warn(
"Please call layer {} without the first scope name argument, "
"or register the layer with use_scope=None to allow "
"two calling methods.".format(func.__name__))
"or register the layer with use_scope=None to allow calling it "
"with scope names.".format(func.__name__))
name, inputs = args[0], args[1]
args = args[1:] # actual positional args used to call func
else:
......@@ -147,8 +147,8 @@ def layer_register(
outputs = func(*args, **actual_args)
return outputs
wrapped_func.symbolic_function = func # attribute to access the underlying function object
wrapped_func.use_scope = use_scope
wrapped_func.__argscope_enabled__ = True
_register(func.__name__, wrapped_func)
return wrapped_func
......
......@@ -42,8 +42,8 @@ so that these names will be invisible under `tensorpack.` namespace.
To use these utilities, users are expected to import them explicitly, e.g.:
import tensorpack.tfutils.symbolic_functions as symbf
import tensorpack.tfutils.sessinit as sessinit
"""
__all__.extend(['sessinit', 'summary', 'optimizer',
'sesscreate', 'gradproc', 'varreplace', 'symbolic_functions',
'distributed', 'tower'])
'sesscreate', 'gradproc', 'varreplace',
'tower'])
......@@ -40,14 +40,8 @@ def argscope(layers, **kwargs):
if not isinstance(layers, list):
layers = [layers]
# def _check_args_exist(l):
# args = inspect.getargspec(l).args
# for k, v in six.iteritems(kwargs):
# assert k in args, "No argument {} in {}".format(k, l.__name__)
for l in layers:
assert hasattr(l, 'symbolic_function'), "{} is not a registered layer".format(l.__name__)
# _check_args_exist(l.symbolic_function)
assert hasattr(l, '__argscope_enabled__'), "Argscope not supported for {}".format(l)
# need to deepcopy so that changes to new_scope does not affect outer scope
new_scope = copy.deepcopy(get_arg_scope())
......@@ -119,8 +113,7 @@ def enable_argscope_for_function(func, log_shape=True):
out_tensor_descr.shape.as_list()))
return out_tensor
# argscope requires this property
wrapped_func.symbolic_function = None
wrapped_func.__argscope_enabled__ = True
return wrapped_func
......
......@@ -6,12 +6,12 @@ from ..compat import tfv1 as tf
from tabulate import tabulate
from termcolor import colored
from .common import get_op_tensor_name
from ..utils import logger
__all__ = []
# TODO should also describe model_variables
def describe_trainable_vars():
"""
Print a description of the current model parameters.
......@@ -39,7 +39,7 @@ def describe_trainable_vars():
total += ele
total_bytes += ele * v.dtype.size
data.append([v.name, shape, ele, v.device, v.dtype.base_dtype.name])
data.append([get_op_tensor_name(v.name)[0], shape, ele, v.device, v.dtype.base_dtype.name])
headers = ['name', 'shape', '#elements', 'device', 'dtype']
dtypes = list(set([x[4] for x in data]))
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment