Commit e5837873 authored by Yuxin Wu

rename EmptyObject

parent 73f94df2
@@ -16,10 +16,11 @@ from ..utils.develop import building_rtfd
_LAYER_LOGGED = set()
_LAYER_REGISTERED = {}
-__all__ = ['layer_register', 'disable_layer_logging', 'get_registered_layer', 'EmptyObject']
+__all__ = ['layer_register', 'disable_layer_logging', 'get_registered_layer', 'VariableHolder']
-class EmptyObject(object):
+class VariableHolder(object):
    """ A proxy to access variables defined in a layer. """
    pass
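For context, the renamed class is nothing more than an empty attribute bag: each layer in the hunks below creates one instance, sets the variables it created as attributes, and attaches it to the layer output. A minimal sketch of that internal pattern, assuming TF 1.x-style variable creation (the variable names and shapes are illustrative only, not taken from this commit):

import tensorflow as tf

class VariableHolder(object):
    """ A proxy to access variables defined in a layer. """
    pass

# What a layer does internally: create its variables, then hang them on a
# holder that gets attached to the output tensor (see the hunks below).
W = tf.get_variable('W', shape=[3, 3, 3, 32])  # illustrative shape only
b = tf.get_variable('b', shape=[32])           # illustrative shape only
holder = VariableHolder()
holder.W = W
holder.b = b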
@@ -4,7 +4,7 @@
# Author: Yuxin Wu <ppwwyyxx@gmail.com>
import tensorflow as tf
-from .common import layer_register, EmptyObject
+from .common import layer_register, VariableHolder
from ..utils.argtools import shape2d, shape4d
__all__ = ['Conv2D', 'Deconv2D']
@@ -72,7 +72,7 @@ def Conv2D(x, out_channel, kernel_shape,
conv = tf.concat(outputs, channel_axis)
ret = nl(tf.nn.bias_add(conv, b, data_format=data_format) if use_bias else conv, name='output')
-ret.variables = EmptyObject()
+ret.variables = VariableHolder()
ret.variables.W = W
if use_bias:
    ret.variables.b = b
@@ -166,7 +166,7 @@ def Deconv2D(x, out_shape, kernel_shape,
conv.set_shape(tf.TensorShape([None] + shp3_static))
ret = nl(tf.nn.bias_add(conv, b, data_format=data_format) if use_bias else conv, name='output')
-ret.variables = EmptyObject()
+ret.variables = VariableHolder()
ret.variables.W = W
if use_bias:
    ret.variables.b = b
@@ -5,7 +5,7 @@
import tensorflow as tf
-from .common import layer_register, EmptyObject
+from .common import layer_register, VariableHolder
from ..tfutils import symbolic_functions as symbf
__all__ = ['FullyConnected']
@@ -48,7 +48,7 @@ def FullyConnected(x, out_dim,
prod = tf.nn.xw_plus_b(x, W, b) if use_bias else tf.matmul(x, W)
ret = nl(prod, name='output')
-ret.variables = EmptyObject()
+ret.variables = VariableHolder()
ret.variables.W = W
if use_bias:
    ret.variables.b = b
@@ -5,7 +5,7 @@
import tensorflow as tf
-from .common import layer_register, EmptyObject
+from .common import layer_register, VariableHolder
from .batch_norm import BatchNorm
__all__ = ['Maxout', 'PReLU', 'LeakyReLU', 'BNReLU']
@@ -56,7 +56,7 @@ def PReLU(x, init=0.001, name='output'):
x = ((1 + alpha) * x + (1 - alpha) * tf.abs(x))
ret = tf.multiply(x, 0.5, name=name)
-ret.variables = EmptyObject()
+ret.variables = VariableHolder()
ret.variables.alpha = alpha
return ret
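From model code, the access pattern is unchanged apart from the class name: every layer touched above attaches a VariableHolder to its output, so callers can reach a layer's parameters through the output's variables attribute. A hypothetical usage sketch, with layer arguments and input shape chosen for illustration rather than taken from this commit:

import tensorflow as tf
from tensorpack.models import Conv2D, FullyConnected

image = tf.placeholder(tf.float32, [None, 28, 28, 3], name='input')
conv = Conv2D('conv0', image, out_channel=32, kernel_shape=3)
fc = FullyConnected('fc0', conv, out_dim=10)

# Each output carries the VariableHolder set by its layer.
conv_kernel = conv.variables.W   # the Conv2D kernel
conv_bias = conv.variables.b     # present when use_bias is left at its default (True)
fc_weight = fc.variables.W       # the FullyConnected weight matrix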