Shashank Suhas / seminar-breakout / Commits

Commit e5837873, authored May 05, 2017 by Yuxin Wu
parent 73f94df2

    rename EmptyObject

Showing 4 changed files with 10 additions and 9 deletions (+10 -9):

    tensorpack/models/common.py   +3 -2
    tensorpack/models/conv2d.py   +3 -3
    tensorpack/models/fc.py       +2 -2
    tensorpack/models/nonlin.py   +2 -2
tensorpack/models/common.py

@@ -16,10 +16,11 @@ from ..utils.develop import building_rtfd

 _LAYER_LOGGED = set()
 _LAYER_REGISTERED = {}

-__all__ = ['layer_register', 'disable_layer_logging', 'get_registered_layer', 'EmptyObject']
+__all__ = ['layer_register', 'disable_layer_logging', 'get_registered_layer', 'VariableHolder']


-class EmptyObject(object):
+class VariableHolder(object):
+    """ A proxy to access variables defined in a layer. """
     pass
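For context on what is being renamed: the class is a bare attribute container that layers instantiate and attach to their output tensor as `.variables`, so callers can reach the layer's tf.Variable objects as plain attributes. A minimal self-contained sketch of that pattern (plain Python, with illustrative string values standing in for real tf.Variable objects):

    class VariableHolder(object):
        """ A proxy to access variables defined in a layer. """
        pass

    holder = VariableHolder()
    holder.W = 'kernel variable'   # in tensorpack these would be tf.Variable objects
    holder.b = 'bias variable'     # only set by layers when use_bias is True
    print(holder.W, holder.b)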
tensorpack/models/conv2d.py

@@ -4,7 +4,7 @@
 # Author: Yuxin Wu <ppwwyyxx@gmail.com>

 import tensorflow as tf
-from .common import layer_register, EmptyObject
+from .common import layer_register, VariableHolder
 from ..utils.argtools import shape2d, shape4d

 __all__ = ['Conv2D', 'Deconv2D']

@@ -72,7 +72,7 @@ def Conv2D(x, out_channel, kernel_shape,
         conv = tf.concat(outputs, channel_axis)
     ret = nl(tf.nn.bias_add(conv, b, data_format=data_format) if use_bias else conv, name='output')

-    ret.variables = EmptyObject()
+    ret.variables = VariableHolder()
     ret.variables.W = W
     if use_bias:
         ret.variables.b = b

@@ -166,7 +166,7 @@ def Deconv2D(x, out_shape, kernel_shape,
     conv.set_shape(tf.TensorShape([None] + shp3_static))
     ret = nl(tf.nn.bias_add(conv, b, data_format=data_format) if use_bias else conv, name='output')
-    ret.variables = EmptyObject()
+    ret.variables = VariableHolder()
     ret.variables.W = W
     if use_bias:
         ret.variables.b = b
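The renamed holder is what users touch when they read a layer's parameters back off its output tensor. A hypothetical usage sketch, assuming TF 1.x graph mode and tensorpack's name-first layer calling convention of this era (the import path and call arguments are assumptions for illustration, not part of the commit):

    import tensorflow as tf
    from tensorpack.models import Conv2D

    image = tf.placeholder(tf.float32, [None, 28, 28, 3])
    l = Conv2D('conv0', image, out_channel=32, kernel_shape=3)

    # After this commit the attached object is a VariableHolder; the
    # attribute names (W, and b when use_bias=True) are unchanged.
    kernel = l.variables.W
    bias = l.variables.b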
tensorpack/models/fc.py

@@ -5,7 +5,7 @@
 import tensorflow as tf
-from .common import layer_register, EmptyObject
+from .common import layer_register, VariableHolder
 from ..tfutils import symbolic_functions as symbf

 __all__ = ['FullyConnected']

@@ -48,7 +48,7 @@ def FullyConnected(x, out_dim,
     prod = tf.nn.xw_plus_b(x, W, b) if use_bias else tf.matmul(x, W)
     ret = nl(prod, name='output')
-    ret.variables = EmptyObject()
+    ret.variables = VariableHolder()
     ret.variables.W = W
     if use_bias:
         ret.variables.b = b
tensorpack/models/nonlin.py

@@ -5,7 +5,7 @@
 import tensorflow as tf
-from .common import layer_register, EmptyObject
+from .common import layer_register, VariableHolder
 from .batch_norm import BatchNorm

 __all__ = ['Maxout', 'PReLU', 'LeakyReLU', 'BNReLU']

@@ -56,7 +56,7 @@ def PReLU(x, init=0.001, name='output'):
     x = ((1 + alpha) * x + (1 - alpha) * tf.abs(x))
     ret = tf.multiply(x, 0.5, name=name)
-    ret.variables = EmptyObject()
+    ret.variables = VariableHolder()
     ret.variables.alpha = alpha
     return ret
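A side note on the PReLU context lines above: they compute max(x, alpha*x) through the identity max(x, a*x) = ((1+a)*x + (1-a)*|x|) / 2, which holds for a <= 1 and is why the result is multiplied by 0.5 on the following line. A quick standalone check of that identity (NumPy here purely for illustration; the layer itself uses a tf.Variable alpha):

    import numpy as np

    alpha = 0.001
    x = np.array([-2.0, -0.5, 0.0, 1.5])
    lhs = ((1 + alpha) * x + (1 - alpha) * np.abs(x)) * 0.5
    assert np.allclose(lhs, np.maximum(x, alpha * x))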