Shashank Suhas / seminar-breakout / Commits

Commit 0e7f338c, authored Apr 23, 2016 by Weiran He
parent 67f37f29

    bugfix: Maxout & add NonLinearity Layer

Showing 1 changed file with 11 additions and 3 deletions:

tensorpack/models/nonlin.py  +11 −3
@@ -9,7 +9,7 @@ from copy import copy
 from ._common import *
 from .batch_norm import BatchNorm

-__all__ = ['Maxout', 'PReLU', 'LeakyReLU', 'BNReLU']
+__all__ = ['Maxout', 'PReLU', 'LeakyReLU', 'BNReLU', 'NonLinearity']

 @layer_register(log_shape=False)
 def Maxout(x, num_unit):
@@ -24,7 +24,7 @@ def Maxout(x, num_unit):
     assert len(input_shape) == 4
     ch = input_shape[3]
     assert ch % num_unit == 0
-    x = tf.reshape(x, [-1, input_shape[1], input_shape[2], ch / 3, 3])
+    x = tf.reshape(x, [-1, input_shape[1], input_shape[2], ch / num_unit, num_unit])
     return tf.reduce_max(x, 4, name='output')

 @layer_register(log_shape=False)
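The hunk above is the Maxout bugfix: the channel axis has to be split into ch / num_unit groups of num_unit values before taking the per-group maximum, rather than into hardcoded groups of 3. A minimal NumPy sketch (not part of the commit; the function name and shapes are illustrative only) of the intended computation:

import numpy as np

def maxout_reference(x, num_unit):
    # x is assumed NHWC; split the channel axis into ch // num_unit groups
    # of num_unit values, then take the maximum within each group.
    n, h, w, ch = x.shape
    assert ch % num_unit == 0
    x = x.reshape(n, h, w, ch // num_unit, num_unit)
    return x.max(axis=4)

x = np.random.rand(8, 4, 4, 6).astype('float32')
print(maxout_reference(x, num_unit=2).shape)  # -> (8, 4, 4, 3)

With num_unit=2 and 6 input channels, the old hardcoded reshape into groups of 3 would have produced the wrong grouping; the fix makes the group size follow num_unit.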
@@ -66,10 +66,18 @@ def BNReLU(is_training, **kwargs):
     """
     :param is_traning: boolean
     :param kwargs: args for BatchNorm
-    :returns: a activation function that performs BN + ReLU (a too common combination)
+    :returns: an activation function that performs BN + ReLU (a too common combination)
     """
     def BNReLU(x, name=None):
         x = BatchNorm('bn', x, is_training, **kwargs)
         x = tf.nn.relu(x, name=name)
         return x
     return BNReLU
+
+@layer_register(log_shape=False)
+def NonLinearity(x, nl):
+    """
+    :param input: any tensor.
+    :param nl: any Tensorflow Operation
+    """
+    return nl(x, name='output')
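The new NonLinearity layer simply wraps an arbitrary TensorFlow op as a registered layer. A hedged usage sketch, assuming tensorpack's usual registered-layer calling convention in which the first positional argument is the layer name (the import path, the 'act' name, and the tensor shape below are illustrative assumptions, not taken from the commit):

import tensorflow as tf
from tensorpack.models.nonlin import NonLinearity

x = tf.placeholder(tf.float32, [None, 28, 28, 16])
# Any TensorFlow op of the form op(tensor, name=...) can be passed as nl,
# e.g. tf.nn.relu or tf.tanh; inside the layer it is applied with name='output'.
y = NonLinearity('act', x, tf.nn.relu)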