Shashank Suhas / seminar-breakout · Commits

Commit 229f2dac
Authored Mar 22, 2017 by Yuxin Wu
remove activation summary option. not well-designed
Parent: f6b1499e

Showing 3 changed files, with 6 additions and 21 deletions (+6 −21):
examples/DeepQNetwork/DQN.py    +1 −1
tensorpack/models/common.py     +3 −18
tensorpack/tfutils/argscope.py  +2 −2
examples/DeepQNetwork/DQN.py

@@ -78,7 +78,7 @@ class Model(ModelDesc):
     def _get_DQN_prediction(self, image):
         """ image: [0,255]"""
         image = image / 255.0
-        with argscope(Conv2D, nl=PReLU.f, use_bias=True), \
+        with argscope(Conv2D, nl=PReLU.symbolic_function, use_bias=True), \
                 argscope(LeakyReLU, alpha=0.01):
             l = (LinearWrap(image)
                  .Conv2D('conv0', out_channel=32, kernel_shape=5)
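The only change here is the attribute through which the raw layer function is fetched for the `nl` (nonlinearity) argument: `PReLU.f` becomes `PReLU.symbolic_function`, matching the rename in tensorpack/models/common.py below. A minimal, purely illustrative sketch of why a caller wants the undecorated function as a callback (the names here are made up for the example, not tensorpack's API):

import functools

def toy_layer_register(func):
    # A registered layer expects a scope name as its first argument,
    # so it is not directly usable as a plain Tensor -> Tensor callback.
    @functools.wraps(func)
    def wrapped(name, *args, **kwargs):
        return func(*args, **kwargs)
    wrapped.symbolic_function = func  # the undecorated original, was `.f`
    return wrapped

@toy_layer_register
def ToyPReLU(x):
    return max(x, 0.25 * x)

nl = ToyPReLU.symbolic_function  # plain callback, no scope name needed
print(nl(-2.0))                  # prints -0.5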
tensorpack/models/common.py

@@ -9,7 +9,6 @@ import copy
 from ..tfutils.argscope import get_arg_scope
 from ..tfutils.modelutils import get_shape_str
-from ..tfutils.summary import add_activation_summary
 from ..utils import logger
 from ..utils.develop import building_rtfd
@@ -51,16 +50,12 @@ def disable_layer_logging():
-def layer_register(summary_activation=False, log_shape=True, use_scope=True):
+def layer_register(log_shape=True, use_scope=True):
     """
     Register a layer.
     Args:
-        summary_activation (bool): Define the default behavior of whether to
-            summary the output(activation) of this layer.
-            Can be overriden when creating the layer.
         log_shape (bool): log input/output shape of this layer
         use_scope (bool): whether to call this layer with an extra first argument as scope.
             If set to False, will try to figure out whether the first argument
@@ -75,7 +70,7 @@ def layer_register(
             args = args[1:]  # actual positional args used to call func
             assert isinstance(name, six.string_types), name
         else:
-            assert not log_shape and not summary_activation
+            assert not log_shape
             if isinstance(args[0], six.string_types):
                 name, inputs = args[0], args[1]
                 args = args[1:]  # actual positional args used to call func
@@ -86,18 +81,15 @@ def layer_register(
                     (isinstance(inputs, (list, tuple)) and
                      isinstance(inputs[0], (tf.Tensor, tf.Variable)))):
                 raise ValueError("Invalid inputs to layer: " + str(inputs))
-            do_summary = kwargs.pop('summary_activation', summary_activation)
             # TODO use inspect.getcallargs to enhance?
             # update from current argument scope
             actual_args = copy.copy(get_arg_scope()[func.__name__])
             actual_args.update(kwargs)
-            if name is not None:
+            if name is not None:  # use scope
                 with tf.variable_scope(name) as scope:
                     do_log_shape = log_shape and scope.name not in _LAYER_LOGGED
-                    do_summary = do_summary and scope.name not in _LAYER_LOGGED
                     if do_log_shape:
                         logger.info("{} input: {}".format(scope.name, get_shape_str(inputs)))
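With `kwargs.pop('summary_activation', ...)` gone, a per-call override such as `Conv2D('conv0', ..., summary_activation=True)` would now be forwarded to the underlying layer function and rejected. Callers that want activation summaries can apply them explicitly to the layer output instead. A hedged sketch of that migration, reusing the two-argument `add_activation_summary(x, name)` call shape visible in the removed lines of the next hunk; the import paths and the direct `Conv2D(name, input, ...)` call follow the conventions shown in this commit's diffs but are assumptions, not quoted code:

from tensorpack.models import Conv2D
from tensorpack.tfutils.summary import add_activation_summary

def conv_with_summary(name, x):
    # Explicit replacement for the removed summary_activation=True option.
    l = Conv2D(name, x, out_channel=32, kernel_shape=5)
    add_activation_summary(l, name)  # two-arg form, as in the removed code
    return l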
@@ -109,19 +101,12 @@ def layer_register(
                         logger.info("{} output: {}".format(scope.name, get_shape_str(outputs)))
                         _LAYER_LOGGED.add(scope.name)
-                    if do_summary:
-                        if isinstance(outputs, list):
-                            for x in outputs:
-                                add_activation_summary(x, scope.name)
-                        else:
-                            add_activation_summary(outputs, scope.name)
             else:
                 # run the actual function
                 outputs = func(*args, **actual_args)
             return outputs
-        wrapped_func.f = func  # attribute to access the underlying function object
+        wrapped_func.symbolic_function = func  # attribute to access the underlying function object
         wrapped_func.use_scope = use_scope
         _register(func.__name__, wrapped_func)
         return wrapped_func
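Taken together, the change narrows `layer_register` to scoping, shape logging, and argscope merging, and renames the escape hatch `.f` to `.symbolic_function`. A minimal sketch of the post-commit decorator shape, with the TensorFlow plumbing elided (illustrative only; the real version in tensorpack/models/common.py also validates inputs, creates the variable scope, and merges the argument scope):

import functools

_LAYER_REGISTRY = {}

def layer_register(log_shape=True, use_scope=True):
    def wrapper(func):
        @functools.wraps(func)
        def wrapped_func(*args, **kwargs):
            if use_scope:
                # first positional argument is the variable-scope name
                name, args = args[0], args[1:]
            # (scope creation, shape logging, argscope merging elided)
            return func(*args, **kwargs)
        wrapped_func.symbolic_function = func  # was wrapped_func.f
        wrapped_func.use_scope = use_scope
        _LAYER_REGISTRY[func.__name__] = wrapped_func
        return wrapped_func
    return wrapper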
tensorpack/tfutils/argscope.py

@@ -41,8 +41,8 @@ def argscope(layers, **kwargs):
             assert k in args, "No argument {} in {}".format(k, l.__name__)

     for l in layers:
-        assert hasattr(l, 'f'), "{} is not a registered layer".format(l.__name__)
-        _check_args_exist(l.f)
+        assert hasattr(l, 'symbolic_function'), "{} is not a registered layer".format(l.__name__)
+        _check_args_exist(l.symbolic_function)
     new_scope = copy.copy(get_arg_scope())
     for l in layers:
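Because `argscope` identifies a registered layer purely by duck typing on this attribute, the rename has to land in both files within one commit, or every `argscope` call would trip the assertion. A simplified, self-contained sketch of the check; `inspect.signature` stands in for whatever introspection the 2017-era `_check_args_exist` helper actually used, and `assert_registered` is a made-up name for illustration:

import inspect

def _check_args_exist(func, **kwargs):
    # Reject keyword arguments the underlying layer function does not declare.
    params = inspect.signature(func).parameters
    for k in kwargs:
        assert k in params, "No argument {} in {}".format(k, func.__name__)

def assert_registered(layer, **kwargs):
    # Post-commit duck typing: a registered layer is anything carrying
    # a `symbolic_function` attribute (formerly `f`).
    assert hasattr(layer, 'symbolic_function'), \
        "{} is not a registered layer".format(layer.__name__)
    _check_args_exist(layer.symbolic_function, **kwargs)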