Commit 78ce3a96, authored Jan 01, 2016 by Yuxin Wu (parent d8330092)

    summary for all parameters
Showing 5 changed files with 20 additions and 16 deletions.
example_alexnet.py            +1  -1
example_cifar10.py            +2  -2
example_mnist.py              +1  -1
tensorpack/train.py           +10 -9
tensorpack/utils/summary.py   +6  -3
example_alexnet.py

@@ -72,7 +72,7 @@ def get_model(inputs, is_training):
                      name='regularize_loss')
     tf.add_to_collection(MOVING_SUMMARY_VARS_KEY, wd_cost)
 
-    add_histogram_summary('.*/W')   # monitor histogram of all W
+    add_param_summary('.*/W')   # monitor histogram of all W
     return [prob, nr_wrong], tf.add_n([wd_cost, cost], name='cost')
 
 def get_config():
...
example_cifar10.py

@@ -73,7 +73,7 @@ def get_model(inputs, is_training):
                      name='regularize_loss')
     tf.add_to_collection(MOVING_SUMMARY_VARS_KEY, wd_cost)
 
-    add_histogram_summary('.*/W')   # monitor histogram of all W
+    add_param_summary('.*')   # monitor all variables
     return [prob, nr_wrong], tf.add_n([wd_cost, cost], name='cost')
 
 def get_config():
...

@@ -155,5 +155,5 @@ if __name__ == '__main__':
     if args.load:
         config.session_init = SaverRestore(args.load)
     if args.gpu:
-        config['nr_tower'] = len(args.gpu.split(','))
+        config.nr_tower = len(args.gpu.split(','))
     start_train(config)
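An aside on the second hunk above: the number of towers is now set through attribute access (config.nr_tower) rather than item access (config['nr_tower']). A minimal, purely hypothetical sketch of a config object that supports the new style (not tensorpack's actual config class):

# Hypothetical sketch only -- illustrates the attribute-style access used in the
# hunk above; tensorpack's real config object may be implemented differently.
class ConfigSketch(object):
    def __init__(self):
        self.nr_tower = 1          # number of GPU towers to train on
        self.session_init = None   # how to initialize the session

config = ConfigSketch()
gpu_arg = '0,1,2'                  # stand-in for args.gpu
config.nr_tower = len(gpu_arg.split(','))   # mirrors the new line in the hunk
print(config.nr_tower)             # 3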
example_mnist.py

@@ -82,7 +82,7 @@ def get_model(inputs, is_training):
                      name='regularize_loss')
     tf.add_to_collection(MOVING_SUMMARY_VARS_KEY, wd_cost)
 
-    add_histogram_summary('.*/W')   # monitor histogram of all W
+    add_param_summary('.*/W')   # monitor histogram of all W
     return [prob, nr_wrong], tf.add_n([wd_cost, cost], name='cost')
 
 def get_config():
...
tensorpack/train.py

@@ -140,6 +140,7 @@ def start_train(config):
         grads = []
         for i in range(config.nr_tower):
             with tf.device('/gpu:{}'.format(i)):
+                with tf.name_scope('tower{}'.format(i)) as scope:
                     model_inputs = get_model_inputs()
                     output_vars, cost_var = config.get_model_func(model_inputs, is_training=True)
                     grads.append(
...
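For context on the line added above: ops created inside tf.name_scope('tower{}'.format(i)) get a 'tower<i>/' prefix on their names, so each GPU tower's ops end up grouped. A small standalone illustration (not part of the commit), using the same pre-1.0 TensorFlow graph API as the surrounding code:

# Standalone illustration of the per-tower name_scope; not part of the commit.
import tensorflow as tf

for i in range(2):                                 # assume 2 towers for the example
    with tf.device('/gpu:{}'.format(i)):
        with tf.name_scope('tower{}'.format(i)) as scope:
            x = tf.constant(1.0, name='x')
            print(x.op.name)                       # prints tower0/x, then tower1/x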
tensorpack/utils/summary.py

@@ -32,15 +32,18 @@ def add_activation_summary(x, name=None):
     tf.histogram_summary(name + '/activations', x)
     tf.scalar_summary(name + '/sparsity', tf.nn.zero_fraction(x))
 
-def add_histogram_summary(regex):
+def add_param_summary(regex):
     """
-    Add histogram summary for all trainable variables matching the regex
+    Add summary for all trainable variables matching the regex
     """
     import re
     params = tf.get_collection(tf.GraphKeys.TRAINABLE_VARIABLES)
     for p in params:
         name = p.name
         if re.search(regex, name):
-            tf.histogram_summary(name, p)
+            if p.get_shape().ndims == 0:
+                tf.scalar_summary(name, p)
+            else:
+                tf.histogram_summary(name, p)
 
 def summary_moving_average(cost_var):
...
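To make the behavior change concrete, a hedged usage sketch (the variable names below are invented; it assumes the pre-1.0 TensorFlow summary API used throughout this diff): after this commit a matched scalar variable is logged with scalar_summary, while higher-rank variables still get histogram_summary.

# Illustrative only; the variables below are made up for the example.
import tensorflow as tf
from tensorpack.utils.summary import add_param_summary

with tf.variable_scope('fc0'):
    W = tf.get_variable('W', shape=[784, 10])            # rank 2 -> histogram_summary
temperature = tf.get_variable('temperature', shape=[])   # rank 0 -> scalar_summary

# '.*' matches every trainable variable; the scalar now gets a scalar summary
# instead of a histogram summary.
add_param_summary('.*')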