Shashank Suhas / seminar-breakout / Commits / 7f3baaa1

Commit 7f3baaa1, authored Dec 26, 2015 by ppwwyyxx
Parent: 4267ea4d

    monitor cost

Showing 3 changed files, with 34 additions and 25 deletions:

    example_mnist.py             +11 -10
    utils/extension.py           +10  -4
    utils/symbolic_functions.py  +13 -11
example_mnist.py

@@ -34,6 +34,7 @@ def get_model(input, label):
         output: variable
         cost: scalar variable
     """
+    # use this dropout variable! it will be set to 1 at test time
     keep_prob = tf.placeholder(tf.float32, name='dropout_prob')
     input = tf.reshape(input, [-1, IMAGE_SIZE, IMAGE_SIZE, 1])

@@ -57,10 +58,11 @@ def get_model(input, label):
     fc1 = FullyConnected('lr', fc0, out_dim=10)
     prob = tf.nn.softmax(fc1, name='output')
-    logprob = logSoftmax(fc1)
     y = one_hot(label, NUM_CLASS)
-    cost = tf.reduce_sum(-y * logprob, 1)
-    cost = tf.reduce_mean(cost, name='cost')
+    cost = tf.nn.softmax_cross_entropy_with_logits(fc1, y)
+    #logprob = logSoftmax(fc1)
+    #cost = tf.reduce_sum(-y * logprob, 1)
+    cost = tf.reduce_sum(cost, name='cost')
+    tf.scalar_summary(cost.op.name, cost)
     return prob, cost

@@ -74,28 +76,27 @@ def main():
                      prefix='test', period=2),
                  PeriodicSaver(LOG_DIR, period=2)]
+    optimizer = tf.train.AdamOptimizer(1e-4)
+    sess_config = tf.ConfigProto()
+    sess_config.device_count['GPU'] = 1
     with tf.Graph().as_default():
+        G = tf.get_default_graph()
         input_var = tf.placeholder(tf.float32, shape=(None, IMAGE_SIZE, IMAGE_SIZE), name='input')
         label_var = tf.placeholder(tf.int32, shape=(None,), name='label')
         prob, cost = get_model(input_var, label_var)
-        optimizer = tf.train.AdamOptimizer(1e-4)
         train_op = optimizer.minimize(cost)

         for ext in extensions:
             ext.init()
         summary_op = tf.merge_all_summaries()
-        config = tf.ConfigProto()
-        config.device_count['GPU'] = 1
-        sess = tf.Session(config=config)
+        sess = tf.Session(config=sess_config)
         sess.run(tf.initialize_all_variables())
         summary_writer = tf.train.SummaryWriter(LOG_DIR, graph_def=sess.graph_def)
-        g = tf.get_default_graph()
-        keep_prob = g.get_tensor_by_name('dropout_prob:0')
+        keep_prob = G.get_tensor_by_name('dropout_prob:0')
         with sess.as_default():
             for epoch in count(1):
                 for (img, label) in BatchData(dataset_train, batch_size).get_data():
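The interesting change here is the cost: the hand-rolled cross-entropy (one_hot + logSoftmax + reduce_sum) is replaced by the fused tf.nn.softmax_cross_entropy_with_logits, which returns one cross-entropy value per example; the commit then sums that vector into a scalar named 'cost' and attaches a scalar summary so it can be monitored. Note the final reduction also changed from reduce_mean to reduce_sum, so 'cost' is now a per-batch sum rather than a mean. A minimal numpy sketch (not part of the repo) checking that the two formulations agree:

    import numpy as np

    rng = np.random.RandomState(0)
    logits = rng.randn(4, 10) * 5               # a batch of 4 fake logit rows
    y = np.eye(10)[rng.randint(0, 10, size=4)]  # one-hot labels, like one_hot()

    # stabilized log-softmax, same algebra as logSoftmax() in this commit
    z = logits - logits.max(axis=1, keepdims=True)
    logprob = z - np.log(np.exp(z).sum(axis=1, keepdims=True))

    # old formulation: reduce_sum(-y * logprob, 1), one value per example
    manual = np.sum(-y * logprob, axis=1)

    # what the fused op computes per example:
    # logsumexp(logits) - logit of the true class
    lse = np.log(np.exp(z).sum(axis=1)) + logits.max(axis=1)
    fused = lse - logits[np.arange(4), y.argmax(axis=1)]

    assert np.allclose(manual, fused)
    print(manual.sum())  # the scalar the new code names 'cost' (a batch sum)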
utils/extension.py

@@ -60,21 +60,27 @@ class OnehotClassificationValidation(PeriodicExtension):
             correct = tf.equal(
                 tf.cast(tf.argmax(self.output_var, 1), tf.int32),
                 self.label_var)
-            # TODO: add cost
             self.nr_correct_var = tf.reduce_sum(tf.cast(correct, tf.int32))
+            self.cost_var = self.graph.get_tensor_by_name('cost:0')

     def _trigger(self):
         cnt = 0
         cnt_correct = 0
+        sess = tf.get_default_session()
+        cost_sum = 0
         for (img, label) in self.ds.get_data():
             feed = {self.input_var: img,
                     self.label_var: label,
                     self.dropout_var: 1.0}
             cnt += img.shape[0]
-            cnt_correct += self.nr_correct_var.eval(feed_dict=feed)
+            correct, cost = sess.run(
+                [self.nr_correct_var, self.cost_var], feed_dict=feed)
+            cnt_correct += correct
+            cost_sum += cost
+        cost_sum /= cnt
+        # TODO write to summary?
-        print "Accuracy at epoch {}: {}".format(self.epoch_num, cnt_correct * 1.0 / cnt)
+        print "After epoch {}: acc={}, cost={}".format(self.epoch_num, cnt_correct * 1.0 / cnt, cost_sum)

 class PeriodicSaver(PeriodicExtension):
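Two details worth noting. First, the extension locates the cost tensor by its graph name, 'cost:0' (output 0 of the op named 'cost'), which is exactly why get_model names its final reduce_sum; the same trick fetches 'dropout_prob:0' in main(). Second, because 'cost:0' is a per-batch sum, _trigger accumulates those sums and divides once by the total example count, which stays exact even when the last batch is smaller. A plain-Python sketch with made-up numbers:

    # (batch size, summed cost per batch) -- assumed values for illustration
    batches = [(128, 89.6), (128, 92.1), (32, 47.3)]

    cnt, cost_sum = 0, 0.0
    for n, c in batches:
        cnt += n
        cost_sum += c
    mean_cost = cost_sum / cnt  # exact per-example mean, as in _trigger

    # Averaging per-batch means instead would over-weight the small last batch:
    naive = sum(c / n for n, c in batches) / len(batches)
    print(mean_cost, naive)  # ~0.795 vs ~0.966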
utils/symbolic_functions.py

@@ -8,6 +8,7 @@ import numpy as np
 __all__ = ['one_hot', 'batch_flatten', 'logSoftmax']

 def one_hot(y, num_labels):
     with tf.variable_scope('one_hot'):
         batch_size = tf.size(y)
         y = tf.expand_dims(y, 1)
         indices = tf.expand_dims(tf.range(0, batch_size), 1)

@@ -22,6 +23,7 @@ def batch_flatten(x):
     return tf.reshape(x, [-1, total_dim])

 def logSoftmax(x):
     with tf.variable_scope('logSoftmax'):
         z = x - tf.reduce_max(x, 1, keep_dims=True)
         logprob = z - tf.log(tf.reduce_sum(tf.exp(z), 1, keep_dims=True))
         return logprob
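logSoftmax subtracts the per-row max before exponentiating; the shift cancels algebraically (log softmax(x) = (x - m) - log sum(exp(x - m)) for any m), so it only serves to keep exp() from overflowing. A numpy sketch (not part of the repo) showing why the guard matters:

    import numpy as np

    x = np.array([[1000.0, 1001.0, 1002.0]])  # logits large enough to overflow exp

    with np.errstate(over='ignore', invalid='ignore'):
        # naive log-softmax: exp(1000) overflows float64, result is nan
        naive = np.log(np.exp(x) / np.exp(x).sum(axis=1, keepdims=True))

    z = x - x.max(axis=1, keepdims=True)                       # z <= 0, so exp(z) <= 1
    stable = z - np.log(np.exp(z).sum(axis=1, keepdims=True))  # mirrors logSoftmax

    print(naive)   # [[nan nan nan]]
    print(stable)  # [[-2.4076... -1.4076... -0.4076...]]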