Commit e515ad88, authored Jan 28, 2018 by Yuxin Wu
fix cifar lr decay
parent 2f610df6
Showing 2 changed files with 5 additions and 3 deletions:
examples/cifar-convnet.py: +2, -2
tensorpack/tfutils/optimizer.py: +3, -1
examples/cifar-convnet.py
...
@@ -123,8 +123,8 @@ def get_config(cifar_classnum):
             ModelSaver(),
             InferenceRunner(dataset_test,
                             ScalarStats(['accuracy', 'cost'])),
-            StatMonitorParamSetter('learning_rate', 'val_error', lr_func,
-                                   threshold=0.001, last_k=10),
+            StatMonitorParamSetter('learning_rate', 'validation_accuracy', lr_func,
+                                   threshold=0.001, last_k=10, reverse=True),
         ],
         max_epoch=150,
     )
...
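In this example the InferenceRunner publishes the ScalarStats results under validation-prefixed names ('validation_accuracy', 'validation_cost'). The old call watched 'val_error', which matches neither name, so the learning-rate decay apparently never fired (hence the commit message). The fix watches 'validation_accuracy' instead and passes reverse=True, because an accuracy plateaus from below (it stops increasing) rather than from above, as an error would. Below is a minimal sketch of the plateau test such a callback performs; should_decay and acc_history are hypothetical illustration names, not tensorpack's actual implementation.

def should_decay(history, threshold=0.001, last_k=10, reverse=False):
    """Return True when the monitored stat has plateaued.

    reverse=False: an error-like stat expected to fall; trigger when it
    failed to drop by more than `threshold` in the last `last_k` epochs.
    reverse=True: an accuracy-like stat expected to rise; trigger when it
    failed to climb by more than `threshold`.
    """
    if len(history) < last_k + 1:
        return False  # not enough epochs observed yet
    window = history[-(last_k + 1):]
    if reverse:
        return max(window[1:]) <= window[0] + threshold
    return min(window[1:]) >= window[0] - threshold


# 'validation_accuracy' stuck at 0.85 for 10 epochs: the condition fires.
acc_history = [0.70, 0.80, 0.85] + [0.85] * 10
print(should_decay(acc_history, reverse=True))  # True -> apply lr_func

When the condition fires, StatMonitorParamSetter applies lr_func to the current learning rate, which is how this example's decay schedule is driven by validation accuracy.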
tensorpack/tfutils/optimizer.py
...
@@ -5,7 +5,7 @@
 import tensorflow as tf
 from contextlib import contextmanager
-from .gradproc import FilterNoneGrad
+from .gradproc import FilterNoneGrad, GradientProcessor
 
 __all__ = ['apply_grad_processors', 'ProxyOptimizer',
            'PostProcessOptimizer', 'VariableAssignmentOptimizer',
...
@@ -48,6 +48,8 @@ def apply_grad_processors(opt, gradprocs):
         processors before updating the variables.
     """
     assert isinstance(gradprocs, (list, tuple)), gradprocs
+    for gp in gradprocs:
+        assert isinstance(gp, GradientProcessor), gp
 
 class _ApplyGradientProcessor(ProxyOptimizer):
     def __init__(self, opt, gradprocs):
...
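The optimizer.py change tightens input validation in apply_grad_processors: besides requiring a list or tuple, it now asserts that every element is a GradientProcessor instance, so passing the class itself, or a bare function, fails immediately with an assertion naming the offending element instead of surfacing later inside the wrapped optimizer. A hedged usage sketch, assuming the TF1-era tf.train API and that GlobalNormClip (from tensorpack.tfutils.gradproc) is one of the GradientProcessor subclasses, as it was around this commit:

# Sketch of what the new assertion catches; assumes TF1-era tf.train
# optimizers and tensorpack's GlobalNormClip gradient processor.
import tensorflow as tf
from tensorpack.tfutils.optimizer import apply_grad_processors
from tensorpack.tfutils.gradproc import GlobalNormClip

base_opt = tf.train.MomentumOptimizer(learning_rate=0.1, momentum=0.9)

# OK: a list of GradientProcessor instances.
opt = apply_grad_processors(base_opt, [GlobalNormClip(5)])

# After this commit, both of the following fail fast with an
# AssertionError that prints the offending element:
# apply_grad_processors(base_opt, [GlobalNormClip])  # the class, not an instance
# apply_grad_processors(base_opt, [lambda gv: gv])   # a bare function

Failing fast here is cheap, since gradprocs is a short list checked once, and it localizes the mistake to the call site rather than to the first training step.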