Commit e515ad88 authored by Yuxin Wu's avatar Yuxin Wu

fix cifar lr decay

parent 2f610df6
...@@ -123,8 +123,8 @@ def get_config(cifar_classnum): ...@@ -123,8 +123,8 @@ def get_config(cifar_classnum):
ModelSaver(), ModelSaver(),
InferenceRunner(dataset_test, InferenceRunner(dataset_test,
ScalarStats(['accuracy', 'cost'])), ScalarStats(['accuracy', 'cost'])),
StatMonitorParamSetter('learning_rate', 'val_error', lr_func, StatMonitorParamSetter('learning_rate', 'validation_accuracy', lr_func,
threshold=0.001, last_k=10), threshold=0.001, last_k=10, reverse=True),
], ],
max_epoch=150, max_epoch=150,
) )
......
...@@ -5,7 +5,7 @@ ...@@ -5,7 +5,7 @@
import tensorflow as tf import tensorflow as tf
from contextlib import contextmanager from contextlib import contextmanager
from .gradproc import FilterNoneGrad from .gradproc import FilterNoneGrad, GradientProcessor
__all__ = ['apply_grad_processors', 'ProxyOptimizer', __all__ = ['apply_grad_processors', 'ProxyOptimizer',
'PostProcessOptimizer', 'VariableAssignmentOptimizer', 'PostProcessOptimizer', 'VariableAssignmentOptimizer',
...@@ -48,6 +48,8 @@ def apply_grad_processors(opt, gradprocs): ...@@ -48,6 +48,8 @@ def apply_grad_processors(opt, gradprocs):
processors before updating the variables. processors before updating the variables.
""" """
assert isinstance(gradprocs, (list, tuple)), gradprocs assert isinstance(gradprocs, (list, tuple)), gradprocs
for gp in gradprocs:
assert isinstance(gp, GradientProcessor), gp
class _ApplyGradientProcessor(ProxyOptimizer): class _ApplyGradientProcessor(ProxyOptimizer):
def __init__(self, opt, gradprocs): def __init__(self, opt, gradprocs):
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment