Commit e515ad88 authored by Yuxin Wu's avatar Yuxin Wu

fix cifar lr decay

parent 2f610df6
......@@ -123,8 +123,8 @@ def get_config(cifar_classnum):
ModelSaver(),
InferenceRunner(dataset_test,
ScalarStats(['accuracy', 'cost'])),
StatMonitorParamSetter('learning_rate', 'val_error', lr_func,
threshold=0.001, last_k=10),
StatMonitorParamSetter('learning_rate', 'validation_accuracy', lr_func,
threshold=0.001, last_k=10, reverse=True),
],
max_epoch=150,
)
......
......@@ -5,7 +5,7 @@
import tensorflow as tf
from contextlib import contextmanager
from .gradproc import FilterNoneGrad
from .gradproc import FilterNoneGrad, GradientProcessor
__all__ = ['apply_grad_processors', 'ProxyOptimizer',
'PostProcessOptimizer', 'VariableAssignmentOptimizer',
......@@ -48,6 +48,8 @@ def apply_grad_processors(opt, gradprocs):
processors before updating the variables.
"""
assert isinstance(gradprocs, (list, tuple)), gradprocs
for gp in gradprocs:
assert isinstance(gp, GradientProcessor), gp
class _ApplyGradientProcessor(ProxyOptimizer):
def __init__(self, opt, gradprocs):
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment