Commit b48cd060 authored by Yuxin Wu's avatar Yuxin Wu

use global_step utilities from TF

parent 461d20cd
......@@ -4,11 +4,10 @@
# Author: Yuxin Wu <ppwwyyxx@gmail.com>
import tensorflow as tf
from tensorflow.python.training import training_util
from six.moves import map
from ..utils.argtools import graph_memoized
from ..utils.naming import GLOBAL_STEP_OP_NAME
__all__ = ['get_default_sess_config',
'get_global_step_value',
'get_global_step_var',
......@@ -58,11 +57,9 @@ def get_global_step_var():
scope = tf.get_variable_scope()
assert scope.name == '', \
"The global_step variable should be created under the root variable scope!"
with tf.variable_scope(scope, reuse=False), \
tf.name_scope(None):
var = tf.get_variable(GLOBAL_STEP_OP_NAME,
initializer=tf.constant(0, dtype=tf.int64),
trainable=False, dtype=tf.int64)
assert not scope.reuse, \
"The global_step variable shouldn't be called under a reuse variable scope!"
var = training_util.get_or_create_global_step()
return var
......
......@@ -3,9 +3,6 @@
# Author: Yuxin Wu <ppwwyyxx@gmail.com>
import tensorflow as tf
# Names used to locate the global-step variable/ops in the default graph.
# NOTE(review): this span comes from a diff view — some of these constants may
# have been removed by this commit; verify against the post-commit naming.py.
# this is also the name used by tf.train.get_global_step
GLOBAL_STEP_OP_NAME = 'global_step'
# Op name vs. tensor name: appending ':0' selects the op's first output
# tensor, the form accepted by Graph.get_tensor_by_name().
GLOBAL_STEP_VAR_NAME = 'global_step:0'
# Name of the op that increments the global step (and its output tensor).
GLOBAL_STEP_INCR_OP_NAME = 'global_step_incr'
GLOBAL_STEP_INCR_VAR_NAME = 'global_step_incr:0'
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment