Commit ebf2332b authored by Yuxin Wu's avatar Yuxin Wu

remove some deprecations

parent 709f89a9
......@@ -5,7 +5,6 @@
import tensorflow as tf
from abc import ABCMeta
import six
from ..utils.develop import log_deprecated
from ..tfutils.common import get_op_or_tensor_by_name
__all__ = ['Callback', 'ProxyCallback', 'CallbackFactory']
......@@ -284,12 +283,9 @@ class CallbackFactory(Callback):
Create a callback with some lambdas.
"""
def __init__(self, setup_graph=None, before_train=None, trigger=None,
after_train=None, trigger_epoch=None):
after_train=None):
"""
Each lambda takes ``self`` as the only argument.
Note:
trigger_epoch was deprecated.
"""
self._cb_setup_graph = setup_graph
......@@ -297,10 +293,6 @@ class CallbackFactory(Callback):
self._cb_trigger = trigger
self._cb_after_train = after_train
if trigger_epoch:
self._cb_trigger = trigger_epoch
log_deprecated("CallbackFactory(trigger_epoch=)", "Use trigger instead.", "2017-11-15")
def _setup_graph(self):
if self._cb_setup_graph:
self._cb_setup_graph(self)
......
......@@ -127,10 +127,7 @@ class InferenceRunner(InferenceRunnerBase):
return InferencerToHook(inf, fetches)
def _setup_graph(self):
if self.trainer._API_VERSION == 1 and self.trainer._config.predict_tower is not None:
device = self.trainer._config.predict_tower[0]
else:
device = self._device
device = self._device
assert self.trainer.tower_func is not None, "You must set tower_func of the trainer to use InferenceRunner!"
input_callbacks = self._input_source.setup(self.trainer.inputs_desc)
......
......@@ -110,7 +110,7 @@ class DataFromGenerator(DataFlow):
else:
self._gen = gen
if size is not None:
log_deprecated("DataFromGenerator(size=)", "It doesn't make much sense.")
log_deprecated("DataFromGenerator(size=)", "It doesn't make much sense.", "2018-03-31")
def get_data(self):
# yield from
......
......@@ -516,7 +516,7 @@ class StagingInput(FeedfreeInput):
assert isinstance(input, FeedfreeInput), input
self._input = input
if towers is not None:
log_deprecated("StagingInput(towers=) has no effect! Devices are handled automatically.")
log_deprecated("StagingInput(towers=)", "Devices are handled automatically.", "2018-03-31")
self._nr_stage = nr_stage
self._areas = []
......
......@@ -6,7 +6,6 @@
import tensorflow as tf
from .common import layer_register, VariableHolder, rename_get_variable
from ..utils.argtools import shape2d, shape4d
from ..utils.develop import log_deprecated
__all__ = ['Conv2D', 'Deconv2D']
......@@ -113,16 +112,7 @@ def Deconv2D(x, out_channel, kernel_shape,
in_channel = in_shape[channel_axis]
assert in_channel is not None, "[Deconv2D] Input cannot have unknown channel!"
out_shape = out_channel
if isinstance(out_shape, int):
out_channel = out_shape
else:
log_deprecated("Deconv2D(out_shape=[...])",
"Use an integer 'out_channel' instead!", "2017-11-18")
for k in out_shape:
if not isinstance(k, int):
raise ValueError("[Deconv2D] out_shape {} is invalid!".format(k))
out_channel = out_shape[channel_axis - 1] # out_shape doesn't have batch
assert isinstance(out_channel, int), out_channel
if W_init is None:
W_init = tf.contrib.layers.xavier_initializer_conv2d()
......@@ -141,11 +131,6 @@ def Deconv2D(x, out_channel, kernel_shape,
trainable=True)
ret = layer.apply(x, scope=tf.get_variable_scope())
# Check that we only supports out_shape = in_shape * stride
out_shape3 = ret.get_shape().as_list()[1:]
if not isinstance(out_shape, int):
assert list(out_shape) == out_shape3, "{} != {}".format(out_shape, out_shape3)
ret.variables = VariableHolder(W=layer.kernel)
if use_bias:
ret.variables.b = layer.bias
......
This diff is collapsed.
......@@ -220,7 +220,7 @@ def add_moving_summary(*args, **kwargs):
if not isinstance(args[0], list):
v = args
else:
log_deprecated("Call add_moving_summary with positional args instead of a list!")
log_deprecated("Call add_moving_summary with positional args instead of a list!", eos="2018-02-28")
v = args[0]
for x in v:
assert isinstance(x, tf.Tensor), x
......
......@@ -21,7 +21,7 @@ from ..callbacks.steps import MaintainStepCounter
from .config import TrainConfig, DEFAULT_MONITORS, DEFAULT_CALLBACKS
__all__ = ['StopTraining', 'TrainConfig', 'Trainer']
__all__ = ['StopTraining', 'Trainer']
class StopTraining(BaseException):
......
......@@ -12,7 +12,6 @@ from ..utils import logger
from ..tfutils import (JustCurrentSession, SessionInit)
from ..tfutils.sesscreate import NewSessionCreator
from ..input_source import InputSource
from ..utils.develop import log_deprecated
__all__ = ['TrainConfig', 'DEFAULT_CALLBACKS', 'DEFAULT_MONITORS']
......@@ -151,15 +150,6 @@ class TrainConfig(object):
assert self.nr_tower == 1, "Cannot set both nr_tower and tower in TrainConfig!"
self.tower = tower
predict_tower = kwargs.pop('predict_tower', None)
if predict_tower is not None:
log_deprecated("TrainConfig(predict_tower=)",
"InferenceRunner now accepts a 'device' argument.", "2017-12-31")
self.predict_tower = predict_tower
if isinstance(self.predict_tower, int):
self.predict_tower = [self.predict_tower]
# --------------------------------------------------------------
assert len(kwargs) == 0, 'Unknown arguments: {}'.format(str(kwargs.keys()))
@property
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment