Commit 4057c531 authored by Yuxin Wu's avatar Yuxin Wu

Remove the old ModelDesc interface (which was deprecated 1 year ago).

parent b8a50d72
......@@ -45,11 +45,12 @@ http://tensorpack.readthedocs.io/tutorial/performance-tuning.html
before posting.
If you expect a certain accuracy, we can help with it only in one of these two conditions:
(1) You're unable to match the accuracy documented in tensorpack examples.
(1) You're unable to reproduce the accuracy documented in tensorpack examples.
(2) It appears to be a tensorpack bug.
Otherwise, how to train a model to certain accuracy is a machine learning question and is
not our responsibility to figure out.
Otherwise, how to train a model to a certain accuracy is a machine learning question.
We do not answer machine learning questions and it is your responsibility to
figure out how to make your models more accurate.
### 4. Your environment:
+ Python version:
......
......@@ -8,7 +8,8 @@ so you don't need to look at here very often.
Here is a list of things that have changed, starting from an early version.
TensorFlow itself also changes API and those are not listed here.
+ [2019/03/20] The concept of `InputDesc` was replaced by its equivalent in TF:
+ [2019/03/20](https://github.com/tensorpack/tensorpack/commit/b8a50d72a7c655b6dc6facb17efd74069ba7f86c).
The concept of `InputDesc` was replaced by its equivalent in TF:
`tf.TensorSpec`. This may be a breaking change if you have customized
code that relies on internals of `InputDesc`.
+ [2018/08/27] msgpack is used again for "serialization to disk", because pyarrow
......
......@@ -5,11 +5,8 @@
from collections import namedtuple
import tensorflow as tf
from ..models.regularize import regularize_cost_from_collection
from ..tfutils.tower import get_current_tower_context
from ..utils import logger
from ..utils.argtools import memoized_method
from ..utils.develop import log_deprecated
from ..utils.develop import deprecated
from ..compat import backport_tensor_spec, tfv1
TensorSpec = backport_tensor_spec()
......@@ -88,6 +85,8 @@ class ModelDescBase(object):
inputs = self.inputs()
if isinstance(inputs[0], tf.Tensor):
for p in inputs:
assert "Placeholder" in p.op.type, \
"inputs() have to return TensorSpec or placeholders! Found {} instead.".format(p)
assert p.graph == G, "Placeholders returned by inputs() should be created inside inputs()!"
return [TensorSpec(shape=p.shape, dtype=p.dtype, name=p.name) for p in inputs]
......@@ -99,9 +98,6 @@ class ModelDescBase(object):
"""
return [k.name for k in self.get_input_signature()]
def _get_inputs(self):
    # Stub for the old (pre-`inputs()`) ModelDescBase interface; this commit
    # removes it entirely in favor of overriding `inputs()`.
    # NOTE(review): indentation was stripped by the diff scrape — in the
    # original file this is a method of ModelDescBase.
    raise NotImplementedError()
def inputs(self):
"""
Returns a list of :class:`tf.TensorSpec` or placeholders.
......@@ -133,21 +129,7 @@ class ModelDescBase(object):
may require it to return necessary information to build the trainer.
For example, `SingleCostTrainer` expect this method to return the cost tensor.
"""
assert len(args) == len(self.get_input_signature()), \
"Number of inputs passed to the graph != number of inputs defined " \
"in ModelDesc! ({} != {})".format(len(args), len(self.get_input_signature()))
log_deprecated(
"ModelDescBase._build_graph() interface",
"Use build_graph() instead!",
"2019-03-30")
return self._build_graph(args)
def _build_graph(self, inputs):
"""
This is an alternative interface which takes a list of tensors, instead of positional arguments.
By default :meth:`build_graph` will call this method.
"""
pass
raise NotImplementedError()
class ModelDesc(ModelDescBase):
......@@ -163,31 +145,6 @@ class ModelDesc(ModelDescBase):
"""
def get_cost(self):
    """
    Being deprecated.
    You're recommended to return a cost tensor in :meth:`build_graph` method directly.

    This function takes the `self.cost` tensor defined by :meth:`build_graph`,
    and applies the collection
    ``tf.GraphKeys.REGULARIZATION_LOSSES`` to the cost automatically.
    """
    # Emit a one-time deprecation warning; the interface was scheduled for
    # removal on 2019-03-30 (and this commit deletes it).
    log_deprecated(
        "get_cost() and self.cost",
        "Return the cost tensor directly in build_graph() instead!",
        "2019-03-30")
    cost = self._get_cost()
    # Sum of everything in the REGULARIZATION_LOSSES collection; a constant
    # zero ('Const' op) means the collection was empty.
    reg_cost = regularize_cost_from_collection()
    if reg_cost.op.type != 'Const':
        logger.warn("Regularization losses found in collection, and a 'cost' tensor was "
                    "not returned by `build_graph`. Therefore applying regularization automatically!")
        # Name the summed tensor so it is identifiable in the graph/summaries.
        return tf.add(cost, reg_cost, name='cost_with_regularizer')
    else:
        return cost
def _get_cost(self, *args):
    # Default for the deprecated interface: return the `self.cost` attribute
    # that the user's `build_graph`/`_build_graph` is expected to have set.
    # `*args` is accepted for signature compatibility but ignored here.
    return self.cost
@memoized_method
def get_optimizer(self):
"""
......@@ -199,20 +156,8 @@ class ModelDesc(ModelDescBase):
Returns:
a :class:`tf.train.Optimizer` instance.
"""
try:
ret = self._get_optimizer()
log_deprecated(
"ModelDescBase._get_optimizer() interface",
"Use optimizer() instead!",
"2019-03-30")
return ret
except NotImplementedError:
pass
return self.optimizer()
def _get_optimizer(self):
    # Stub for the old interface. `get_optimizer()` catches this
    # NotImplementedError and falls back to the new `optimizer()` method;
    # this commit removes the old path entirely.
    raise NotImplementedError()
def optimizer(self):
"""
Returns a `tf.train.Optimizer` instance.
......@@ -220,15 +165,6 @@ class ModelDesc(ModelDescBase):
"""
raise NotImplementedError()
@deprecated("Just use `build_graph` instead!")
def _build_graph_get_cost(self, *inputs):
"""
Equivalent to `build_graph`.
Used internally by trainers to get the final cost for optimization in a backward-compatible way.
"""
ret = self.build_graph(*inputs)
if not get_current_tower_context().is_training:
return None # this is the tower function, could be called for inference
if ret is not None:
return ret
else: # the old way, for compatibility
return self.get_cost()
return self.build_graph(*inputs)
......@@ -88,7 +88,7 @@ def launch_train_with_config(config, trainer):
# TowerFuncWrapper is a better abstraction (similar to tf.defun in the future)
trainer.setup_graph(
model.get_input_signature(), input,
model._build_graph_get_cost, model.get_optimizer)
model.build_graph, model.get_optimizer)
_check_unused_regularization()
trainer.train_with_defaults(
callbacks=config.callbacks,
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment