Commit ffd78d7e authored by Yuxin Wu's avatar Yuxin Wu

1. catch closed session in enqueuethread

2. PrintGradient
3. validate boxes before drawing
parent 8c0106e4
......@@ -191,8 +191,11 @@ class EnqueueThread(ShareSessionThread):
self.op.run(feed_dict=feed)
except (tf.errors.CancelledError, tf.errors.OutOfRangeError, DataFlowTerminated):
pass
except Exception:
logger.exception("Exception in {}:".format(self.name))
except Exception as e:
if isinstance(e, RuntimeError) and 'closed Session' in str(e):
pass
else:
logger.exception("Exception in {}:".format(self.name))
finally:
try:
self.close_op.run()
......
......@@ -9,12 +9,12 @@ import re
import six
import inspect
from ..utils import logger
from .symbolic_functions import rms
from .symbolic_functions import rms, print_stat
from .summary import add_moving_summary
__all__ = ['GradientProcessor',
'FilterNoneGrad', 'GlobalNormClip', 'MapGradient', 'SummaryGradient',
'CheckGradient', 'ScaleGradient']
'PrintGradient', 'CheckGradient', 'ScaleGradient']
@six.add_metaclass(ABCMeta)
......@@ -138,28 +138,55 @@ class MapGradient(GradientProcessor):
return ret
class SummaryGradient(MapGradient):
    """
    For each gradient tensor, summary its histogram and add it to moving
    summaries.
    """
    # Set of variable names whose gradient has already been summarized,
    # to avoid duplicate summaries when the processor runs in every tower.
    # TODO this is global. not good.
    _summaried_gradient = set()

    def __init__(self, regex='.*'):
        """
        Args:
            regex(str): same as in :class:`MapGradient`.
        """
        super(SummaryGradient, self).__init__(self._mapper, regex)

    def _mapper(self, grad, var):
        # Key the dedup set by the variable's op name so each gradient is
        # summarized at most once across towers.
        name = var.op.name
        if name not in SummaryGradient._summaried_gradient:
            SummaryGradient._summaried_gradient.add(name)
            tf.summary.histogram(name + '-grad', grad)
            add_moving_summary(rms(grad, name=name + '/rms'))
        # Return the gradient unchanged; this processor only adds summaries.
        return grad
class PrintGradient(MapGradient):
    """
    Print the gradients every step with :func:`symbolic_functions.print_stat`.
    """
    # Set of variable names already instrumented, so each gradient gets
    # wrapped by print_stat only once even when built in multiple towers.
    # TODO this is global. not good.
    _printed = set()

    def __init__(self, regex='.*'):
        """
        Args:
            regex(str): same as in :class:`MapGradient`.
        """
        super(PrintGradient, self).__init__(self._mapper, regex)

    def _mapper(self, grad, var):
        name = var.op.name
        if name not in PrintGradient._printed:
            PrintGradient._printed.add(name)
            # print_stat returns a new tensor that prints shape/mean/rms
            # of the gradient every time it is evaluated.
            grad = print_stat(grad, message=name + '-grad')
        return grad
class CheckGradient(MapGradient):
"""
Check for numeric issue.
......
......@@ -105,7 +105,7 @@ def print_stat(x, message=None):
"""
if message is None:
message = x.op.name
return tf.Print(x, [tf.shape(x), tf.reduce_mean(x), x], summarize=20,
return tf.Print(x, [tf.shape(x), tf.reduce_mean(x), rms(x), x], summarize=20,
message=message, name='print_' + x.op.name)
......
......@@ -379,7 +379,12 @@ def draw_boxes(im, boxes, labels=None, color=None):
if labels is not None:
assert len(labels) == len(boxes), "{} != {}".format(len(labels), len(boxes))
areas = (boxes[:, 2] - boxes[:, 0] + 1) * (boxes[:, 3] - boxes[:, 1] + 1)
sorted_inds = np.argsort(-areas)
sorted_inds = np.argsort(-areas) # draw large ones first
assert areas.min() > 0, areas.min()
# allow equal, because we are not very strict about rounding error here
assert boxes[:, 0].min() >= 0 and boxes[:, 1].min() >= 0 \
and boxes[:, 2].max() <= im.shape[1] and boxes[:, 3].max() <= im.shape[0], \
"Image shape: {}\n Boxes:\n{}".format(str(im.shape), str(boxes))
im = im.copy()
COLOR = (218, 218, 218) if color is None else color
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment