Shashank Suhas / seminar-breakout

Commit 064ea7c7
authored Jul 30, 2017 by Yuxin Wu

cleaner scope logic in TowerContext

parent 844d8e69

Showing 4 changed files with 37 additions and 23 deletions (+37 -23):

tensorpack/dataflow/base.py (+5 -1)
tensorpack/dataflow/prefetch.py (+10 -3)
tensorpack/graph_builder/input_source.py (+2 -2)
tensorpack/tfutils/tower.py (+20 -17)
tensorpack/dataflow/base.py

```diff
@@ -8,7 +8,11 @@ from abc import abstractmethod, ABCMeta
 import six
 from ..utils import get_rng
 
-__all__ = ['DataFlow', 'ProxyDataFlow', 'RNGDataFlow']
+__all__ = ['DataFlow', 'ProxyDataFlow', 'RNGDataFlow', 'DataFlowTerminated']
+
+
+class DataFlowTerminated(BaseException):
+    pass
 
 
 @six.add_metaclass(ABCMeta)
```
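The new exception derives from BaseException rather than Exception, so a generic `except Exception:` in consumer code does not swallow it; a consumer that wants to shut down cleanly has to catch it by name, which is exactly what the EnqueueThread change further down does. Below is a minimal, self-contained sketch of that behaviour, not tensorpack code itself: the local DataFlowTerminated only mirrors the class added above, and broken_dataflow is a made-up stand-in.

```python
# Sketch only: local stand-ins, not tensorpack itself.
class DataFlowTerminated(BaseException):
    pass

def broken_dataflow():
    yield {"step": 1}
    # the producer process / socket disappeared mid-iteration
    raise DataFlowTerminated()

try:
    for dp in broken_dataflow():
        print("got datapoint:", dp)
except DataFlowTerminated:
    # must be caught explicitly; a plain `except Exception:` would not
    # catch it, because DataFlowTerminated derives from BaseException
    print("dataflow terminated, stopping cleanly")
```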
tensorpack/dataflow/prefetch.py

```diff
@@ -6,11 +6,12 @@ from __future__ import print_function
 import multiprocessing as mp
 import itertools
 from six.moves import range, zip, queue
+import errno
 import uuid
 import os
 import zmq
 
-from .base import ProxyDataFlow
+from .base import ProxyDataFlow, DataFlowTerminated
 from .common import RepeatedData
 from ..utils.concurrency import (ensure_proc_terminate,
                                  mask_sigint, start_proc_mask_signal,
@@ -154,8 +155,14 @@ class PrefetchDataZMQ(ProxyDataFlow):
                 dp = loads(self.socket.recv(copy=False).bytes)
                 yield dp
             except zmq.ContextTerminated:
-                logger.info("ContextTerminated in Master Prefetch Process")
-                return
+                logger.info("[Prefetch Master] Context terminated.")
+                raise DataFlowTerminated()
+            except zmq.ZMQError as e:
+                if e.errno == errno.ENOTSOCK:  # socket closed
+                    logger.info("[Prefetch Master] Socket closed.")
+                    raise DataFlowTerminated()
+                else:
+                    raise
             except:
                 raise
```
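The new handlers translate two transport-level failure modes, zmq.ContextTerminated and a zmq.ZMQError whose errno is errno.ENOTSOCK after the socket is closed, into the single library-level DataFlowTerminated, so consumers never need to know about ZMQ. The following is a rough sketch of that translation pattern written without pyzmq so it runs standalone; TransportTerminated, recv_once, and receive_datapoints are made-up stand-ins, not tensorpack or pyzmq APIs.

```python
import errno

# Stand-ins for the sketch: DataFlowTerminated mirrors the class added in
# base.py; TransportTerminated plays the role of zmq.ContextTerminated.
class DataFlowTerminated(BaseException):
    pass

class TransportTerminated(Exception):
    """Stand-in for zmq.ContextTerminated in this sketch."""

def receive_datapoints(recv):
    """Yield datapoints until the underlying transport is torn down."""
    while True:
        try:
            yield recv()
        except TransportTerminated:
            # context destroyed during shutdown: surface it as end-of-dataflow
            raise DataFlowTerminated()
        except OSError as e:
            if e.errno == errno.ENOTSOCK:  # socket already closed
                raise DataFlowTerminated()
            raise  # anything else is a real error

# usage sketch
messages = iter([{"x": 1}, {"x": 2}])

def recv_once():
    try:
        return next(messages)
    except StopIteration:
        raise TransportTerminated()

try:
    for dp in receive_datapoints(recv_once):
        print(dp)
except DataFlowTerminated:
    print("prefetch transport gone, stopping")
```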
tensorpack/graph_builder/input_source.py

```diff
@@ -13,7 +13,7 @@ from itertools import chain
 from six.moves import range, zip
 
 from .input_source_base import InputSource
-from ..dataflow import DataFlow, RepeatedData
+from ..dataflow import DataFlow, RepeatedData, DataFlowTerminated
 from ..tfutils.summary import add_moving_summary
 from ..tfutils.common import get_op_tensor_name
 from ..tfutils.tower import get_current_tower_context
@@ -186,7 +186,7 @@ class EnqueueThread(ShareSessionThread):
                     feed = dict(zip(self.placehdrs, dp))
                     # print 'qsize:', self.sess.run([self.op, self.size_op], feed_dict=feed)[1]
                     self.op.run(feed_dict=feed)
-            except (tf.errors.CancelledError, tf.errors.OutOfRangeError):
+            except (tf.errors.CancelledError, tf.errors.OutOfRangeError, DataFlowTerminated):
                 pass
             except Exception:
                 logger.exception("Exception in EnqueueThread:")
```
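Adding DataFlowTerminated to the except tuple lets EnqueueThread treat a terminated dataflow like a cancelled or exhausted queue and stop quietly; because the exception derives from BaseException, the broader `except Exception:` fallback below it would not have caught it on its own. A small sketch of that shutdown path, with stand-ins (QueueCancelled, run_enqueue_loop, and dying_dataflow are invented for the example; no TensorFlow required):

```python
import logging

class DataFlowTerminated(BaseException):
    pass

class QueueCancelled(Exception):
    """Stand-in for tf.errors.CancelledError in this sketch."""

def run_enqueue_loop(dataflow, enqueue):
    try:
        for dp in dataflow:
            enqueue(dp)
    except (QueueCancelled, DataFlowTerminated):
        # expected during shutdown on either side: stop quietly
        pass
    except Exception:
        # anything else is a genuine bug worth a full traceback
        logging.exception("Exception in EnqueueThread:")

def dying_dataflow():
    yield 1
    raise DataFlowTerminated()

run_enqueue_loop(dying_dataflow(), enqueue=print)  # prints 1, then returns quietly
```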
tensorpack/tfutils/tower.py
```diff
@@ -19,15 +19,15 @@ class TowerContext(object):
         Args:
             tower_name (str): The name scope of the tower.
             is_training (bool): if None, automatically determine from tower_name.
-            index (int): index of this tower.
+            index (int): index of this tower, only used in training.
             vs_name (str): Open a variable scope with this name, if given.
         """
         self._name = tower_name
         self._is_training = bool(is_training)
 
         if not self._is_training:
-            # TODO ugly
-            assert index == 0 and vs_name == '', "vs_name and index are meaningless in prediction!"
+            assert index == 0 and vs_name == '', \
+                "vs_name and index are only used in prediction!"
 
         self._index = int(index)
         self._vs_name = str(vs_name)
```
```diff
@@ -85,29 +85,32 @@ class TowerContext(object):
             "Nesting TowerContext!"
         _CurrentTowerContext = self
         self._ctxs = []
         curr_vs = tf.get_variable_scope()
         assert curr_vs.name == '', "Nesting TowerContext with an existing variable scope!"
         # assert empty name scope as well (>1.2.1?)
         if len(self._name):
-            if self.has_own_variables:
-                if len(self.vs_name):
-                    self._ctxs.append(tf.variable_scope(self.vs_name))
-                if not self.is_training:
-                    # if not training, should handle reuse outside
-                    # but still good to clear name_scope first
-                    self._ctxs.append(tf.name_scope(None))
-                self._ctxs.append(tf.name_scope(self._name))
-            else:
-                reuse = self._index > 0
-                if reuse is True:
-                    # clear old name_scope (due to the existing variable_scope)
-                    # and re-enter the current variable_scope
-                    self._ctxs.append(tf.name_scope(None))
-                    self._ctxs.append(tf.variable_scope(
-                        tf.get_variable_scope(), reuse=True))
-                self._ctxs.append(tf.name_scope(self._name))
+            if self.is_training:
+                if self.has_own_variables:
+                    if len(self.vs_name):
+                        self._ctxs.append(tf.variable_scope(self.vs_name))
+                    else:
+                        self._ctxs.append(tf.name_scope(self._name))
+                else:
+                    reuse = self._index > 0
+                    if reuse:
+                        self._ctxs.append(tf.variable_scope(
+                            tf.get_variable_scope(), reuse=True))
+                    self._ctxs.append(tf.name_scope(self._name))
+            else:
+                # if not training, should handle reuse outside
+                # but still good to clear name_scope first
+                self._ctxs.append(tf.name_scope(None))
+                self._ctxs.append(tf.name_scope(self._name))
         for c in self._ctxs:
             c.__enter__()
 
-        # currently only check for predictor towers
-        if not self.is_training and get_tf_version_number() >= 1.2:
+        if get_tf_version_number() >= 1.2:
             ns = tf.get_default_graph().get_name_scope()
             assert ns == self._name, \
                 "Name conflict: name_scope inside tower '{}' becomes '{}'!".format(self._name, ns) \
```
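The rewritten `__enter__` decides scopes by `is_training` first and only then by `has_own_variables`, instead of the other way around. The sketch below is only an illustration of that branch order as a pure function; `scopes_to_open` and the `(kind, name)` tuples are invented for the example and are not tensorpack or TensorFlow APIs.

```python
# A TF-free sketch of the branch order in the rewritten TowerContext.__enter__;
# the function name and return format are illustrative only.
def scopes_to_open(name, is_training, has_own_variables, vs_name="", index=0):
    ctxs = []
    if len(name):
        if is_training:
            if has_own_variables:
                # this tower keeps its own copy of the variables
                if len(vs_name):
                    ctxs.append(("variable_scope", vs_name))
                else:
                    ctxs.append(("name_scope", name))
            else:
                # towers share variables; replicas beyond the first reuse them
                if index > 0:
                    ctxs.append(("variable_scope", "<current scope, reuse=True>"))
                ctxs.append(("name_scope", name))
        else:
            # prediction tower: clear any inherited name scope, then open its own
            ctxs.append(("name_scope", None))
            ctxs.append(("name_scope", name))
    return ctxs

# e.g. the second tower of shared-variable data-parallel training:
print(scopes_to_open("tower1", is_training=True, has_own_variables=False, index=1))
# [('variable_scope', '<current scope, reuse=True>'), ('name_scope', 'tower1')]
```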