Skip to content
Projects
Groups
Snippets
Help
Loading...
Help
Support
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in
Toggle navigation
S
seminar-breakout
Project overview
Project overview
Details
Activity
Releases
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Issues
0
Issues
0
List
Boards
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Analytics
Analytics
CI / CD
Repository
Value Stream
Wiki
Wiki
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
Shashank Suhas
seminar-breakout
Commits
71c879bc
Commit
71c879bc
authored
Sep 02, 2019
by
Yuxin Wu
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
move build_or_reuse_placeholder to input_source
parent
4dadc6f0
Changes
5
Hide whitespace changes
Inline
Side-by-side
Showing
5 changed files
with
36 additions
and
34 deletions
+36
-34
docs/modules/graph_builder.rst
docs/modules/graph_builder.rst
+3
-0
tensorpack/graph_builder/model_desc.py
tensorpack/graph_builder/model_desc.py
+1
-30
tensorpack/graph_builder/utils.py
tensorpack/graph_builder/utils.py
+1
-1
tensorpack/input_source/input_source.py
tensorpack/input_source/input_source.py
+1
-2
tensorpack/input_source/input_source_base.py
tensorpack/input_source/input_source_base.py
+30
-1
No files found.
docs/modules/graph_builder.rst
View file @
71c879bc
tensorpack.graph_builder package
================================
These are some useful functions if you need to write your own trainers.
Note that they may not be well maintained.
.. automodule:: tensorpack.graph_builder
:members:
:undoc-members:
...
...
tensorpack/graph_builder/model_desc.py
View file @
71c879bc
...
...
@@ -17,35 +17,6 @@ TensorSpec = backport_tensor_spec()
__all__
=
[
'InputDesc'
,
'ModelDesc'
,
'ModelDescBase'
]
def build_or_reuse_placeholder(tensor_spec):
    """
    Build a tf.placeholder from the metadata in the given tensor spec, or return an existing one.

    Args:
        tensor_spec (tf.TensorSpec): describes the placeholder's name, dtype and shape.

    Returns:
        tf.Tensor: an existing placeholder with a matching name and an exactly
            matching shape, or a newly created placeholder otherwise.
    """
    g = tfv1.get_default_graph()
    name = tensor_spec.name
    try:
        # Tensor names are "<op_name>:<output_index>"; a placeholder op's
        # single output is always ":0".
        tensor = g.get_tensor_by_name(name + ':0')
        # A same-named tensor that is not a placeholder cannot be reused.
        assert "Placeholder" in tensor.op.type, "Tensor {} exists but is not a placeholder!".format(name)
        assert tensor_spec.is_compatible_with(tensor), \
            "Tensor {} exists but is not compatible with the signature!".format(tensor)
        if tensor.shape == tensor_spec.shape:
            # It might be desirable to use a placeholder of a different shape in some tower
            # (e.g., a less specific shape)
            return tensor
    except KeyError:
        # No tensor with this name exists yet; fall through and create one.
        pass
    with tfv1.name_scope(None):   # clear any name scope it might get called in
        ret = tfv1.placeholder(
            tensor_spec.dtype, shape=tensor_spec.shape, name=tensor_spec.name)
    return ret
class
InputDesc
(
namedtuple
(
'InputDescTuple'
,
[
'type'
,
'shape'
,
'name'
])):
"""
...
...
@@ -65,7 +36,7 @@ class InputDesc(
shape (tuple):
name (str):
"""
# TODO mark deprecated
log_deprecated
(
"InputDesc"
,
"Use tf.TensorSpec instead!"
,
"2020-03-01"
)
assert
isinstance
(
type
,
tf
.
DType
),
type
return
tf
.
TensorSpec
(
shape
=
shape
,
dtype
=
type
,
name
=
name
)
...
...
tensorpack/graph_builder/utils.py
View file @
71c879bc
...
...
@@ -13,7 +13,7 @@ from ..tfutils.varreplace import custom_getter_scope
from
..utils
import
logger
from
..utils.argtools
import
call_only_once
__all__
=
[
"LeastLoadedDeviceSetter"
]
__all__
=
[
"LeastLoadedDeviceSetter"
,
"allreduce_grads"
,
"aggregate_grads"
]
"""
...
...
tensorpack/input_source/input_source.py
View file @
71c879bc
...
...
@@ -18,8 +18,7 @@ from ..tfutils.summary import add_moving_summary
from
..tfutils.tower
import
get_current_tower_context
from
..utils
import
logger
from
..utils.concurrency
import
ShareSessionThread
from
.input_source_base
import
InputSource
from
..graph_builder.model_desc
import
build_or_reuse_placeholder
from
.input_source_base
import
InputSource
,
build_or_reuse_placeholder
try
:
from
tensorflow.python.ops.data_flow_ops
import
StagingArea
...
...
tensorpack/input_source/input_source_base.py
View file @
71c879bc
...
...
@@ -12,11 +12,40 @@ from ..callbacks.base import CallbackFactory
from
..tfutils.common
import
get_op_tensor_name
from
..utils
import
logger
from
..utils.argtools
import
call_only_once
,
memoized_method
from
..
graph_builder.model_desc
import
build_or_reuse_placeholder
from
..
compat
import
tfv1
__all__
=
[
'InputSource'
,
'remap_input_source'
]
def build_or_reuse_placeholder(tensor_spec):
    """
    Build a tf.placeholder from the metadata in the given tensor spec, or return an existing one.

    Args:
        tensor_spec (tf.TensorSpec): describes the placeholder's name, dtype and shape.

    Returns:
        tf.Tensor: an existing placeholder with a matching name and an exactly
            matching shape, or a newly created placeholder otherwise.
    """
    graph = tfv1.get_default_graph()
    spec_name = tensor_spec.name
    try:
        # A placeholder op has one output, so its tensor name ends in ":0".
        existing = graph.get_tensor_by_name(spec_name + ':0')
    except KeyError:
        # Nothing with this name in the graph yet; create a fresh placeholder.
        existing = None
    if existing is not None:
        assert "Placeholder" in existing.op.type, \
            "Tensor {} exists but is not a placeholder!".format(spec_name)
        assert tensor_spec.is_compatible_with(existing), \
            "Tensor {} exists but is not compatible with the signature!".format(existing)
        # It might be desirable to use a placeholder of a different shape in some tower
        # (e.g., a less specific shape), so reuse only on an exact shape match.
        if existing.shape == tensor_spec.shape:
            return existing
    # Clear any name scope this might be called under, so the placeholder
    # gets exactly the requested name.
    with tfv1.name_scope(None):
        return tfv1.placeholder(
            tensor_spec.dtype, shape=tensor_spec.shape, name=tensor_spec.name)
def
get_tensors_inputs
(
placeholders
,
tensors
,
names
):
"""
Args:
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment