Shashank Suhas / seminar-breakout · Commits

Commit e00ec36b, authored Aug 12, 2017 by Yuxin Wu
Parent: 72c7684a

    Clean-up imports so that dataflow can be imported without TF.

Showing 9 changed files with 153 additions and 115 deletions.
.travis.yml                            +3    -1
tensorpack/__init__.py                 +10   -7
tensorpack/dataflow/remote.py          +3    -2
tensorpack/libinfo.py                  +7    -2
tensorpack/user_ops/__init__.py        +0    -21
tensorpack/user_ops/common.py (new)    +20   -0
tensorpack/user_ops/test-recv-op.py    +2    -2
tensorpack/user_ops/zmq_recv.py (new)  +107  -0
tensorpack/utils/serialize.py          +1    -80
.travis.yml

```diff
@@ -38,8 +38,10 @@ install:
   - pip install flake8 scikit-image opencv-python pypandoc
     # here we use opencv-python, but users are in general not recommended to use this package,
     # because it brings issues working with tensorflow on gpu
-  - ./tests/install-tensorflow.sh
   - pip install .
+  # check that dataflow can be imported alone
+  - python -c "import tensorpack.dataflow"
+  - ./tests/install-tensorflow.sh

 before_script:
   - flake8 --version
```
tensorpack/__init__.py

```diff
@@ -3,15 +3,18 @@
 # Author: Yuxin Wu <ppwwyyxx@gmail.com>

-from tensorpack.libinfo import __version__
+from tensorpack.libinfo import __version__, _HAS_TF

 from tensorpack.utils import *
-from tensorpack.models import *
 from tensorpack.dataflow import *

-from tensorpack.callbacks import *
-from tensorpack.tfutils import *
-from tensorpack.train import *
-from tensorpack.graph_builder import *
-from tensorpack.predict import *
+# dataflow can be used alone without installing tensorflow
+if _HAS_TF:
+    from tensorpack.models import *
+    from tensorpack.callbacks import *
+    from tensorpack.tfutils import *
+
+    from tensorpack.train import *
+    from tensorpack.graph_builder import *
+    from tensorpack.predict import *
```
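The net effect is that the dataflow subpackage can be imported without TensorFlow. A minimal sketch of the behaviour this enables, assuming an environment where TensorFlow is *not* installed (the same condition the new Travis step exercises; not part of the commit itself):

```python
# Run where TensorFlow is not installed (illustrative only).
import tensorpack.dataflow as dataflow   # succeeds: no TF import is triggered
print(dataflow.__name__)

# libinfo records whether TF was found; the guarded imports above key off it.
from tensorpack.libinfo import _HAS_TF
print(_HAS_TF)   # False in a TF-free environment
```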
tensorpack/dataflow/remote.py

```diff
@@ -8,7 +8,7 @@ from collections import deque
 from .base import DataFlow, DataFlowReentrantGuard
 from ..utils import logger
 from ..utils.utils import get_tqdm
-from ..utils.serialize import dumps, loads, dumps_for_tfop
+from ..utils.serialize import dumps, loads

 try:
     import zmq
 except ImportError:
@@ -30,7 +30,8 @@ def send_dataflow_zmq(df, addr, hwm=50, print_interval=100, format=None):
     """
     # format (str): The serialization format. ZMQ Op is still not publicly usable now
     #   Default format would use :mod:`tensorpack.utils.serialize`.
-    dump_fn = dumps if format is None else dumps_for_tfop
+    # dump_fn = dumps if format is None else dumps_for_tfop
+    dump_fn = dumps
     ctx = zmq.Context()
     socket = ctx.socket(zmq.PUSH)
     socket.set_hwm(hwm)
```
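Since `send_dataflow_zmq` now always serializes with `dumps`, the receiving end only needs a plain PULL socket plus `loads`. A rough usage sketch (not from the repository; the address, the `FakeData` source, and the bind/connect split are illustrative assumptions):

```python
import zmq
from tensorpack.dataflow import FakeData                  # any DataFlow would do
from tensorpack.dataflow.remote import send_dataflow_zmq
from tensorpack.utils.serialize import loads

ADDR = "ipc:///tmp/df-pipe"   # hypothetical endpoint

def pull_datapoints(n=5):
    """Receive a few datapoints sent by send_dataflow_zmq and deserialize them."""
    ctx = zmq.Context()
    sock = ctx.socket(zmq.PULL)
    sock.bind(ADDR)                  # assumes the sender connect()s to ADDR
    for _ in range(n):
        dp = loads(sock.recv())      # msgpack-encoded list of numpy arrays
        print([getattr(x, 'shape', x) for x in dp])

# In another process: loops over the DataFlow forever and PUSHes datapoints.
# send_dataflow_zmq(FakeData([[4, 3], [4]], size=100), ADDR)
```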
tensorpack/libinfo.py

```diff
@@ -25,7 +25,12 @@ os.environ['TF_ENABLE_WINOGRAD_NONFUSED'] = '1'  # issue#9339
 os.environ['TF_AUTOTUNE_THRESHOLD'] = '3'   # use more warm-up
 os.environ['TF_AVGPOOL_USE_CUDNN'] = '1'  # issue#8566

-import tensorflow as tf  # noqa
-assert int(tf.__version__.split('.')[0]) >= 1, "TF>=1.0 is required!"
+try:
+    import tensorflow as tf  # noqa
+    assert int(tf.__version__.split('.')[0]) >= 1, "TF>=1.0 is required!"
+    _HAS_TF = True
+except ImportError:
+    _HAS_TF = False
+

 __version__ = '0.4.0'
```
tensorpack/user_ops/__init__.py (the compile-and-load logic moves into the new common.py and zmq_recv.py)

```diff
 #!/usr/bin/env python
 # -*- coding: utf-8 -*-
 # File: __init__.py
 # Author: Yuxin Wu <ppwwyyxxc@gmail.com>
-
-from __future__ import print_function
-import tensorflow as tf
-import os
-
-__all__ = ['zmq_recv']
-
-include_dir = tf.sysconfig.get_include()
-file_dir = os.path.dirname(os.path.abspath(__file__))
-compile_cmd = 'INCLUDE_DIR="-isystem {}" make -C "{}"'.format(include_dir, file_dir)
-
-print("Compiling user ops ...")
-ret = os.system(compile_cmd)
-if ret != 0:
-    print("Failed to compile user ops!")
-    zmq_recv = None
-else:
-    recv_mod = tf.load_op_library(
-        os.path.join(file_dir, 'zmq_recv_op.so'))
-    # TODO trigger recompile when load fails
-    zmq_recv = recv_mod.zmq_recv
```
tensorpack/user_ops/common.py (new file, mode 100644)

```python
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: common.py

from __future__ import print_function
import tensorflow as tf
import os


def compile():
    # TODO check modtime?
    include_dir = tf.sysconfig.get_include()
    file_dir = os.path.dirname(os.path.abspath(__file__))
    compile_cmd = 'INCLUDE_DIR="-isystem {}" make -C "{}"'.format(include_dir, file_dir)
    ret = os.system(compile_cmd)
    return ret


if __name__ == '__main__':
    compile()
```
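`compile()` is what `build()` in the new zmq_recv.py calls at import time; since it only shells out to `make`, it can also be invoked by hand. A hypothetical manual rebuild, not shown in the commit:

```python
from tensorpack.user_ops import common   # needs TensorFlow for tf.sysconfig

if common.compile() != 0:                # runs `make` in the user_ops directory
    raise RuntimeError("building zmq_recv_op.so failed")
```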
tensorpack/user_ops/test-recv-op.py

```diff
@@ -10,8 +10,8 @@ import multiprocessing as mp
 import numpy as np

 os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
 import tensorflow as tf  # noqa
-from tensorpack.utils.serialize import dump_tensor_protos, to_tensor_proto  # noqa
-from tensorpack.user_ops import zmq_recv  # noqa
+from tensorpack.user_ops.zmq_recv import (  # noqa
+    zmq_recv, dump_tensor_protos, to_tensor_proto)

 try:
     num = int(sys.argv[1])
```
tensorpack/user_ops/zmq_recv.py (new file, mode 100644)

```python
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: zmq_recv.py

import tensorflow as tf
import struct
import numpy as np
import os

from tensorflow.core.framework.tensor_pb2 import TensorProto
from tensorflow.core.framework import types_pb2 as DataType
# have to import like this: https://github.com/tensorflow/tensorflow/commit/955f038afbeb81302cea43058078e68574000bce

from .common import compile

__all__ = ['zmq_recv', 'dumps_for_tfop',
           'dump_tensor_protos', 'to_tensor_proto']


def build():
    global zmq_recv
    ret = compile()
    if ret != 0:
        zmq_recv = None
    else:
        file_dir = os.path.dirname(os.path.abspath(__file__))
        recv_mod = tf.load_op_library(
            os.path.join(file_dir, 'zmq_recv_op.so'))
        zmq_recv = recv_mod.zmq_recv


build()


_DTYPE_DICT = {
    np.float32: DataType.DT_FLOAT,
    np.float64: DataType.DT_DOUBLE,
    np.int32: DataType.DT_INT32,
    np.int8: DataType.DT_INT8,
    np.uint8: DataType.DT_UINT8,
}
_DTYPE_DICT = {np.dtype(k): v for k, v in _DTYPE_DICT.items()}


# TODO support string tensor and scalar
def to_tensor_proto(arr):
    """
    Convert a numpy array to TensorProto

    Args:
        arr: numpy.ndarray. only supports common numerical types
    """
    dtype = _DTYPE_DICT[arr.dtype]
    ret = TensorProto()
    shape = ret.tensor_shape
    for s in arr.shape:
        d = shape.dim.add()
        d.size = s

    ret.dtype = dtype

    buf = arr.tobytes()
    ret.tensor_content = buf
    return ret


def dump_tensor_protos(protos):
    """
    Serialize a list of :class:`TensorProto`, for communication between custom TensorFlow ops.

    Args:
        protos (list): list of :class:`TensorProto` instance

    Notes:
        The format is:

        [#tensors(int32)]
        [tensor1][tensor2]...

        Where each tensor is:

        [dtype(int32)][ndims(int32)][shape[0](int32)]...[shape[n](int32)]
        [len(buffer)(int32)][buffer]
    """
    # TODO use int64
    s = struct.pack('=i', len(protos))
    for p in protos:
        tensor_content = p.tensor_content

        s += struct.pack('=i', int(p.dtype))
        dims = p.tensor_shape.dim
        s += struct.pack('=i', len(dims))
        for k in dims:
            s += struct.pack('=i', k.size)
        s += struct.pack('=i', len(tensor_content))    # won't send stuff over 2G
        s += tensor_content
    return s


def dumps_for_tfop(dp):
    """
    Dump a datapoint (list of nparray) into a format that the ZMQRecv op in tensorpack would accept.
    """
    protos = [to_tensor_proto(arr) for arr in dp]
    return dump_tensor_protos(protos)
```
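For reference, the byte layout documented in `dump_tensor_protos` can be decoded with plain `struct` calls. A small reader sketch, illustrative only (the helper name and the inverse dtype table are not part of the commit):

```python
import struct
import numpy as np
from tensorflow.core.framework import types_pb2 as DataType

# inverse of _DTYPE_DICT above, restricted to the same numerical types
_ENUM_TO_NP = {
    DataType.DT_FLOAT: np.float32,
    DataType.DT_DOUBLE: np.float64,
    DataType.DT_INT32: np.int32,
    DataType.DT_INT8: np.int8,
    DataType.DT_UINT8: np.uint8,
}

def read_tensor_protos(buf):
    """Parse [#tensors][dtype][ndims][shape...][len][buffer]... back into numpy arrays."""
    off = 0

    def read_i32():
        nonlocal off
        (v,) = struct.unpack_from('=i', buf, off)
        off += 4
        return v

    arrs = []
    for _ in range(read_i32()):
        dtype = _ENUM_TO_NP[read_i32()]
        shape = [read_i32() for _ in range(read_i32())]
        nbytes = read_i32()
        arrs.append(np.frombuffer(buf[off:off + nbytes], dtype=dtype).reshape(shape))
        off += nbytes
    return arrs

# round-trip check:
# read_tensor_protos(dumps_for_tfop([np.zeros((4, 3), np.float32)]))
```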
tensorpack/utils/serialize.py

```diff
@@ -6,17 +6,10 @@
 import msgpack
 import msgpack_numpy
-import struct
-import numpy as np
-
-from tensorflow.core.framework.tensor_pb2 import TensorProto
-from tensorflow.core.framework import types_pb2 as DataType
-# have to import like this: https://github.com/tensorflow/tensorflow/commit/955f038afbeb81302cea43058078e68574000bce

 msgpack_numpy.patch()

-__all__ = ['loads', 'dumps', 'dumps_for_tfop',
-           'dump_tensor_protos', 'to_tensor_proto']
+__all__ = ['loads', 'dumps']


 def dumps(obj):
@@ -35,75 +28,3 @@ def loads(buf):
         buf (str): serialized object.
     """
     return msgpack.loads(buf)
```

The remainder of this hunk deletes `_DTYPE_DICT`, `to_tensor_proto`, `dump_tensor_protos`, and `dumps_for_tfop` from this module; the removed code is the same as what tensorpack/user_ops/zmq_recv.py now contains (minus the new docstring on `dumps_for_tfop`), so it is not repeated here.
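After this change, serialize.py only exposes the msgpack-based pair. A quick round-trip sketch of the remaining public API (illustrative, not from the repository):

```python
import numpy as np
from tensorpack.utils.serialize import dumps, loads

dp = [np.arange(12, dtype=np.float32).reshape(3, 4),
      np.array([1, 2, 3], dtype=np.int32)]
buf = dumps(dp)                      # msgpack + msgpack_numpy under the hood
restored = loads(buf)
assert all(np.array_equal(a, b) for a, b in zip(dp, restored))
```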