Project: Shashank Suhas / seminar-breakout

Commit 6bdd0460, authored Jan 23, 2018 by Yuxin Wu
Parent: 1f07de76

Commit message: add plasma store
Showing 2 changed files with 46 additions and 1 deletion (+46 -1):

    tensorpack/dataflow/parallel.py    +44 -0
    tensorpack/models/batch_norm.py    +2  -1
tensorpack/dataflow/parallel.py

@@ -186,6 +186,7 @@ class MultiProcessPrefetchData(ProxyDataFlow):
 PrefetchData = MultiProcessPrefetchData


+# TODO renamed to MultiProcessDataFlow{,ZMQ} if separated to a new project
 class PrefetchDataZMQ(_MultiProcessZMQDataFlow):
     """
     Prefetch data from a DataFlow using multiple processes, with ZeroMQ for
...
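For reference, a minimal usage sketch of PrefetchDataZMQ, the class this hunk annotates. It is not part of this commit; it assumes the nr_proc keyword of this tensorpack version and uses DataFromList with random arrays as a toy source.

import numpy as np
from tensorpack.dataflow import DataFromList
from tensorpack.dataflow.parallel import PrefetchDataZMQ

# A toy source DataFlow: each datapoint is a list with one numpy array.
df = DataFromList([[np.random.rand(64, 64, 3)] for _ in range(100)])
# Run the source in 2 worker processes; datapoints come back over a ZeroMQ pipe.
df = PrefetchDataZMQ(df, nr_proc=2)
df.reset_state()
for dp in df.get_data():
    pass  # consume datapoints in the main process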
@@ -379,3 +380,46 @@ class MultiThreadPrefetchData(DataFlow):
         for p in self.threads:
             p.stop()
             p.join()
+
+
+class PlasmaPutData(ProxyDataFlow):
+    """
+    Put each data point to plasma shared memory object store, and yield the object id instead.
+    """
+    def __init__(self, ds):
+        super(PlasmaPutData, self).__init__(ds)
+
+    def reset_state(self):
+        super(PlasmaPutData, self).reset_state()
+        self.client = plasma.connect("/tmp/plasma", "", 0)
+
+    def get_data(self):
+        for dp in self.ds.get_data():
+            oid = self.client.put(dp)
+            yield [oid.binary()]
+
+
+class PlasmaGetData(ProxyDataFlow):
+    """
+    Take plasma object id from a DataFlow, and retrieve it from plasma shared
+    memory object store.
+    """
+    def __init__(self, ds):
+        super(PlasmaGetData, self).__init__(ds)
+
+    def reset_state(self):
+        super(PlasmaGetData, self).reset_state()
+        self.client = plasma.connect("/tmp/plasma", "", 0)
+
+    def get_data(self):
+        for dp in self.ds.get_data():
+            oid = plasma.ObjectID(dp[0])
+            dp = self.client.get(oid)
+            yield dp
+
+
+try:
+    import pyarrow.plasma as plasma
+except ImportError:
+    PlasmaPutData = create_dummy_class('PlasmaPutData', 'pyarrow')   # noqa
+    PlasmaGetData = create_dummy_class('PlasmaGetData', 'pyarrow')   # noqa
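A hedged sketch of how the two new dataflows could be combined with PrefetchDataZMQ, so that only small plasma object ids travel over ZeroMQ while the actual arrays stay in shared memory. It is not part of this commit; it assumes a plasma store server is already listening at /tmp/plasma (for example, started with: plasma_store -s /tmp/plasma -m 1000000000, shipped with pyarrow), and uses a toy source dataflow.

import numpy as np
from tensorpack.dataflow import DataFromList
from tensorpack.dataflow.parallel import PlasmaPutData, PlasmaGetData, PrefetchDataZMQ

# Toy source: each datapoint is a list with one large numpy array.
df = DataFromList([[np.random.rand(224, 224, 3)] for _ in range(100)])
df = PlasmaPutData(df)               # workers put each datapoint into plasma, yield only its object id
df = PrefetchDataZMQ(df, nr_proc=2)  # only the small binary object ids cross the ZeroMQ pipe
df = PlasmaGetData(df)               # the consumer fetches the real datapoint back from shared memory
df.reset_state()
for dp in df.get_data():
    print(dp[0].shape)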
tensorpack/models/batch_norm.py

@@ -181,7 +181,7 @@ def BatchNorm(x, use_local_stat=None, decay=0.9, epsilon=1e-5,
 @layer_register()
 def BatchRenorm(x, rmax, dmax, decay=0.9, epsilon=1e-5,
-                use_scale=True, use_bias=True, data_format='NHWC'):
+                use_scale=True, use_bias=True, gamma_init=None, data_format='NHWC'):
     """
     Batch Renormalization layer, as described in the paper:
     `Batch Renormalization: Towards Reducing Minibatch Dependence in Batch-Normalized Models
...
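The change above adds a gamma_init argument to BatchRenorm. A hedged sketch of calling the registered layer with it (not from this commit): it assumes tensorpack's convention of passing the variable-scope name first, that BatchRenorm is importable from tensorpack.models, and that a TowerContext is active, as it would be inside normal model-building code; the rmax/dmax values are placeholders.

import tensorflow as tf
from tensorpack.models import BatchRenorm
from tensorpack.tfutils.tower import TowerContext

x = tf.placeholder(tf.float32, [None, 32, 32, 64])   # NHWC input
with TowerContext('', is_training=True):
    # gamma_init lets the caller override the default initializer of the scale (gamma) variable.
    out = BatchRenorm('brn', x, rmax=3.0, dmax=5.0,
                      gamma_init=tf.constant_initializer(0.5))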
@@ -230,6 +230,7 @@ def BatchRenorm(x, rmax, dmax, decay=0.9, epsilon=1e-5,
             'rmax': rmax, 'dmax': dmax},
         renorm_momentum=0.99,
+        gamma_initializer=gamma_init,
         fused=False)
     xn = layer.apply(x, training=ctx.is_training, scope=tf.get_variable_scope())
...
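For context, a standalone TF 1.x sketch of the call pattern this hunk modifies: batch renormalization through tf.layers.BatchNormalization, with an explicit gamma initializer wired through as the new gamma_init argument allows. The concrete axis, momentum, and clipping values below are illustrative, not taken from the commit.

import tensorflow as tf

x = tf.placeholder(tf.float32, [None, 32, 32, 64])            # NHWC input
layer = tf.layers.BatchNormalization(
    axis=3,                                                    # channel axis for NHWC
    momentum=0.9, epsilon=1e-5,
    center=True, scale=True,
    renorm=True,
    renorm_clipping={'rmin': 1.0 / 3.0, 'rmax': 3.0, 'dmax': 5.0},
    renorm_momentum=0.99,
    gamma_initializer=tf.constant_initializer(0.5),            # what gamma_init now controls
    fused=False)
xn = layer.apply(x, training=True)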