seminar-breakout · Commit 1bf2737f
authored Jul 31, 2017 by Yuxin Wu
parent a9cba3c6

hide deprecated methods from documentation
Showing 5 changed files with 27 additions and 25 deletions (+27 −25).
docs/conf.py                         +13  −0
docs/tutorial/trainer.md              +1  −1
tensorpack/libinfo.py                 +6  −4
tensorpack/tfutils/scope_utils.py     +0  −18
tensorpack/train/distributed.py       +7  −2
docs/conf.py

@@ -32,6 +32,7 @@ MOCK_MODULES = ['scipy', 'tabulate',
                 'gym', 'functools32']
 for mod_name in MOCK_MODULES:
     sys.modules[mod_name] = mock.Mock(name=mod_name)
+sys.modules['cv2'].__version__ = '3.2.1'  # fake version

 import tensorpack
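The added line matters because downstream code parses the OpenCV version string, and a bare mock object has no usable __version__. A minimal sketch of the effect (it uses the stdlib unittest.mock for self-containedness; this conf.py itself calls mock.Mock, which behaves the same here):

    import sys
    from unittest import mock

    # Replace cv2 with a mock so Sphinx can import tensorpack without OpenCV installed.
    sys.modules['cv2'] = mock.Mock(name='cv2')
    # Without a real string here, code that parses cv2.__version__
    # (e.g. tensorpack/libinfo.py below) would fail at doc-build time.
    sys.modules['cv2'].__version__ = '3.2.1'  # fake version

    import cv2  # resolves to the mock registered above
    print(int(cv2.__version__.split('.')[0]))  # prints 3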
@@ -349,6 +350,18 @@ def process_signature(app, what, name, obj, options, signature,
     return signature, return_annotation


 def autodoc_skip_member(app, what, name, obj, skip, options):
+    for deprecate in ['DistributedReplicatedTrainer',
+                      'SingleCostFeedfreeTrainer',
+                      'SimpleFeedfreeTrainer',
+                      'FeedfreeTrainerBase',
+                      'FeedfreeInferenceRunner',
+                      'replace_get_variable',
+                      'remap_get_variable',
+                      'freeze_get_variable',
+                      'ParamRestore']:
+        if deprecate in name:
+            return True
     if name in ['get_data', 'size', 'reset_state']:
         # skip these methods with empty docstring
         if not obj.__doc__ and inspect.isfunction(obj):
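For context, autodoc_skip_member only affects the build once it is registered as a handler for Sphinx's autodoc-skip-member event. A minimal, hypothetical registration sketch (the actual setup() in this conf.py is not part of the diff above):

    def autodoc_skip_member(app, what, name, obj, skip, options):
        # Return True to hide the member from the generated docs,
        # or None to fall back to autodoc's default decision.
        if 'DistributedReplicatedTrainer' in name:
            return True
        return None

    def setup(app):
        # Sphinx emits 'autodoc-skip-member' for every member autodoc considers.
        app.connect('autodoc-skip-member', autodoc_skip_member)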
docs/tutorial/trainer.md

@@ -6,7 +6,7 @@ Tensorpack base trainer implements the logic of __running the iteration__.
 Users or derived trainers should implement __what the iteration is__.

 Most neural network training tasks are single-cost optimization.
-Tensorpack provides some trainer implementations for such tasks:
+Tensorpack provides some trainer implementations for such tasks.
 These trainers will build the graph based on the given `ModelDesc`, and minimizes `ModelDesc.cost`.

 Existing trainers were implemented with certain prefetch mechanism,
tensorpack/libinfo.py

-import os
-# issue#7378 may happen with custom opencv. It doesn't hurt to disable opencl
-os.environ['OPENCV_OPENCL_RUNTIME'] = ''
 try:
     # issue#1924 may happen on old systems
     import cv2  # noqa
+    if int(cv2.__version__.split('.')[0]) == 3:
+        cv2.ocl.setUseOpenCL(False)
 except ImportError:
     pass

+import os
+# issue#7378 may happen with custom opencv. It doesn't hurt to disable opencl
+os.environ['OPENCV_OPENCL_RUNTIME'] = ''
 os.environ['TF_ENABLE_WINOGRAD_NONFUSED'] = '1'  # issue#9339
 os.environ['TF_AUTOTUNE_THRESHOLD'] = '3'  # use more warm-up
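This change disables OpenCV's OpenCL path twice: through the OPENCV_OPENCL_RUNTIME environment variable and, for OpenCV 3 builds, through the cv2.ocl API. A quick check sketch (assuming an OpenCV 3.x build that exposes cv2.ocl) to confirm the effect:

    import cv2

    if int(cv2.__version__.split('.')[0]) == 3:
        cv2.ocl.setUseOpenCL(False)
        print("OpenCL available:", cv2.ocl.haveOpenCL())
        print("OpenCL enabled:  ", cv2.ocl.useOpenCL())  # expected: False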
tensorpack/tfutils/scope_utils.py

@@ -5,8 +5,6 @@
 import tensorflow as tf
 import six
-from .common import get_tf_version_number
-from ..utils.develop import deprecated

 if six.PY2:
     import functools32 as functools
 else:

@@ -15,22 +13,6 @@ else:
 __all__ = ['auto_reuse_variable_scope']


-@deprecated("Use tf.get_default_graph().get_name_scope() (available since 1.2.1).")
-def get_name_scope_name():
-    """
-    Returns:
-        str: the name of the current name scope, without the ending '/'.
-    """
-    if get_tf_version_number() > 1.2:
-        return tf.get_default_graph().get_name_scope()
-    else:
-        g = tf.get_default_graph()
-        s = "RANDOM_STR_ABCDEFG"
-        unique = g.unique_name(s)
-        scope = unique[:-len(s)].rstrip('/')
-        return scope
-
-
 def auto_reuse_variable_scope(func):
     """
     A decorator which automatically reuse the current variable scope if the
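The removed helper's deprecation message already names the replacement: tf.get_default_graph().get_name_scope(), available since TensorFlow 1.2.1. A minimal graph-mode sketch of that call:

    import tensorflow as tf

    with tf.name_scope('outer'):
        with tf.name_scope('inner'):
            # Prints 'outer/inner' -- the current name scope, without a trailing '/'.
            print(tf.get_default_graph().get_name_scope())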
tensorpack/train/distributed.py

@@ -13,7 +13,7 @@ from ..callbacks import RunOp
 from ..tfutils.sesscreate import NewSessionCreator
 from ..tfutils.common import get_global_step_var, get_op_tensor_name

-__all__ = ['DistributedReplicatedTrainer']
+__all__ = ['DistributedReplicatedTrainer', 'DistributedTrainerReplicated']


 class OverrideToLocalVariable(object):

@@ -34,7 +34,7 @@ class OverrideToLocalVariable(object):
             return getter(name, *args, **kwargs)


-class DistributedReplicatedTrainer(MultiGPUTrainerBase):
+class DistributedTrainerReplicated(MultiGPUTrainerBase):
     """
     Distributed replicated training.
     Each worker process builds the same model on one or more GPUs.

@@ -323,3 +323,8 @@ class DistributedReplicatedTrainer(MultiGPUTrainerBase):
     @property
     def vs_name_for_predictor(self):
         return "tower0"
+
+
+def DistributedReplicatedTrainer(*args, **kwargs):
+    logger.warn("DistributedReplicatedTrainer was renamed to DistributedTrainerReplicated!")
+    return DistributedTrainerReplicated(*args, **kwargs)
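The final hunk is a backward-compatibility shim: the old name survives as a thin factory that warns and then forwards to the renamed class, so existing user code keeps working. A generic sketch of the same pattern (the names below are illustrative, not tensorpack's):

    import warnings

    class NewTrainer:
        def __init__(self, steps):
            self.steps = steps

    def OldTrainer(*args, **kwargs):
        # Keep old call sites working: warn, then delegate to the renamed class.
        warnings.warn("OldTrainer was renamed to NewTrainer!", DeprecationWarning)
        return NewTrainer(*args, **kwargs)

    t = OldTrainer(steps=100)        # still works, but emits a DeprecationWarning
    assert isinstance(t, NewTrainer)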