Commit cc2df915 authored by Yuxin Wu's avatar Yuxin Wu

hide something from API docs

parent bed3fa19
...@@ -353,11 +353,15 @@ def process_signature(app, what, name, obj, options, signature, ...@@ -353,11 +353,15 @@ def process_signature(app, what, name, obj, options, signature,
return signature, return_annotation return signature, return_annotation
def autodoc_skip_member(app, what, name, obj, skip, options): def autodoc_skip_member(app, what, name, obj, skip, options):
# we hide something deliberately
if getattr(obj, '__HIDE_SPHINX_DOC__', False):
return True
if name == '__init__': if name == '__init__':
if obj.__doc__ and skip: if obj.__doc__ and skip:
# include_init_with_doc doesn't work well for decorated init # include_init_with_doc doesn't work well for decorated init
# https://github.com/sphinx-doc/sphinx/issues/4258 # https://github.com/sphinx-doc/sphinx/issues/4258
return False return False
# hide deprecated stuff
if name in [ if name in [
'MultiGPUTrainerBase', 'MultiGPUTrainerBase',
'FeedfreeInferenceRunner', 'FeedfreeInferenceRunner',
......
...@@ -42,6 +42,7 @@ class DistributedReplicatedBuilder(DataParallelBuilder): ...@@ -42,6 +42,7 @@ class DistributedReplicatedBuilder(DataParallelBuilder):
server = tf.train.Server( server = tf.train.Server(
cluster_spec, job_name=args.job, task_index=args.task, cluster_spec, job_name=args.job, task_index=args.task,
config=get_default_sess_config()) config=get_default_sess_config())
# initialize trainer with this server object
.. code-block:: none .. code-block:: none
......
...@@ -9,6 +9,7 @@ from six.moves import zip ...@@ -9,6 +9,7 @@ from six.moves import zip
from ..utils import logger from ..utils import logger
from ..utils.argtools import call_only_once from ..utils.argtools import call_only_once
from ..utils.naming import TRAIN_TOWER_FREEZE_KEYS, PREDICT_TOWER_FREEZE_KEYS from ..utils.naming import TRAIN_TOWER_FREEZE_KEYS, PREDICT_TOWER_FREEZE_KEYS
from ..utils.develop import HIDE_DOC
from .collection import CollectionGuard from .collection import CollectionGuard
from .common import get_tf_version_number, get_op_or_tensor_by_name, get_op_tensor_name from .common import get_tf_version_number, get_op_or_tensor_by_name, get_op_tensor_name
...@@ -261,11 +262,8 @@ class TowerTensorHandle(object): ...@@ -261,11 +262,8 @@ class TowerTensorHandle(object):
inputs/outputs created in each tower. inputs/outputs created in each tower.
""" """
# TODO hide it from doc @HIDE_DOC
def __init__(self, ctx, input, output, inputs_desc=None): def __init__(self, ctx, input, output, inputs_desc=None):
"""
Don't use it because you never need to create the handle by yourself.
"""
self._ctx = ctx self._ctx = ctx
self._extra_tensor_names = {} self._extra_tensor_names = {}
......
...@@ -10,6 +10,7 @@ from ..tfutils.sesscreate import NewSessionCreator ...@@ -10,6 +10,7 @@ from ..tfutils.sesscreate import NewSessionCreator
from ..utils import logger from ..utils import logger
from ..utils.argtools import map_arg from ..utils.argtools import map_arg
from ..utils.develop import HIDE_DOC
from ..tfutils import get_global_step_var from ..tfutils import get_global_step_var
from ..tfutils.distributed import get_distributed_session_creator from ..tfutils.distributed import get_distributed_session_creator
from ..tfutils.tower import TowerContext from ..tfutils.tower import TowerContext
...@@ -144,8 +145,6 @@ class DistributedTrainerReplicated(SingleCostTrainer): ...@@ -144,8 +145,6 @@ class DistributedTrainerReplicated(SingleCostTrainer):
Args: Args:
gpus (list[int]): list of GPU ids. gpus (list[int]): list of GPU ids.
server (tf.train.Server): the server with ps and workers. server (tf.train.Server): the server with ps and workers.
The job_name must be 'worker' because 'ps' job doesn't need to
build any graph.
""" """
self.server = server self.server = server
self.job_name = server.server_def.job_name self.job_name = server.server_def.job_name
...@@ -196,6 +195,7 @@ class DistributedTrainerReplicated(SingleCostTrainer): ...@@ -196,6 +195,7 @@ class DistributedTrainerReplicated(SingleCostTrainer):
callbacks.append(cb) callbacks.append(cb)
return callbacks return callbacks
@HIDE_DOC
def initialize(self, session_creator, session_init): def initialize(self, session_creator, session_init):
if not isinstance(session_creator, NewSessionCreator) or \ if not isinstance(session_creator, NewSessionCreator) or \
session_creator.user_provided_config: session_creator.user_provided_config:
......
...@@ -122,6 +122,11 @@ def deprecated(text="", eos=""): ...@@ -122,6 +122,11 @@ def deprecated(text="", eos=""):
return deprecated_inner return deprecated_inner
def HIDE_DOC(func):
    """Decorator: mark *func* so the Sphinx autodoc skip hook hides it from API docs.

    Sets the ``__HIDE_SPHINX_DOC__`` flag attribute on the function; the
    ``autodoc-skip-member`` handler checks this attribute and returns True
    (skip) when it is set. The function itself is returned unchanged.
    """
    setattr(func, '__HIDE_SPHINX_DOC__', True)
    return func
# Copied from https://github.com/tensorflow/tensorflow/blob/master/tensorflow/python/util/lazy_loader.py # Copied from https://github.com/tensorflow/tensorflow/blob/master/tensorflow/python/util/lazy_loader.py
class LazyLoader(types.ModuleType): class LazyLoader(types.ModuleType):
def __init__(self, local_name, parent_module_globals, name): def __init__(self, local_name, parent_module_globals, name):
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment