Commit 59dd1aa5 authored by Yuxin Wu

warning about optional dependency

parent 9870d216
@@ -14,7 +14,7 @@ try:
     # not sure does it cause other problems
     __all__ = ['GymEnv']
 except ImportError:
-    logger.warn("Cannot import gym. GymEnv won't be available.")
+    logger.warn_dependency('GymEnv', 'gym')
     __all__ = []
 import threading
...
@@ -30,7 +30,7 @@ __all__ = ['SimulatorProcess', 'SimulatorMaster',
 try:
     import zmq
 except ImportError:
-    logger.warn("Error in 'import zmq'. RL simulator won't be available.")
+    logger.warn_dependency('Simulator', 'zmq')
     __all__ = []
 class TransitionExperience(object):
...
@@ -15,7 +15,7 @@ try:
     from scipy.io import loadmat
     __all__ = ['BSDS500']
 except ImportError:
-    logger.warn("Cannot import scipy. BSDS500 dataset won't be available!")
+    logger.warn_dependency('BSDS500', 'scipy.io')
     __all__ = []
 DATA_URL = "http://www.eecs.berkeley.edu/Research/Projects/CS/vision/grouping/BSR/BSR_bsds500.tgz"
...
@@ -15,7 +15,7 @@ try:
     import scipy.io
     __all__ = ['SVHNDigit']
 except ImportError:
-    logger.warn("Cannot import scipy. SVHNDigit dataset won't be available!")
+    logger.warn_dependency('SVHNDigit', 'scipy.io')
     __all__ = []
 SVHN_URL = "http://ufldl.stanford.edu/housenumbers/"
...
@@ -13,7 +13,7 @@ from .base import RNGDataFlow
 try:
     import h5py
 except ImportError:
-    logger.warn("Error in 'import h5py'. HDF5Data won't be available.")
+    logger.warn_dependency("HDF5Data", 'h5py')
     __all__ = []
 else:
     __all__ = ['HDF5Data']
@@ -21,14 +21,14 @@ else:
 try:
     import lmdb
 except ImportError:
-    logger.warn("Error in 'import lmdb'. LMDBData won't be available.")
+    logger.warn_dependency("LMDBData", 'lmdb')
 else:
     __all__.extend(['LMDBData', 'CaffeLMDB', 'LMDBDataDecoder'])
 try:
     import sklearn.datasets
 except ImportError:
-    logger.warn("Error in 'import sklearn'. SVMLightData won't be available.")
+    logger.warn_dependency('SVMLightData', 'sklearn')
 else:
     __all__.extend(['SVMLightData'])
...
@@ -21,7 +21,7 @@ __all__ = ['PrefetchData', 'BlockParallel']
 try:
     import zmq
 except ImportError:
-    logger.warn("Error in 'import zmq'. PrefetchDataZMQ won't be available.")
+    logger.warn_dependency('PrefetchDataZMQ', 'zmq')
 else:
     __all__.extend(['PrefetchDataZMQ', 'PrefetchOnGPUs'])
...
@@ -9,7 +9,7 @@ from ..utils import logger
 try:
     import tensorflow as tf
 except ImportError:
-    logger.warn("Cannot import tensorflow. TFFuncMapper won't be available.")
+    logger.warn_dependency('TFFuncMapper', 'tensorflow')
     __all__ = []
 else:
     __all__ = ['TFFuncMapper']
...
@@ -69,21 +69,12 @@ class StaticDynamicShape(object):
     def __init__(self, static, dynamic):
         self.static = static
         self.dynamic = dynamic
 
-    def apply_dynamic(self, f):
-        try:
-            return f(self.static)
-        except:
-            return f(self.dynamic)
-
-    def apply_static(self, f):
-        try:
-            return f(self.static)
-        except:
-            return None
-
-    def apply(self, f):
-        return StaticDynamicShape(self.apply_static(f), self.apply_dynamic(f))
+    def apply(self, f):
+        try:
+            st = f(self.static)
+            return StaticDynamicShape(st, st)
+        except:
+            return StaticDynamicShape(None, f(self.dynamic))
 
 @layer_register()
 def Deconv2D(x, out_shape, kernel_shape,
...
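The hunk above folds `apply_static` and `apply_dynamic` into a single `apply`: `f` is tried on the static shape first, and only when that raises does it fall back to the dynamic one. A runnable sketch of that behavior; the trimmed class body mirrors the new version, while the doubling example underneath is purely illustrative:

class StaticDynamicShape(object):
    """Trimmed stand-in for tensorpack's helper: pairs a statically known
    shape value (or None) with its dynamic, symbolic counterpart."""
    def __init__(self, static, dynamic):
        self.static = static      # plain int, or None when unknown at graph-build time
        self.dynamic = dynamic    # symbolic value (a TF tensor in tensorpack)

    def apply(self, f):
        try:
            st = f(self.static)                   # static path succeeds: reuse it for both
            return StaticDynamicShape(st, st)
        except:
            return StaticDynamicShape(None, f(self.dynamic))  # fall back to the dynamic value

# Illustrative use: doubling a known and an unknown dimension.
known = StaticDynamicShape(7, 7).apply(lambda v: v * 2)
unknown = StaticDynamicShape(None, 5).apply(lambda v: v * 2)   # f(None) raises, dynamic path taken
print(known.static, unknown.static, unknown.dynamic)           # -> 14 None 10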
@@ -21,7 +21,7 @@ try:
     else:
         from concurrent.futures import Future
 except ImportError:
-    logger.warn("Cannot import Future in tornado.concurrent. MultiThreadAsyncPredictor won't be available.")
+    logger.warn_dependency('MultiThreadAsyncPredictor', 'tornado.concurrent')
     __all__ = ['MultiProcessPredictWorker', 'MultiProcessQueuePredictWorker']
 else:
     __all__ = ['MultiProcessPredictWorker', 'MultiProcessQueuePredictWorker',
...
@@ -115,3 +115,6 @@ def auto_set_dir(action=None, overwrite=False):
         os.path.join('train_log',
                      basename[:basename.rfind('.')]),
         action=action)
+
+def warn_dependency(name, dependencies):
+    warn("Failed to import '{}', {} won't be available.".format(dependencies, name))
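For readers outside the tensorpack tree, here is a self-contained sketch of the guard pattern the new helper supports. The local `warn_dependency` stand-in and the stdlib `logging` setup are assumptions for illustration only (in this commit the real helper lives next to `auto_set_dir` in the logger module), and `lmdb` simply mirrors one of the hunks above:

import logging

logger = logging.getLogger(__name__)

def warn_dependency(name, dependencies):
    # Same message shape as the helper added above; stdlib logging is used
    # here only so the snippet runs on its own.
    logger.warning("Failed to import '{}', {} won't be available.".format(dependencies, name))

try:
    import lmdb                     # optional dependency
    __all__ = ['LMDBData']
except ImportError:
    warn_dependency('LMDBData', 'lmdb')
    __all__ = []                    # importing this module still succeeds; the feature is just disabled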
@@ -200,7 +200,6 @@ def dump_dataflow_images(df, index=0, batched=True,
                     nr_row=viz[0], nr_col=viz[1], viz=True))
                 vizlist = vizlist[vizsize:]
 
 if __name__ == '__main__':
     import cv2
     imglist = []
...