Skip to content
Projects
Groups
Snippets
Help
Loading...
Help
Support
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in
Toggle navigation
S
seminar-breakout
Project overview
Project overview
Details
Activity
Releases
Repository
Repository
Files
Commits
Branches
Tags
Contributors
Graph
Compare
Issues
0
Issues
0
List
Boards
Labels
Milestones
Merge Requests
0
Merge Requests
0
CI / CD
CI / CD
Pipelines
Jobs
Schedules
Analytics
Analytics
CI / CD
Repository
Value Stream
Wiki
Wiki
Members
Members
Collapse sidebar
Close sidebar
Activity
Graph
Create a new issue
Jobs
Commits
Issue Boards
Open sidebar
Shashank Suhas
seminar-breakout
Commits
5deebdcb
Commit
5deebdcb
authored
Oct 31, 2016
by
Yuxin Wu
Browse files
Options
Browse Files
Download
Email Patches
Plain Diff
fix imports
parent
0b2d375d
Changes
4
Hide whitespace changes
Inline
Side-by-side
Showing
4 changed files
with
4 additions
and
8 deletions
+4
-8
tensorpack/RL/expreplay.py
tensorpack/RL/expreplay.py
+0
-5
tensorpack/callbacks/base.py
tensorpack/callbacks/base.py
+1
-1
tensorpack/callbacks/inference.py
tensorpack/callbacks/inference.py
+1
-1
tensorpack/train/base.py
tensorpack/train/base.py
+2
-1
No files found.
tensorpack/RL/expreplay.py
View file @
5deebdcb
...
...
@@ -33,7 +33,6 @@ class ExpReplay(DataFlow, Callback):
             player,
             batch_size=32,
             memory_size=1e6,
-            populate_size=None,  # deprecated
             init_memory_size=50000,
             exploration=1, end_exploration=0.1,
...
...
@@ -50,10 +49,6 @@ class ExpReplay(DataFlow, Callback):
         :param update_frequency: number of new transitions to add to memory
             after sampling a batch of transitions for training
         """
-        # XXX back-compat
-        if populate_size is not None:
-            logger.warn("populate_size in ExpReplay is deprecated in favor of init_memory_size")
-            init_memory_size = populate_size
         init_memory_size = int(init_memory_size)
         for k, v in locals().items():
...
...
tensorpack/callbacks/base.py
View file @
5deebdcb
...
...
@@ -28,7 +28,7 @@ class Callback(object):
         Called before finalizing the graph.
         Use this callback to setup some ops used in the callback.
-        :param trainer: a :class:`train.Trainer` instance
+        :param trainer: :class:`train.Trainer` instance
         """
         self.trainer = trainer
         self.graph = tf.get_default_graph()
...
...
tensorpack/callbacks/inference.py
View file @
5deebdcb
...
...
@@ -13,7 +13,7 @@ from six.moves import zip, map
 from ..dataflow import DataFlow
 from ..utils import get_tqdm_kwargs, logger
 from ..utils.stat import RatioCounter, BinaryStatistics
-from ..tfutils import get_op_tensor_name, get_op_var_name
+from ..tfutils import get_op_tensor_name
 from .base import Callback

 __all__ = ['InferenceRunner', 'ClassificationError',
...
...
tensorpack/train/base.py
View file @
5deebdcb
...
...
@@ -5,6 +5,7 @@
 from abc import ABCMeta, abstractmethod
 import signal
 import re
+import weakref
 from six.moves import range
 import tqdm
...
...
@@ -108,7 +109,7 @@ class Trainer(object):
         get_global_step_var()  # ensure there is such var, before finalizing the graph
         logger.info("Setup callbacks ...")
         callbacks = self.config.callbacks
-        callbacks.setup_graph(self)  # TODO use weakref instead?
+        callbacks.setup_graph(weakref.proxy(self))
         self._init_summary()
         logger.info("Initializing graph variables ...")
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment