Commit 5a461be1 authored by Yuxin Wu

Pretrained model for FasterRCNN/ShuffleNet

parent 8a341291
@@ -37,6 +37,11 @@ To predict on an image (and show output in a window):
./train.py --predict input.jpg
```
To evaluate the performance (pretrained models can be downloaded from the [model zoo](https://drive.google.com/open?id=1J0xuDAuyOWiuJRm2LfGoz5PUv9_dKuxq)):
```
./train.py --evaluate output.json --load /path/to/model
```
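The `--evaluate` flag writes detections to `output.json`; assuming that file follows the standard COCO detection-results format, the mAP numbers in the Results section below can be reproduced offline with pycocotools. A minimal sketch (the ground-truth annotation path here is a placeholder, not from this commit):

```python
# Sketch: score a COCO-format detection file offline with pycocotools.
from pycocotools.coco import COCO
from pycocotools.cocoeval import COCOeval

coco_gt = COCO('annotations/instances_minival2014.json')  # placeholder ground-truth path
coco_dt = coco_gt.loadRes('output.json')                   # file written by --evaluate

ev = COCOeval(coco_gt, coco_dt, iouType='bbox')
ev.evaluate()
ev.accumulate()
ev.summarize()   # first printed line is mAP @ IoU=0.50:0.95
```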
## Results
Mean Average Precision @IoU=0.50:0.95:
......
@@ -21,3 +21,8 @@ Train (takes 24 hours on 8 Maxwell TitanX):
```bash
./shufflenet.py --data /path/to/ilsvrc/
```
Evaluate the [pretrained model](https://drive.google.com/open?id=1Q1C2BCkbOK2HfhUB0Yfn_W_F68bqnA6y):
```
./shufflenet.py --eval --data /path/to/ilsvrc --load /path/to/model
```
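The `--eval` flag is wired to `eval_on_ILSVRC12` from `imagenet_utils` in the diff below. For orientation, here is a minimal sketch of such an ILSVRC12 evaluation loop using tensorpack's predictor API; the function name and the `wrong-top1`/`wrong-top5` tensor names are assumptions, not taken from this commit:

```python
# Sketch of an ImageNet eval loop with tensorpack; output tensor names are assumed.
from tensorpack.predict import PredictConfig, SimpleDatasetPredictor
from tensorpack.utils.stats import RatioCounter

def eval_classification(model, sessinit, dataflow):
    pred_config = PredictConfig(
        model=model,
        session_init=sessinit,
        input_names=['input', 'label'],
        output_names=['wrong-top1', 'wrong-top5'])  # assumed per-sample error indicators
    pred = SimpleDatasetPredictor(pred_config, dataflow)
    acc1, acc5 = RatioCounter(), RatioCounter()
    for top1_err, top5_err in pred.get_result():
        batch = top1_err.shape[0]
        acc1.feed(top1_err.sum(), batch)
        acc5.feed(top5_err.sum(), batch)
    print("Top1 Error: {}".format(acc1.ratio))
    print("Top5 Error: {}".format(acc5.ratio))
```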
@@ -21,7 +21,8 @@ from tensorpack.tfutils.scope_utils import under_name_scope
 from tensorpack.utils.gpu import get_nr_gpu
 from imagenet_utils import (
-    fbresnet_augmentor, get_imagenet_dataflow, ImageNetModel, GoogleNetResize)
+    fbresnet_augmentor, get_imagenet_dataflow,
+    ImageNetModel, GoogleNetResize, eval_on_ILSVRC12)
 TOTAL_BATCH_SIZE = 256
@@ -178,6 +179,7 @@ if __name__ == '__main__':
     parser.add_argument('--gpu', help='comma separated list of GPU(s) to use.')
     parser.add_argument('--data', help='ILSVRC dataset dir')
     parser.add_argument('--load', help='load model')
+    parser.add_argument('--eval', action='store_true')
     parser.add_argument('--flops', action='store_true', help='print flops and exit')
     args = parser.parse_args()
@@ -186,7 +188,11 @@ if __name__ == '__main__':
     model = Model()
-    if args.flops:
+    if args.eval:
+        batch = 128    # something that can run on one gpu
+        ds = get_data('val', batch)
+        eval_on_ILSVRC12(model, get_model_loader(args.load), ds)
+    elif args.flops:
         # manually build the graph with batch=1
         input_desc = [
             InputDesc(tf.float32, [1, 224, 224, 3], 'input'),
......
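The `elif args.flops:` branch is truncated in this hunk. Once the graph has been built at batch size 1, one common way to print the FLOPs count in TensorFlow 1.x is `tf.profiler`; the snippet below is a sketch of that idea, not the continuation shown in this commit:

```python
# Sketch (TF 1.x): count the float operations of the already-built default graph.
import tensorflow as tf

tf.profiler.profile(
    tf.get_default_graph(),
    cmd='op',
    options=tf.profiler.ProfileOptionBuilder.float_operation())
```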