Commit f873681f authored by Yuxin Wu's avatar Yuxin Wu

bug fix

parent 491e7144
...@@ -79,7 +79,7 @@ class CaffeLayerProcessor(object): ...@@ -79,7 +79,7 @@ class CaffeLayerProcessor(object):
def proc_scale(self, idx, name, param): def proc_scale(self, idx, name, param):
bottom_name = self.net.bottom_names[name][0] bottom_name = self.net.bottom_names[name][0]
# find te bn layer before this scaling # find the bn layer before this scaling
for i, layer in enumerate(self.net.layers): for i, layer in enumerate(self.net.layers):
if layer.type == 'BatchNorm': if layer.type == 'BatchNorm':
name2 = self.layer_names[i] name2 = self.layer_names[i]
...@@ -88,8 +88,8 @@ class CaffeLayerProcessor(object): ...@@ -88,8 +88,8 @@ class CaffeLayerProcessor(object):
# scaling and BN share the same bottom, should merge # scaling and BN share the same bottom, should merge
logger.info("Merge {} and {} into one BatchNorm layer".format( logger.info("Merge {} and {} into one BatchNorm layer".format(
name, name2)) name, name2))
return {name + '/beta': param[1].data, return {name2 + '/beta': param[1].data,
name + '/gamma': param[0].data } name2 + '/gamma': param[0].data }
# assume this scaling layer is part of some BN # assume this scaling layer is part of some BN
logger.error("Could not find a BN layer corresponding to this Scale layer!") logger.error("Could not find a BN layer corresponding to this Scale layer!")
raise ValueError() raise ValueError()
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment