This repository has been archived by the owner on Jan 3, 2023. It is now read-only.

Commit: Zp/roll back conv bias (#1000)

* roll back conv_bias

airofjune authored and wei-v-wang committed Sep 27, 2017
commit c59bf58, 1 parent 199f215

Showing 3 changed files with 10 additions and 5 deletions.
neon/layers/layer.py (6 changes: 4 additions & 2 deletions)

@@ -1794,7 +1794,8 @@ def __init__(self, fshape, init, strides={}, padding={}, dilation={},
                  name=None):
         super(Conv, self).__init__(bias=bias, batch_norm=batch_norm,
                                    activation=activation, name=name)
-        if bias and NervanaObject.be.is_mkl():
+        # temp fall back to old conv and bias for bug of weights save/load
+        if False and bias and NervanaObject.be.is_mkl():
             self.append(Convolution_bias(fshape=fshape, strides=strides, padding=padding,
                                          dilation=dilation, init=init, bsum=batch_norm, bias=bias,
                                          name=name))
@@ -1807,7 +1808,8 @@ def __init__(self, fshape, init, strides={}, padding={}, dilation={},
     def add_postfilter_layers(self):
         self.init_base_name()
         # mklbackend will do conv+bias
-        if self.bias is not None and not NervanaObject.be.is_mkl():
+        # fall back
+        if self.bias is not None:
             name = self.base_name + '_bias'
             self.append(Bias(init=self.bias, name=name))
         if self.batch_norm:
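Note on the effect of this file's change: guarding the fused path with False and makes it unreachable, so Conv always appends a plain Convolution and add_postfilter_layers() now adds a standalone Bias on every backend, MKL included, which is what sidesteps the Convolution_bias weights save/load bug. A minimal sketch of the resulting behaviour; the backend setup and import paths follow neon's usual public API and are assumptions, not part of this commit:

    # Sketch only: what Conv builds after this rollback (assumed API usage).
    from neon.backends import gen_backend
    from neon.initializers import Uniform
    from neon.layers import Conv

    gen_backend(backend='cpu', batch_size=32)  # 'mkl' now behaves the same for Conv + bias
    conv = Conv((4, 4, 3), Uniform(), bias=Uniform())
    # The fused branch is dead code, so the container holds separate layers,
    # e.g. [Convolution, Bias], instead of a single Convolution_bias.
    print([layer.__class__.__name__ for layer in conv])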
tests/test_optimizer.py (3 changes: 2 additions & 1 deletion)

@@ -388,7 +388,8 @@ def test_multi_optimizer(backend_default_mkl):
     layers_to_optimize1 = [l for l in layer_list1 if isinstance(l, ParameterLayer)]
     layers_to_optimize2 = [l for l in layer_list2 if isinstance(l, ParameterLayer)]
     opt.optimize(layers_to_optimize1, 0)
-    if l1[0].be.is_mkl():
+    # temp roll back conv_bias
+    if False and l1[0].be.is_mkl():
         assert opt.map_list[opt_adam][0].__class__.__name__ is 'Convolution_bias'
     else:
         assert opt.map_list[opt_adam][0].__class__.__name__ is 'Convolution'
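With the MKL check disabled, test_multi_optimizer always takes the else branch: the first layer mapped to the Adam optimizer is a plain Convolution rather than a Convolution_bias. For context, MultiOptimizer routes layers to optimizers by class name, roughly as in the sketch below (the hyperparameters and the reduced mapping are illustrative, not the test's exact setup):

    # Sketch of the mapping that opt.map_list records (values are made up).
    from neon.optimizers import Adam, GradientDescentMomentum, MultiOptimizer

    opt_gdm = GradientDescentMomentum(learning_rate=0.01, momentum_coef=0.9)
    opt_adam = Adam()
    # Layers whose class name matches a key are assigned that optimizer;
    # everything else falls back to 'default'. After the rollback the conv
    # layer's class name is 'Convolution', so it lands under opt_adam.
    opt = MultiOptimizer({'default': opt_gdm, 'Convolution': opt_adam})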
tests/test_wrapper_layers.py (6 changes: 4 additions & 2 deletions)

@@ -28,7 +28,8 @@ def test_conv_wrapper(backend_default):
 
     conv = Conv((4, 4, 3), Uniform(), bias=Uniform())
     assert isinstance(conv, list)
-    if conv[0].be.is_mkl():
+    # temp roll back conv_bias
+    if False and conv[0].be.is_mkl():
         assert len(conv) == 1
         assert isinstance(conv[0], Convolution_bias)
     else:
@@ -44,7 +45,8 @@ def test_conv_wrapper(backend_default):
 
     conv = Conv((4, 4, 3), Uniform(), bias=Uniform(), activation=Rectlin())
     assert isinstance(conv, list)
-    if conv[0].be.is_mkl():
+    # temp roll back conv_bias
+    if False and conv[0].be.is_mkl():
         assert isinstance(conv[0], Convolution_bias)
         assert isinstance(conv[1], Activation)
         assert len(conv) == 2
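Both hunks in the wrapper test likewise force the non-fused branch, so its expectations revert to separate layers; the collapsed else branches presumably assert a Convolution plus a Bias, and, for the second case, a trailing Activation wrapping Rectlin. A sketch of that second case under those assumptions:

    # Sketch only; the asserts hidden in the collapsed else branch are assumed.
    from neon.backends import gen_backend
    from neon.initializers import Uniform
    from neon.layers import Conv
    from neon.transforms import Rectlin

    gen_backend(backend='cpu', batch_size=32)
    conv = Conv((4, 4, 3), Uniform(), bias=Uniform(), activation=Rectlin())
    # Expected composition after the rollback: separate Convolution, Bias,
    # and Activation layers rather than a fused Convolution_bias.
    assert [layer.__class__.__name__ for layer in conv] == ['Convolution', 'Bias', 'Activation']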
