Commit 6543488

roywei authored and anirudh2290 committed
Revert "Improve FC perf when no_bias=False (apache#15033)" (apache#15099)
This reverts commit 6cf964a.
1 parent 5fc4fc5 commit 6543488

File tree

3 files changed: +3 −34 lines


src/operator/nn/fully_connected-inl.h

Lines changed: 1 addition & 13 deletions

@@ -36,7 +36,6 @@
 #include "../elemwise_op_common.h"
 #include "../linalg.h"
 #include "../../common/utils.h"
-#include "../tensor/broadcast_reduce_op.h"
 
 namespace mxnet {
 namespace op {
@@ -170,18 +169,7 @@ void FCBackward(const OpContext &ctx, const FullyConnectedParam &param,
   // gradient of bias
   if (!param.no_bias) {
     Tensor<xpu, 1, DType> gbias = in_grad[fullc::kBias].get<xpu, 1, DType>(s);
-    TBlob grad_blob = TBlob(grad);
-    TBlob gbias_blob = TBlob(gbias);
-    mxnet::TShape x(1, 0);
-    mxnet::TShape small;
-    if (shape_assign(&gbias_blob.shape_, Shape2(param.num_hidden, 1))) {
-      small = gbias_blob.shape_;
-    } else {
-      small = ReduceAxesShapeImpl(grad_blob.shape_, dmlc::optional<mxnet::TShape>(x), true, false);
-    }
-    ReduceAxesComputeImpl<xpu, mshadow::red::sum, false, false,
-                          mshadow_op::identity>(ctx, {grad_blob}, {req[fullc::kBias]},
-                                                {in_grad[fullc::kBias]}, small);
+    Assign(gbias, req[fullc::kBias], sum_rows(grad));
   }
   // gradient of data
   // Legacy approach shown here for comparison:
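
For context on the hunk above: the restored sum_rows(grad) and the reverted ReduceAxesComputeImpl path compute the same quantity, the per-output-unit sum of the incoming gradient. A minimal NumPy sketch of that reduction (illustrative only, not MXNet source; shapes match the removed test further down):

    import numpy as np

    # dL/dY for a batch of 5 samples and num_hidden = 10 output units
    grad = np.random.uniform(size=(5, 10)).astype(np.float32)

    # Bias gradient of Y = X.dot(W.T) + b: sum the output gradient over the
    # batch axis -- the "sum over rows of grad" that sum_rows(grad) expresses.
    gbias = grad.sum(axis=0)
    assert gbias.shape == (10,)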

src/operator/nn/fully_connected.cc

Lines changed: 2 additions & 0 deletions

@@ -316,9 +316,11 @@ NNVM_REGISTER_OP(_backward_FullyConnected)
   const FullyConnectedParam& params = nnvm::get<FullyConnectedParam>(attrs.parsed);
   return params.no_bias ? 2 : 3;
 })
+#if MXNET_USE_MKLDNN == 1
 .set_attr<FResourceRequest>("FResourceRequest", [](const NodeAttrs& n) {
   return std::vector<ResourceRequest>{ResourceRequest::kTempSpace};
 })
+#endif
 .set_attr<nnvm::TIsBackward>("TIsBackward", true)
 .set_attr<nnvm::FInplaceOption>("FInplaceOption", [](const NodeAttrs& attrs){
   return std::vector<std::pair<int, int> >{{1, 0}};
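
Note on the hunk above: with this revert, _backward_FullyConnected requests temporary workspace only when MXNet is built with MKL-DNN. Whether a given binary was built that way can be checked from Python; a small sketch assuming the mxnet.runtime feature-detection API shipped with MXNet 1.5+:

    from mxnet.runtime import Features

    # Features() lists the compile-time flags of the installed binary;
    # 'MKLDNN' corresponds to the MXNET_USE_MKLDNN guard restored above.
    features = Features()
    print(features.is_enabled('MKLDNN'))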

tests/python/unittest/test_operator.py

Lines changed: 0 additions & 21 deletions

@@ -696,27 +696,6 @@ def test_symbol_pow():
     check_symbolic_backward(test, [data_tmp, exp_tmp], [np.ones(shape)], [data_dir, exp_dir])
 
 
-@with_seed()
-def test_fully_connected():
-    data = mx.sym.var("data")
-    fc_weight = mx.sym.var("weight")
-    fc_bias = mx.sym.var("bias")
-    fc = mx.sym.FullyConnected(data=data, weight=fc_weight, bias=fc_bias, num_hidden=10, no_bias=False, name='fc')
-    data = mx.nd.random.uniform(shape=(5, 5, 5, 13), dtype=np.float32)
-    fc_weight = mx.nd.random.uniform(shape=(10, 325), dtype=np.float32)
-    fc_bias = mx.nd.random.uniform(shape=(10), dtype=np.float32)
-    fc_bias2 = mx.nd.random.uniform(shape=(10, 1), dtype=np.float32)
-    data_np = data.asnumpy().reshape(5, 325)
-    fc_weight_np = np.transpose(fc_weight.asnumpy())
-    fc_bias_np = fc_bias.asnumpy()
-    res = np.dot(data_np, fc_weight_np) + fc_bias.asnumpy()
-    check_symbolic_forward(fc, {'data': data_np, 'weight': fc_weight.asnumpy(), 'bias': fc_bias_np}, {'fc_output': res})
-    check_numeric_gradient(fc, {'data': data_np, 'weight': fc_weight.asnumpy(), 'bias': fc_bias_np},
-                           numeric_eps=1e-2, rtol=1e-4, atol=1e-2)
-    # TODO: Fix Bug #15032 when bias has ndim > 1
-    #check_symbolic_forward(fc, {'data': data_np, 'weight': fc_weight.asnumpy(), 'bias': fc_bias2.asnumpy()}, {'fc_output': res})
-
-
 @with_seed()
 def test_pow_fn():
     shape = (3, 4)
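
The deleted test compared FullyConnected's symbolic output against a NumPy reference. A condensed sketch of that reference computation, reusing the shapes from the removed test (NumPy only, no MXNet needed):

    import numpy as np

    data = np.random.uniform(size=(5, 5, 5, 13)).astype(np.float32)
    weight = np.random.uniform(size=(10, 325)).astype(np.float32)  # (num_hidden, 5*5*13)
    bias = np.random.uniform(size=(10,)).astype(np.float32)

    # FullyConnected flattens every axis but the first, then computes X.dot(W.T) + b,
    # which is the `res` the removed test passed to check_symbolic_forward.
    expected = np.dot(data.reshape(5, 325), weight.T) + bias
    assert expected.shape == (5, 10)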
