Skip to content
This repository was archived by the owner on Nov 17, 2023. It is now read-only.

Commit 7729114

Browse files
committed
Remove _backward_ElemwiseAddEx
1 parent 541aebb commit 7729114

File tree

1 file changed

+0
-23
lines changed

1 file changed

+0
-23
lines changed

src/operator/tensor/elemwise_binary_op_basic.cc

Lines changed: 0 additions & 23 deletions
Original file line numberDiff line numberDiff line change
@@ -114,29 +114,6 @@ The storage type of ``elemwise_add`` output depends on storage types of inputs
114114
// this must differ from elemwise_add to prevent add to optimization in forward pass.
115115
MXNET_OPERATOR_REGISTER_BINARY_WITH_SPARSE_CPU(_grad_add, op::mshadow_op::plus);
116116

117-
// Backward pass for elemwise_add (FComputeEx / non-dense path).
// elemwise_add has one output, so the backward receives a single gradient
// NDArray (inputs[0]) and must produce the gradient for each of the two
// forward operands (outputs[0], outputs[1]).  Since d(a+b)/da = d(a+b)/db = 1,
// the incoming gradient is simply copied/identity-mapped to both outputs.
static void _backward_ElemwiseAddEx(const nnvm::NodeAttrs& attrs,
                                    const OpContext& ctx,
                                    const std::vector<NDArray>& inputs,
                                    const std::vector<OpReqType>& req,
                                    const std::vector<NDArray>& outputs) {
  CHECK_EQ(inputs.size(), 1U);   // exactly one output gradient comes in
  CHECK_EQ(outputs.size(), 2U);  // gradients for both addends go out
#if MXNET_USE_MKLDNN == 1
  if (inputs[0].IsMKLDNNData()) {
    // Gradient is held in MKLDNN layout: copy it to both operand gradients
    // with the MKLDNN copy primitive, honoring each output's write request.
    MKLDNNRun(MKLDNNCopy, attrs, ctx, inputs[0], req[0], outputs[0]);
    MKLDNNRun(MKLDNNCopy, attrs, ctx, inputs[0], req[1], outputs[1]);
    return;
  } else if (common::ContainsOnlyStorage(inputs, kDefaultStorage)) {
    // Dense (default-storage) input on an MKLDNN build: fall back to the
    // plain CPU identity-gradient kernel.
    FallBackCompute(
        ElemwiseBinaryOp::BackwardUseNone<cpu, mshadow_op::identity, mshadow_op::identity>,
        attrs, ctx, inputs, req, outputs);
    return;
  }
#endif
  // Sparse / non-default storage path (also the only path on non-MKLDNN
  // builds): identity gradient via the Ex (storage-aware) implementation.
  ElemwiseBinaryOp::BackwardUseNoneEx<cpu, mshadow_op::identity, mshadow_op::identity>(
      attrs, ctx, inputs, req, outputs);
}
139-
140117
static inline bool ElemwiseAddBackwardStorageType(const nnvm::NodeAttrs& attrs,
141118
const int dev_mask,
142119
DispatchMode* dispatch_mode,

0 commit comments

Comments
 (0)