Skip to content
This repository was archived by the owner on Nov 17, 2023. It is now read-only.

Commit a60d44b

Browse files
author
Hao Jin
committed
Get rid of older code
1 parent 700f9bf commit a60d44b

File tree

1 file changed

+0
-2
lines changed

1 file changed

+0
-2
lines changed

src/operator/l2_normalization-inl.h

Lines changed: 0 additions & 2 deletions
Original file line number | Diff line number | Diff line change
@@ -95,7 +95,6 @@ class L2NormalizationOp : public Operator {
           .get_with_shape<xpu, 2, DType>(dshape, s);
       Tensor<xpu, 1, DType> norm = out_data[l2_normalization::kNorm].get<xpu, 1, DType>(s);
       norm = sumall_except_dim<0>(F<mxnet::op::mshadow_op::square>(data));
-      // norm = F<mxnet::op::mshadow_op::square_root>(norm + DType(param_.eps));
       MXNET_ASSIGN_REQ_SWITCH(req[0], Req, {
         mxnet_op::Kernel<mxnet_op::op_with_req<mxnet::op::mshadow_op::plus, Req>, xpu>::Launch(
           s, norm.size(0), norm.dptr_, norm.dptr_, DType(param_.eps));
@@ -114,7 +113,6 @@ class L2NormalizationOp : public Operator {
       Tensor<xpu, 2, DType> norm = out_data[l2_normalization::kNorm]
           .get_with_shape<xpu, 2, DType>(norm_shape, s);
       norm = reduce_with_axis<red::sum, false>(F<mxnet::op::mshadow_op::square>(data), 1);
-      // norm = F<mxnet::op::mshadow_op::square_root>(norm + DType(param_.eps));
       MXNET_ASSIGN_REQ_SWITCH(req[0], Req, {
         mxnet_op::Kernel<mxnet_op::op_with_req<mxnet::op::mshadow_op::plus, Req>, xpu>::Launch(
           s, norm.size(0) * norm.size(1), norm.dptr_, norm.dptr_, DType(param_.eps));

0 commit comments

Comments
 (0)