This repository was archived by the owner on Nov 17, 2023. It is now read-only.

Commit 2e76582

switch to unique_ptr
1 parent f254deb

File tree: 4 files changed, +6 -9 lines

src/operator/operator_tune-inl.h

Lines changed: 3 additions & 6 deletions

@@ -116,10 +116,6 @@ class OperatorTune : public OperatorTuneByType<DType> {
     TuneAll();
   }
 
-  ~OperatorTune() {
-    delete[] data_set_;
-  }
-
   /*!
    * \brief Initialize the OperatorTune object
    * \return Whether the OperatorTune object was successfully initialized
@@ -128,7 +124,8 @@ class OperatorTune : public OperatorTuneByType<DType> {
     if (!initialized_) {
       initialized_ = true;
       // Generate some random data for calling the operator kernels
-      data_set_ = reinterpret_cast<DType*>(new char[0x100 * sizeof(DType)]);
+      data_set_ =
+          std::unique_ptr<DType[]>(reinterpret_cast<DType*>(new char[0x100 * sizeof(DType)]));
       std::random_device rd;
       std::mt19937 gen(rd());
       if (!std::is_integral<DType>::value) {
@@ -521,7 +518,7 @@ class OperatorTune : public OperatorTuneByType<DType> {
   /*! \brief Number of passes to obtain an average */
   static constexpr duration_t OUTSIDE_COUNT = (1 << OUTSIDE_COUNT_SHIFT);
   /*! \brief Random data for timing operator calls */
-  static DType* data_set_;
+  static std::unique_ptr<DType[]> data_set_;
   /*! \brief Operators tuned */
   static std::unordered_set<std::string> operator_names_;
   /*! \brief Arbitary object to modify in OMP loop */
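
Taken together, these three hunks replace a manually managed buffer (a raw static pointer freed by a hand-written destructor) with a std::unique_ptr<DType[]> that releases the array automatically. Below is a minimal standalone sketch of the pattern, not the MXNet class itself; the 0x100 element count comes from the diff, while the Tuner name and the fill loop are illustrative:

#include <memory>
#include <random>

// Sketch: the static raw pointer plus manual destructor become a unique_ptr<T[]>.
template <typename DType>
class Tuner {
 public:
  void Init() {
    // unique_ptr<DType[]>'s default deleter runs delete[] on destruction,
    // so the old ~Tuner() { delete[] data_set_; } is no longer needed.
    data_set_ = std::unique_ptr<DType[]>(new DType[0x100]);
    std::mt19937 gen(std::random_device{}());
    std::uniform_int_distribution<int> dist(1, 100);
    for (int i = 0; i < 0x100; ++i) {
      data_set_[i] = static_cast<DType>(dist(gen));
    }
  }

 private:
  std::unique_ptr<DType[]> data_set_;  // was: DType* data_set_;
};

Note that the committed code wraps the existing reinterpret_cast<DType*>(new char[0x100 * sizeof(DType)]) allocation rather than allocating with new DType[0x100]; the sketch uses the typed allocation so the default delete[] deleter matches the allocation exactly.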

src/operator/operator_tune.cc

Lines changed: 1 addition & 1 deletion

@@ -39,7 +39,7 @@ double OperatorTuneBase::tuning_weight_scale_ = 0.0;
  */
 #define IMPLEMENT_OPERATOR_TUNE_STATICS_FOR_TYPE(__typ$) \
   template<> bool OperatorTune<__typ$>::initialized_ = false; \
-  template<> __typ$* OperatorTune<__typ$>::data_set_ = nullptr; \
+  template<> std::unique_ptr<__typ$[]> OperatorTune<__typ$>::data_set_ = nullptr; \
   template<> volatile tune::TuningMode OperatorTuneByType<__typ$>::tuning_mode_ = tune::kAuto; \
   template<> volatile int OperatorTune<__typ$>::volatile_int_ = 9; /* arbitrary number */ \
   template<> std::unordered_set<std::string> OperatorTune<__typ$>::operator_names_({}); \
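
For context on why this edit lives in a macro: every explicit specialization of a class-template static data member needs its own out-of-line definition, and the macro stamps one out per supported element type, so the commit only has to change the member's declared type in one place. A reduced sketch of that pattern under hypothetical names (Tuner, IMPLEMENT_TUNER_STATICS_FOR_TYPE), not the real MXNet macro:

#include <cstdint>
#include <memory>

template <typename T>
struct Tuner {
  static std::unique_ptr<T[]> data_set_;  // declared here, defined per type below
};

// One definition per element type, mirroring how
// IMPLEMENT_OPERATOR_TUNE_STATICS_FOR_TYPE is used in operator_tune.cc.
#define IMPLEMENT_TUNER_STATICS_FOR_TYPE(__typ$) \
  template<> std::unique_ptr<__typ$[]> Tuner<__typ$>::data_set_ = nullptr;

IMPLEMENT_TUNER_STATICS_FOR_TYPE(float)
IMPLEMENT_TUNER_STATICS_FOR_TYPE(double)
IMPLEMENT_TUNER_STATICS_FOR_TYPE(int32_t)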

src/operator/tensor/elemwise_binary_scalar_op.h

Lines changed: 1 addition & 1 deletion

@@ -236,7 +236,7 @@ class BinaryScalarOp : public UnaryOp {
     using namespace mshadow::expr;
     Stream<xpu> *s = ctx.get_stream<xpu>();
     const double alpha = nnvm::get<double>(attrs.parsed);
-    MSHADOW_TYPE_SWITCH(outputs[0].type_flag_, DType, {
+    MSHADOW_TYPE_SWITCH_WITH_BOOL(outputs[0].type_flag_, DType, {
       MXNET_ASSIGN_REQ_SWITCH(req[0], Req, {
         mxnet_op::Kernel<mxnet_op::op_with_req<OP, Req>, xpu>::Launch(
           s, inputs[0].Size(), outputs[0].dptr<DType>(), inputs[0].dptr<DType>(), DType(alpha));
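
The only change here is swapping MSHADOW_TYPE_SWITCH for MSHADOW_TYPE_SWITCH_WITH_BOOL, so the scalar kernel is also instantiated when the output's type flag is bool. A toy sketch of how such a type-switch macro works in principle, with a hypothetical TYPE_SWITCH_WITH_BOOL and flag enum rather than mshadow's actual definitions:

#include <cstdio>

// Hypothetical runtime type flags; mshadow's real list is larger.
enum TypeFlag { kFloat32 = 0, kInt32 = 1, kBool = 2 };

// Maps a runtime flag to a compile-time type so templated code can be
// instantiated per type; the _WITH_BOOL variants add the kBool case.
#define TYPE_SWITCH_WITH_BOOL(flag, DType, ...)                      \
  switch (flag) {                                                    \
    case kFloat32: { typedef float DType; { __VA_ARGS__ } } break;   \
    case kInt32:   { typedef int DType;   { __VA_ARGS__ } } break;   \
    case kBool:    { typedef bool DType;  { __VA_ARGS__ } } break;   \
  }

int main() {
  int flag = kBool;
  TYPE_SWITCH_WITH_BOOL(flag, DType, {
    std::printf("sizeof(DType) = %zu\n", sizeof(DType));  // DType is bool here
  });
  return 0;
}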

tests/python/unittest/test_numpy_op.py

Lines changed: 1 addition & 1 deletion

@@ -1669,7 +1669,7 @@ def hybrid_forward(self, F, a, b, *args, **kwargs):
         mx_test_x1 = mx.numpy.array(np_test_x1, dtype=ltype)
         mx_test_x2 = mx.numpy.array(np_test_x2, dtype=rtype)
         rtol = 1e-2 if ltype is np.float16 or rtype is np.float16 else 1e-3
-        atol = 1e-4 if ltype is np.float16 or rtype is np.float16 else 1e-5
+        atol = 1e-3 if ltype is np.float16 or rtype is np.float16 else 1e-5
         for hybridize in [True, False]:
             if hybridize:
                 mx_func.hybridize()
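
The relaxed float16 tolerance (atol 1e-4 to 1e-3) is consistent with the type's resolution, though the commit does not state the rationale: float16 carries a 10-bit mantissa, so its machine epsilon is 2^-10, roughly 9.8e-4, and an absolute tolerance of 1e-4 is tighter than one ulp near 1.0. The new value of 1e-3 sits just above that epsilon, matching the already-loosened float16 rtol of 1e-2.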
