diff --git a/src/operator/leaky_relu-inl.h b/src/operator/leaky_relu-inl.h
index 3d81cfc0d967..f77923582795 100644
--- a/src/operator/leaky_relu-inl.h
+++ b/src/operator/leaky_relu-inl.h
@@ -335,6 +335,7 @@ void LeakyReLUCompute(const nnvm::NodeAttrs& attrs,
                       const OpContext& ctx, const std::vector<TBlob>& inputs,
                       const std::vector<OpReqType>& req,
                       const std::vector<TBlob>& outputs) {
+  if (inputs[0].Size() == 0U) return;
   const LeakyReLUParam &param = nnvm::get<LeakyReLUParam>(attrs.parsed);
   const std::vector<TBlob> no_use_but_adapt_origin_api;
   size_t expected = param.act_type == leakyrelu::kPReLU ? 2 : 1;
@@ -352,6 +353,7 @@ void LeakyReLUGradCompute(const nnvm::NodeAttrs& attrs,
                           const std::vector<TBlob>& inputs,
                           const std::vector<OpReqType>& req,
                           const std::vector<TBlob>& outputs) {
+  if (inputs[0].Size() == 0U) return;
   const LeakyReLUParam& param = nnvm::get<LeakyReLUParam>(attrs.parsed);
   const std::vector<TBlob> no_use_but_adapt_origin_api;
   // inputs: out_grad, input_data, input_gamma, output, output_mask
diff --git a/src/operator/leaky_relu.cc b/src/operator/leaky_relu.cc
index 681ca44b357f..ae18d721b0c1 100644
--- a/src/operator/leaky_relu.cc
+++ b/src/operator/leaky_relu.cc
@@ -90,6 +90,7 @@ static void LeakyReLUComputeExCPU(const nnvm::NodeAttrs& attrs,
                                   const std::vector<NDArray>& inputs,
                                   const std::vector<OpReqType>& req,
                                   const std::vector<NDArray>& outputs) {
+  if (inputs[0].shape().Size() == 0U) return;
   const LeakyReLUParam& param = nnvm::get<LeakyReLUParam>(attrs.parsed);
   size_t expected = param.act_type == leakyrelu::kPReLU ? 2 : 1;
   CHECK_EQ(inputs.size(), expected);
@@ -107,6 +108,7 @@ void LeakyReLUGradComputeExCPU(const nnvm::NodeAttrs& attrs,
                                const std::vector<NDArray>& inputs,
                                const std::vector<OpReqType>& req,
                                const std::vector<NDArray>& outputs) {
+  if (inputs[0].shape().Size() == 0U) return;
   const LeakyReLUParam& param = nnvm::get<LeakyReLUParam>(attrs.parsed);
   if (SupportMKLDNNLeakyRelu(param, inputs[0])) {
     std::vector<NDArray> in_data{inputs[0], inputs[1]};
diff --git a/src/operator/nn/mkldnn/mkldnn_act-inl.h b/src/operator/nn/mkldnn/mkldnn_act-inl.h
index cf3e4f47d1ff..70bf16a14369 100644
--- a/src/operator/nn/mkldnn/mkldnn_act-inl.h
+++ b/src/operator/nn/mkldnn/mkldnn_act-inl.h
@@ -74,13 +74,6 @@ MKLDNNActForward &GetActForward(const MKLDNNActParam& param,
                                 const OpContext &ctx, const NDArray &in_data,
                                 const mkldnn::memory &in_mem);
 
-void MKLDNNActivationForward(const nnvm::NodeAttrs& attrs, const OpContext &ctx,
-                             const NDArray &in_data, const OpReqType &req,
-                             const NDArray &out_data);
-void MKLDNNLeakyReluForward(const nnvm::NodeAttrs& attrs, const OpContext &ctx,
-                            const NDArray &in_data, const OpReqType &req,
-                            const NDArray &out_data);
-
 mkldnn::eltwise_backward::primitive_desc GetActBwdDescImpl(
     const MKLDNNActParam &param, const mkldnn::memory &input_mem,
     const mkldnn::memory &diff_dst_memory);
diff --git a/src/operator/quantization/mkldnn/mkldnn_quantized_act.cc b/src/operator/quantization/mkldnn/mkldnn_quantized_act.cc
index 86acac880cb2..f7520d57a290 100644
--- a/src/operator/quantization/mkldnn/mkldnn_quantized_act.cc
+++ b/src/operator/quantization/mkldnn/mkldnn_quantized_act.cc
@@ -24,7 +24,7 @@
  */
 
 #if MXNET_USE_MKLDNN == 1
-#include "../../nn/mkldnn/mkldnn_act-inl.h"
+#include "../../nn/mkldnn/mkldnn_ops-inl.h"
 #include "../quantization_utils.h"
 
 namespace mxnet {
diff --git a/tests/python/unittest/test_smoke.py b/tests/python/unittest/test_smoke.py
index 26cd4e542f88..c14310cd7f98 100644
--- a/tests/python/unittest/test_smoke.py
+++ b/tests/python/unittest/test_smoke.py
@@ -56,3 +56,14 @@ def test_18933_channel_0():
     with autograd.record():
         a = npx.instance_norm(arr, gamma, beta)
     a.backward()
+
+@use_np
+@with_environment('MXNET_ENGINE_TYPE', 'NaiveEngine')
+def test_18934_empty_leaky_relu():
+    arr = np.random.rand(0,2)
+    arr_grad = np.empty_like(arr)
+
+    autograd.mark_variables([arr], [arr_grad])
+    with autograd.record():
+        res = npx.leaky_relu(arr)
+    res.backward()
\ No newline at end of file