Skip to content

Commit

Permalink
feat(aten::leaky_relu_): Adding alias for inplace leaky relu
Browse files Browse the repository at this point in the history
Signed-off-by: Naren Dasan <naren@narendasan.com>
Signed-off-by: Naren Dasan <narens@nvidia.com>
  • Loading branch information
narendasan committed Feb 2, 2021
1 parent a1d1686 commit bc53411
Showing 1 changed file with 14 additions and 0 deletions.
14 changes: 14 additions & 0 deletions core/conversion/converters/impl/activation.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -126,6 +126,20 @@ auto acthardtanh TRTORCH_UNUSED =
return true;
}})
.pattern({"aten::leaky_relu(Tensor self, Scalar negative_slope=0.01) -> (Tensor)",
          [](ConversionCtx* ctx, const torch::jit::Node* n, args& args) -> bool {
            // Converts aten::leaky_relu to a TensorRT kLEAKY_RELU activation layer.
            // LeakyReLU: f(x) = x for x >= 0, negative_slope * x for x < 0.
            auto self = args[0].ITensorOrFreeze(ctx);
            auto negative_slopeScalar = args[1].unwrapToScalar().to<float>();

            auto new_layer = ctx->net->addActivation(*self, nvinfer1::ActivationType::kLEAKY_RELU);
            // addActivation returns nullptr on failure; guard before dereferencing.
            TRTORCH_CHECK(new_layer, "Unable to create leaky_relu layer from node: " << *n);
            // For kLEAKY_RELU, alpha is the slope applied to negative inputs.
            new_layer->setAlpha(negative_slopeScalar);

            new_layer->setName(util::node_info(n).c_str());
            auto out_tensor = new_layer->getOutput(0);
            out_tensor = ctx->AssociateValueAndTensor(n->outputs()[0], out_tensor);
            LOG_DEBUG("Output shape: " << out_tensor->getDimensions());
            return true;
          }})
.pattern({"aten::leaky_relu_(Tensor(a!) self, Scalar negative_slope=0.01) -> Tensor(a!)",
[](ConversionCtx* ctx, const torch::jit::Node* n, args& args) -> bool {
auto self = args[0].ITensorOrFreeze(ctx);
auto negative_slopeScalar = args[1].unwrapToScalar().to<float>();
Expand Down

0 comments on commit bc53411

Please sign in to comment.