pnnx convert nn.Softmax2d #4324

Merged (3 commits) on Nov 2, 2022
2 changes: 1 addition & 1 deletion tools/pnnx/README.md
@@ -547,7 +547,7 @@ TORCH_LIBRARY(upfirdn2d_op, m) {
|nn.Sigmoid | :heavy_check_mark: | :heavy_check_mark: |
|nn.SiLU | :heavy_check_mark: | :heavy_check_mark: |
|nn.Softmax | :heavy_check_mark: | :heavy_check_mark: |
|nn.Softmax2d | |
|nn.Softmax2d | :heavy_check_mark: | :heavy_check_mark: |
|nn.Softmin | :heavy_check_mark: |
|nn.Softplus | :heavy_check_mark: |
|nn.Softshrink | :heavy_check_mark: |
2 changes: 2 additions & 0 deletions tools/pnnx/src/CMakeLists.txt
@@ -79,6 +79,7 @@ set(pnnx_pass_level1_SRCS
pass_level1/nn_Sigmoid.cpp
pass_level1/nn_SiLU.cpp
pass_level1/nn_Softmax.cpp
pass_level1/nn_Softmax2d.cpp
pass_level1/nn_Softmin.cpp
pass_level1/nn_Softplus.cpp
pass_level1/nn_Softshrink.cpp
@@ -454,6 +455,7 @@ set(pnnx_pass_ncnn_SRCS
pass_ncnn/nn_Sigmoid.cpp
pass_ncnn/nn_SiLU.cpp
pass_ncnn/nn_Softmax.cpp
pass_ncnn/nn_Softmax2d.cpp
pass_ncnn/nn_Tanh.cpp
pass_ncnn/nn_Upsample.cpp
pass_ncnn/nn_UpsamplingBilinear2d.cpp
37 changes: 37 additions & 0 deletions tools/pnnx/src/pass_level1/nn_Softmax2d.cpp
@@ -0,0 +1,37 @@
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2022 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#include "pass_level1.h"

#include "../utils.h"

namespace pnnx {

class Softmax2d : public FuseModulePass
{
public:
const char* match_type_str() const
{
return "__torch__.torch.nn.modules.activation.Softmax2d";
}

const char* type_str() const
{
return "nn.Softmax2d";
}
};

REGISTER_GLOBAL_PNNX_FUSE_MODULE_PASS(Softmax2d)

} // namespace pnnx
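
This level-1 pass simply fuses the TorchScript module type `__torch__.torch.nn.modules.activation.Softmax2d` into a pnnx `nn.Softmax2d` operator; it captures no parameters because the module has none. For orientation (not part of this PR), `nn.Softmax2d` on an NCHW tensor is just softmax over the channel dimension, as this minimal sketch checks:

```python
# Illustrative sketch, not part of the PR: for a 4D NCHW tensor,
# nn.Softmax2d is equivalent to softmax over the channel dimension (dim=1).
import torch
import torch.nn as nn
import torch.nn.functional as F

x = torch.rand(1, 12, 24, 64)
assert torch.allclose(nn.Softmax2d()(x), F.softmax(x, dim=1))
```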
55 changes: 55 additions & 0 deletions tools/pnnx/src/pass_ncnn/nn_Softmax2d.cpp
@@ -0,0 +1,55 @@
// Tencent is pleased to support the open source community by making ncnn available.
//
// Copyright (C) 2022 THL A29 Limited, a Tencent company. All rights reserved.
//
// Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
// in compliance with the License. You may obtain a copy of the License at
//
// https://opensource.org/licenses/BSD-3-Clause
//
// Unless required by applicable law or agreed to in writing, software distributed
// under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
// CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.

#include "pass_ncnn.h"

namespace pnnx {

namespace ncnn {

class nn_Softmax2d : public GraphRewriterPass
{
public:
const char* match_pattern_graph() const
{
return R"PNNXIR(7767517
3 2
pnnx.Input input 0 1 input
nn.Softmax2d op_0 1 1 input out
pnnx.Output output 1 0 out
)PNNXIR";
}

const char* type_str() const
{
return "Softmax";
}

const char* name_str() const
{
return "softmax2d";
}

void write(Operator* op, const std::map<std::string, Parameter>& /*captured_params*/) const
{
op->params["0"] = 0;
op->params["1"] = 1;
}
};

REGISTER_GLOBAL_PNNX_NCNN_GRAPH_REWRITER_PASS(nn_Softmax2d, 20)

} // namespace ncnn

} // namespace pnnx
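
The ncnn rewriter maps the fused `nn.Softmax2d` node onto ncnn's `Softmax` layer. In `write()`, param `0` is the softmax axis and param `1` appears to be ncnn's flag for the corrected axis handling. Axis 0 is the right choice here because `solve_batch_index.cpp` (below) registers `nn.Softmax2d` with batch index 0, so the ncnn blob is an unbatched CHW tensor whose axis 0 is the channel dimension. A minimal sketch of that reasoning, not part of the PR:

```python
# Illustrative sketch, not part of the PR: once the batch dimension is
# stripped (ncnn blobs are CHW), softmax over axis 0 reproduces
# nn.Softmax2d applied to the original NCHW tensor.
import torch
import torch.nn as nn

x = torch.rand(1, 12, 24, 64)
ref = nn.Softmax2d()(x).squeeze(0)   # NCHW -> CHW reference
chw = x.squeeze(0)                   # ncnn-style CHW blob
assert torch.allclose(ref, torch.softmax(chw, dim=0))
```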
1 change: 1 addition & 0 deletions tools/pnnx/src/pass_ncnn/solve_batch_index.cpp
@@ -98,6 +98,7 @@ static bool is_known_operator_with_batch_index_0(const Operator* op)
"nn.ReplicationPad1d",
"nn.ReplicationPad2d",
"nn.ReplicationPad3d",
"nn.Softmax2d",
"nn.Upsample",
"nn.UpsamplingBilinear2d",
"nn.UpsamplingNearest2d",
1 change: 1 addition & 0 deletions tools/pnnx/tests/CMakeLists.txt
@@ -143,6 +143,7 @@ pnnx_add_test(nn_SELU)
pnnx_add_test(nn_Sigmoid)
pnnx_add_test(nn_SiLU)
pnnx_add_test(nn_Softmax)
pnnx_add_test(nn_Softmax2d)
pnnx_add_test(nn_Softmin)
pnnx_add_test(nn_Softplus)
pnnx_add_test(nn_Softshrink)
1 change: 1 addition & 0 deletions tools/pnnx/tests/ncnn/CMakeLists.txt
@@ -115,6 +115,7 @@ pnnx_ncnn_add_test(nn_SELU)
pnnx_ncnn_add_test(nn_Sigmoid)
pnnx_ncnn_add_test(nn_SiLU)
pnnx_ncnn_add_test(nn_Softmax)
pnnx_ncnn_add_test(nn_Softmax2d)
pnnx_ncnn_add_test(nn_Tanh)
pnnx_ncnn_add_test(nn_Upsample)
pnnx_ncnn_add_test(nn_UpsamplingBilinear2d)
56 changes: 56 additions & 0 deletions tools/pnnx/tests/ncnn/test_nn_Softmax2d.py
@@ -0,0 +1,56 @@
# Tencent is pleased to support the open source community by making ncnn available.
#
# Copyright (C) 2022 THL A29 Limited, a Tencent company. All rights reserved.
#
# Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

import torch
import torch.nn as nn
import torch.nn.functional as F

class Model(nn.Module):
def __init__(self):
super(Model, self).__init__()

self.act_0 = nn.Softmax2d()

def forward(self, x):
x = self.act_0(x)
return x

def test():
net = Model()
net.eval()

torch.manual_seed(0)
x = torch.rand(1, 12, 24, 64)

a = net(x)

# export torchscript
mod = torch.jit.trace(net, x)
mod.save("test_nn_Softmax2d.pt")

# torchscript to pnnx
import os
os.system("../../src/pnnx test_nn_Softmax2d.pt inputshape=[1,12,24,64]")

# ncnn inference
import test_nn_Softmax2d_ncnn
b = test_nn_Softmax2d_ncnn.test_inference()

return torch.allclose(a, b, 1e-4, 1e-4)

if __name__ == "__main__":
if test():
exit(0)
else:
exit(1)
56 changes: 56 additions & 0 deletions tools/pnnx/tests/test_nn_Softmax2d.py
@@ -0,0 +1,56 @@
# Tencent is pleased to support the open source community by making ncnn available.
#
# Copyright (C) 2022 THL A29 Limited, a Tencent company. All rights reserved.
#
# Licensed under the BSD 3-Clause License (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
#
# https://opensource.org/licenses/BSD-3-Clause
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.

import torch
import torch.nn as nn
import torch.nn.functional as F

class Model(nn.Module):
def __init__(self):
super(Model, self).__init__()

self.act_0 = nn.Softmax2d()

def forward(self, x):
x = self.act_0(x)
return x

def test():
net = Model()
net.eval()

torch.manual_seed(0)
x = torch.rand(1, 12, 24, 64)

a = net(x)

# export torchscript
mod = torch.jit.trace(net, x)
mod.save("test_nn_Softmax2d.pt")

# torchscript to pnnx
import os
os.system("../src/pnnx test_nn_Softmax2d.pt inputshape=[1,12,24,64]")

# pnnx inference
import test_nn_Softmax2d_pnnx
b = test_nn_Softmax2d_pnnx.test_inference()

return torch.equal(a, b)

if __name__ == "__main__":
if test():
exit(0)
else:
exit(1)