remove some [-Wunused-parameter] warnings #53931

Merged 1 commit on May 19, 2023
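Each of the argument-mapping functions touched below receives an ArgumentMappingContext parameter that its body never reads, which triggers -Wunused-parameter when building with -Wall -Wextra. The change annotates those parameters with the UNUSED macro so the warning is silenced without altering the function signatures. A minimal, self-contained sketch of the pattern follows; the macro definition, the stub types, and ExampleOpArgumentMapping are illustrative assumptions, not Paddle's actual headers (Paddle defines UNUSED in its own macro header).

// Sketch only: shows how an UNUSED-style macro silences -Wunused-parameter
// on GCC/Clang. The stub types and ExampleOpArgumentMapping are hypothetical.
#include <iostream>

#if defined(__GNUC__) || defined(__clang__)
#define UNUSED __attribute__((unused))
#else
#define UNUSED
#endif

struct ArgumentMappingContext {};  // stand-in for phi::ArgumentMappingContext
struct KernelSignature {
  const char* name;
};

// The parameter is required by the mapping-function interface but not used
// here, so it is annotated to keep the build warning-clean.
KernelSignature ExampleOpArgumentMapping(const ArgumentMappingContext& ctx UNUSED) {
  return KernelSignature{"example"};
}

int main() {
  ArgumentMappingContext ctx;
  std::cout << ExampleOpArgumentMapping(ctx).name << std::endl;
  return 0;
}

C++17's [[maybe_unused]] attribute or leaving the parameter unnamed would silence the same warning; the macro form shown in the diff below is the one this PR applies.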
6 changes: 4 additions & 2 deletions paddle/phi/ops/compat/norm_sig.cc
@@ -16,12 +16,14 @@
 
 namespace phi {
 
-KernelSignature NormOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature NormOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature(
       "norm", {"X"}, {"axis", "epsilon", "is_test"}, {"Out", "Norm"});
 }
 
-KernelSignature NormGradOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature NormGradOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("norm_grad",
                          {"X", "Norm", "Out@GRAD"},
                          {"axis", "epsilon", "is_test"},
3 changes: 2 additions & 1 deletion paddle/phi/ops/compat/p_send_sig.cc
@@ -16,7 +16,8 @@
 
 namespace phi {
 
-KernelSignature PSendOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature PSendOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("p_send", {"x"}, {"peer", "dynamic_shape"}, {});
 }
 
3 changes: 2 additions & 1 deletion paddle/phi/ops/compat/pad_sig.cc
@@ -17,7 +17,8 @@
 
 namespace phi {
 
-KernelSignature PadGradOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature PadGradOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature(
       "pad_grad", {"Out@GRAD"}, {"paddings", "pad_value"}, {"X@GRAD"});
 }
2 changes: 1 addition & 1 deletion paddle/phi/ops/compat/pixel_unshuffle_sig.cc
@@ -17,7 +17,7 @@
 namespace phi {
 
 KernelSignature PixelUnshuffleGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("pixel_unshuffle_grad",
                          {"Out@GRAD"},
                          {"downscale_factor", "data_format"},
14 changes: 9 additions & 5 deletions paddle/phi/ops/compat/pool_sig.cc
@@ -16,7 +16,8 @@
 
 namespace phi {
 
-KernelSignature Pool2dOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature Pool2dOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("pool2d",
                          {"X"},
                          {"ksize",
@@ -32,7 +33,8 @@ KernelSignature Pool2dOpArgumentMapping(const ArgumentMappingContext& ctx) {
                          {"Out"});
 }
 
-KernelSignature Pool2dGradOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature Pool2dGradOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("pool2d_grad",
                          {"X", "Out", "Out@GRAD"},
                          {"ksize",
@@ -49,7 +51,7 @@ KernelSignature Pool2dGradOpArgumentMapping(const ArgumentMappingContext& ctx) {
 }
 
 KernelSignature Pool2dDoubleGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("pool2d_double_grad",
                          {"X"},
                          {"ksize",
@@ -65,7 +67,8 @@ KernelSignature Pool2dDoubleGradOpArgumentMapping(
                          {"Out"});
 }
 
-KernelSignature Pool3dOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature Pool3dOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("pool3d",
                          {"X"},
                          {"ksize",
@@ -81,7 +84,8 @@ KernelSignature Pool3dOpArgumentMapping(const ArgumentMappingContext& ctx) {
                          {"Out"});
 }
 
-KernelSignature Pool3dGradOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature Pool3dGradOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("pool3d_grad",
                          {"X", "Out", "Out@GRAD"},
                          {"ksize",
2 changes: 1 addition & 1 deletion paddle/phi/ops/compat/pow2_decay_with_linear_warmup_sig.cc
@@ -17,7 +17,7 @@
 namespace phi {
 
 KernelSignature Pow2DecayWithLinearWarmupOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("pow2_decay_with_linear_warmup",
                          {"LearningRate", "Step"},
                          {"warmup_steps", "total_steps", "base_lr", "end_lr"},
3 changes: 2 additions & 1 deletion paddle/phi/ops/compat/prior_box_sig.cc
@@ -16,7 +16,8 @@
 
 namespace phi {
 
-KernelSignature PriorBoxOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature PriorBoxOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("prior_box",
                          {"Input", "Image"},
                          {"min_sizes",
2 changes: 1 addition & 1 deletion paddle/phi/ops/compat/psroi_pool_sig.cc
@@ -25,7 +25,7 @@ KernelSignature PsroiPoolOpArgumentMapping(const ArgumentMappingContext& ctx) {
 }
 
 KernelSignature PsroiPoolGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature(
       "psroi_pool_grad",
       {"X", "ROIs", "RoisNum", "Out@GRAD"},
14 changes: 7 additions & 7 deletions paddle/phi/ops/compat/reduce_sig.cc
@@ -160,55 +160,55 @@ KernelSignature ReduceAllOpArgumentMapping(const ArgumentMappingContext& ctx) {
 }
 
 KernelSignature ReduceSumGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("sum_grad",
                          {"X", "Out@GRAD"},
                          {"dim", "keep_dim", "reduce_all"},
                          {"X@GRAD"});
 }
 
 KernelSignature ReduceMeanGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("mean_grad",
                          {"X", "Out@GRAD"},
                          {"dim", "keep_dim", "reduce_all"},
                          {"X@GRAD"});
 }
 
 KernelSignature ReduceMaxGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("max_grad",
                          {"X", "Out", "Out@GRAD"},
                          {"dim", "keep_dim", "reduce_all"},
                          {"X@GRAD"});
 }
 
 KernelSignature ReduceAMaxGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("amax_grad",
                          {"X", "Out", "Out@GRAD"},
                          {"dim", "keep_dim", "reduce_all"},
                          {"X@GRAD"});
 }
 
 KernelSignature ReduceMinGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("min_grad",
                          {"X", "Out", "Out@GRAD"},
                          {"dim", "keep_dim", "reduce_all"},
                          {"X@GRAD"});
 }
 
 KernelSignature ReduceAMinGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("amin_grad",
                          {"X", "Out", "Out@GRAD"},
                          {"dim", "keep_dim", "reduce_all"},
                          {"X@GRAD"});
 }
 
 KernelSignature ReduceProdGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("prod_grad",
                          {"X", "Out", "Out@GRAD"},
                          {"dim", "keep_dim", "reduce_all"},
4 changes: 2 additions & 2 deletions paddle/phi/ops/compat/reshape_sig.cc
@@ -38,12 +38,12 @@ KernelSignature ReshapeOpArgumentMapping(const ArgumentMappingContext& ctx) {
 }
 
 KernelSignature ReshapeGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("reshape_grad", {"Out@GRAD"}, {}, {"X@GRAD"});
 }
 
 KernelSignature ReshapeDoubleGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("reshape_double_grad", {"DOut", "DDX"}, {}, {"DDOut"});
 }
 
5 changes: 3 additions & 2 deletions paddle/phi/ops/compat/roi_align_sig.cc
@@ -16,7 +16,8 @@
 
 namespace phi {
 
-KernelSignature RoiAlignOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature RoiAlignOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("roi_align",
                          {"X", "ROIs", "RoisNum"},
                          {"pooled_height",
@@ -28,7 +29,7 @@ KernelSignature RoiAlignOpArgumentMapping(const ArgumentMappingContext& ctx) {
 }
 
 KernelSignature RoiAlignGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("roi_align_grad",
                          {"X", "ROIs", "RoisNum", "Out@GRAD"},
                          {"pooled_height",
5 changes: 3 additions & 2 deletions paddle/phi/ops/compat/roi_pool_sig.cc
@@ -16,15 +16,16 @@
 
 namespace phi {
 
-KernelSignature RoiPoolOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature RoiPoolOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("roi_pool",
                          {"X", "ROIs", "RoisNum"},
                          {"pooled_height", "pooled_width", "spatial_scale"},
                          {"Out", "Argmax"});
 }
 
 KernelSignature RoiPoolOpGradArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("roi_pool_grad",
                          {"X", "ROIs", "RoisNum", "Argmax", "Out@GRAD"},
                          {"pooled_height", "pooled_width", "spatial_scale"},
5 changes: 3 additions & 2 deletions paddle/phi/ops/compat/rrelu_sig.cc
@@ -16,13 +16,14 @@
 
 namespace phi {
 
-KernelSignature RReluOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature RReluOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature(
       "rrelu", {"X"}, {"lower", "upper", "is_test"}, {"Out", "Noise"});
 }
 
 KernelSignature RReluGradGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature(
       "rrelu_grad", {"X", "Noise", "Out@GRAD"}, {}, {"X@GRAD"});
 }
2 changes: 1 addition & 1 deletion paddle/phi/ops/compat/sequence_mask_sig.cc
@@ -14,7 +14,7 @@ limitations under the License. */
 namespace phi {
 
 KernelSignature SequenceMaskOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature(
       "sequence_mask", {"X", "MaxLenTensor"}, {"maxlen", "out_dtype"}, {"Y"});
 }
4 changes: 2 additions & 2 deletions paddle/phi/ops/compat/sequence_pool_sig.cc
@@ -14,15 +14,15 @@ limitations under the License. */
 namespace phi {
 
 KernelSignature SequencePoolOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("sequence_pool",
                          {"X"},
                          {"is_test", "pooltype", "pad_value"},
                          {"Out", "MaxIndex"});
 }
 
 KernelSignature SequencePoolGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("sequence_pool_grad",
                          {"X", "MaxIndex", "Out@GRAD"},
                          {"is_test", "pooltype", "pad_value"},
4 changes: 2 additions & 2 deletions paddle/phi/ops/compat/sync_batch_norm_sig.cc
@@ -17,7 +17,7 @@
 namespace phi {
 
 KernelSignature SyncBatchNormOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("sync_batch_norm",
                          {"X", "Mean", "Variance", "Scale", "Bias"},
                          {"is_test",
@@ -35,7 +35,7 @@ KernelSignature SyncBatchNormOpArgumentMapping(
 }
 
 KernelSignature SyncBatchNormGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("sync_batch_norm_grad",
                          {
                              "X",
5 changes: 3 additions & 2 deletions paddle/phi/ops/compat/transpose_sig.cc
@@ -16,12 +16,13 @@
 
 namespace phi {
 
-KernelSignature TransposeOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature TransposeOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("transpose", {"X"}, {"axis"}, {"Out"});
 }
 
 KernelSignature TransposeGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("transpose_grad", {"Out@GRAD"}, {"axis"}, {"X@GRAD"});
 }
 
5 changes: 3 additions & 2 deletions paddle/phi/ops/compat/tril_triu_sig.cc
@@ -16,12 +16,13 @@ limitations under the License. */
 
 namespace phi {
 
-KernelSignature TrilTriuOpArgumentMapping(const ArgumentMappingContext& ctx) {
+KernelSignature TrilTriuOpArgumentMapping(
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature("tril_triu", {"X"}, {"diagonal", "lower"}, {"Out"});
 }
 
 KernelSignature TrilTriuGradOpArgumentMapping(
-    const ArgumentMappingContext& ctx) {
+    const ArgumentMappingContext& ctx UNUSED) {
   return KernelSignature(
       "tril_triu_grad", {"Out@GRAD"}, {"diagonal", "lower"}, {"X@GRAD"});
 }
2 changes: 1 addition & 1 deletion paddle/phi/tools/print_phi_kernels.cc
@@ -18,7 +18,7 @@
 #include "paddle/phi/core/kernel_registry.h"
 #include "paddle/phi/kernels/declarations.h"
 
-int main(int argc, char** argv) {
+int main(int argc UNUSED, char** argv UNUSED) {
   std::cout << phi::KernelFactory::Instance() << std::endl;
   return 0;
 }