Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[NewIR] Remove grad APIs in c_ops #57158

Merged
merged 3 commits into from
Sep 12, 2023
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
23 changes: 13 additions & 10 deletions paddle/fluid/pir/dialect/op_generator/api_gen.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,12 @@
import re

import yaml
from op_gen import OpCompatParser, OpInfoParser, to_pascal_case
from op_gen import (
PD_MANUAL_OP_LIST,
OpCompatParser,
OpInfoParser,
to_pascal_case,
)

H_FILE_TEMPLATE = """

Expand Down Expand Up @@ -81,7 +86,6 @@

OP_RESULT = 'pir::OpResult'
VECTOR_TYPE = 'pir::VectorType'
PD_MANUAL_OP_LIST = ['add_n']


def get_op_class_name(op_name):
Expand Down Expand Up @@ -111,6 +115,11 @@ def _parse_yaml(self, op_yaml_files, op_compat_yaml_file):
)
return op_info_items

def _need_skip(self, op_info, op_name):
return (
op_info.infer_meta_func is None and op_name not in PD_MANUAL_OP_LIST
)

# =====================================
# Gen declare functions
# =====================================
Expand Down Expand Up @@ -191,10 +200,7 @@ def _gen_h_file(self, op_info_items, namespaces, h_file_path):
for op_name in op_info.op_phi_name:
# NOTE:When infer_meta_func is None, the Build() function generated in pd_op
# is wrong, so temporarily skip the automatic generation of these APIs
if (
op_info.infer_meta_func is None
and op_name not in PD_MANUAL_OP_LIST
):
if self._need_skip(op_info, op_name):
continue
declare_str += self._gen_one_declare(op_info, op_name, False)
if len(op_info.mutable_attribute_name_list) > 0:
Expand Down Expand Up @@ -325,10 +331,7 @@ def _gen_cpp_file(self, op_info_items, namespaces, cpp_file_path):
for op_name in op_info.op_phi_name:
# NOTE:When infer_meta_func is None, the Build() function generated in pd_op
# is wrong, so temporarily skip the automatic generation of these APIs
if (
op_info.infer_meta_func is None
and op_name not in PD_MANUAL_OP_LIST
):
if self._need_skip(op_info, op_name):
continue
impl_str += self._gen_one_impl(op_info, op_name, False)
if len(op_info.mutable_attribute_name_list) > 0:
Expand Down
4 changes: 2 additions & 2 deletions paddle/fluid/pir/dialect/op_generator/op_gen.py
Original file line number Diff line number Diff line change
Expand Up @@ -173,7 +173,7 @@ class {op_name} : public pir::Op<{op_name}{interfaces}{traits}> {{
'bool': 'pir::BoolAttribute',
}

_NO_NEED_GEN_OPS = {'add_n', 'add_n_', 'add_n_with_kernel', 'split_grad'}
PD_MANUAL_OP_LIST = {'add_n', 'add_n_', 'add_n_with_kernel', 'split_grad'}


def to_phi_and_fluid_op_name(op_item):
Expand Down Expand Up @@ -881,7 +881,7 @@ def OpGenerator(

# If op has inplace info, we will generate inplace op and non-inplace op.
for op_name in op_info.op_phi_name:
if op_name in _NO_NEED_GEN_OPS:
if op_name in PD_MANUAL_OP_LIST:
continue
op_class_name = to_pascal_case(op_name) + "Op"
op_dialect_name = dialect_name + "." + op_name
Expand Down
32 changes: 17 additions & 15 deletions paddle/fluid/pir/dialect/op_generator/ops_api_gen.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,7 @@
import argparse
import os

from api_gen import NAMESPACE_TEMPLATE, PD_MANUAL_OP_LIST, CodeGen
from api_gen import NAMESPACE_TEMPLATE, CodeGen

CPP_FILE_TEMPLATE = """
#include <pybind11/pybind11.h>
Expand Down Expand Up @@ -55,7 +55,7 @@
}}
}}"""

NO_DY_FUNCTION_IMPL_TEMPLATE = """
STATIC_ONLY_FUNCTION_IMPL_TEMPLATE = """
static PyObject *{name}(PyObject *self, PyObject *args, PyObject *kwargs) {{
VLOG(6) << "Call static_api_{name}";
return static_api_{name}(self, args, kwargs);
Expand All @@ -64,8 +64,9 @@
OPS_API_TEMPLATE = """
{{"{name}", (PyCFunction)(void (*)(void)){name}, METH_VARARGS | METH_KEYWORDS, "C++ interface function for {name}."}},"""

SPECIAL_STATIC_ONLY_APIS = [
'fetch',
NEED_GEN_STATIC_ONLY_APIS = ['fetch']

NO_NEED_GEN_STATIC_ONLY_APIS = [
'set_value_with_tensor',
'set_value_with_tensor_',
'fused_bn_add_activation_',
Expand Down Expand Up @@ -93,14 +94,18 @@ class OpsAPIGen(CodeGen):
def __init__(self) -> None:
    # NOTE(review): only delegates to the CodeGen base initializer and adds
    # no state of its own — likely removable; confirm nothing relies on it.
    super().__init__()

def _need_skip(self, op_info, op_name):
    """Return True when no Python ops-API entry should be generated.

    Extends the base-class skip rule (no infer_meta function and not a
    manual op) with the ops-API-specific cases: backward ops
    (``*_grad`` / ``*_grad_``), XPU-specific ops, and ops whose static
    API is maintained by hand.
    """
    return (
        super()._need_skip(op_info, op_name)
        # str.endswith accepts a tuple of suffixes, so one call covers
        # all three patterns (per review feedback on this PR).
        or op_name.endswith(('_grad', '_grad_', 'xpu'))
        or op_name in NO_NEED_GEN_STATIC_ONLY_APIS
    )

def _gen_one_function_impl(self, name):
    """Render the C++ PyObject* wrapper implementation for op *name*.

    Ops listed in NEED_GEN_STATIC_ONLY_APIS get a static-graph-only
    wrapper; every other op gets the regular (dynamic/static dispatch)
    wrapper. Grad/XPU filtering is handled earlier by _need_skip, so no
    suffix checks are needed here.
    """
    if name in NEED_GEN_STATIC_ONLY_APIS:
        return STATIC_ONLY_FUNCTION_IMPL_TEMPLATE.format(name=name)
    else:
        return FUNCTION_IMPL_TEMPLATE.format(name=name)

Expand All @@ -117,10 +122,7 @@ def gen_cpp_file(
ops_api_str = ''
for op_info in op_info_items:
for op_name in op_info.op_phi_name:
if (
op_info.infer_meta_func is None
and op_name not in PD_MANUAL_OP_LIST
):
if self._need_skip(op_info, op_name):
continue
function_impl_str += self._gen_one_function_impl(op_name)
ops_api_str += self._gen_one_ops_api(op_name)
Expand Down
18 changes: 3 additions & 15 deletions paddle/fluid/pir/dialect/op_generator/python_c_gen.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,13 +15,7 @@
import argparse
import re

from api_gen import (
NAMESPACE_TEMPLATE,
OP_RESULT,
PD_MANUAL_OP_LIST,
VECTOR_TYPE,
CodeGen,
)
from api_gen import NAMESPACE_TEMPLATE, OP_RESULT, VECTOR_TYPE, CodeGen

H_FILE_TEMPLATE = """

Expand Down Expand Up @@ -195,10 +189,7 @@ def _gen_h_file(self, op_info_items, namespaces, h_file_path):
for op_name in op_info.op_phi_name:
# NOTE:When infer_meta_func is None, the Build() function generated in pd_op
# is wrong, so temporarily skip the automatic generation of these APIs
if (
op_info.infer_meta_func is None
and op_name not in PD_MANUAL_OP_LIST
):
if self._need_skip(op_info, op_name):
continue
declare_str += self._gen_one_declare(op_name)

Expand Down Expand Up @@ -332,10 +323,7 @@ def _gen_cpp_file(self, op_info_items, namespaces, cpp_file_path):
for op_name in op_info.op_phi_name:
# NOTE:When infer_meta_func is None, the Build() function generated in pd_op
# is wrong, so temporarily skip the automatic generation of these APIs
if (
op_info.infer_meta_func is None
and op_name not in PD_MANUAL_OP_LIST
):
if self._need_skip(op_info, op_name):
continue
impl_str += self._gen_one_impl(op_info, op_name)
body = impl_str
Expand Down
23 changes: 1 addition & 22 deletions paddle/fluid/pir/dialect/operator/ir/manual_api.cc
Original file line number Diff line number Diff line change
Expand Up @@ -18,26 +18,5 @@
#include "paddle/pir/core/builtin_op.h"

namespace paddle {
namespace dialect {
pir::OpResult split_grad(std::vector<pir::OpResult> out_grads,
pir::OpResult axis) {
auto combine_op =
APIBuilder::Instance().GetBuilder()->Build<pir::CombineOp>(out_grads);
paddle::dialect::SplitGradOp split_grad_op =
APIBuilder::Instance().GetBuilder()->Build<paddle::dialect::SplitGradOp>(
combine_op.out(), axis);

return split_grad_op.x_grad();
}

pir::OpResult split_grad(std::vector<pir::OpResult> out_grads, int axis) {
auto combine_op =
APIBuilder::Instance().GetBuilder()->Build<pir::CombineOp>(out_grads);
paddle::dialect::SplitGradOp split_grad_op =
APIBuilder::Instance().GetBuilder()->Build<paddle::dialect::SplitGradOp>(
combine_op.out(), axis);

return split_grad_op.x_grad();
}
} // namespace dialect
namespace dialect {} // namespace dialect
} // namespace paddle
8 changes: 1 addition & 7 deletions paddle/fluid/pir/dialect/operator/ir/manual_api.h
Original file line number Diff line number Diff line change
Expand Up @@ -21,11 +21,5 @@
#include "paddle/pir/core/value.h"

namespace paddle {
namespace dialect {

pir::OpResult split_grad(std::vector<pir::OpResult> out_grads,
pir::OpResult axis);

pir::OpResult split_grad(std::vector<pir::OpResult> out_grads, int axis);
} // namespace dialect
namespace dialect {} // namespace dialect
} // namespace paddle
1 change: 1 addition & 0 deletions paddle/phi/api/yaml/op_compat.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -2638,6 +2638,7 @@
out : Out

- op : split
backward : split_grad
inputs:
x : X
outputs:
Expand Down