Skip to content

Commit

Permalink
[Prim]fix attrs loss in creating op (#50780)
Browse files Browse the repository at this point in the history
* fix attrs loss in creating op

* add comment

* add case

* add case

* remove unused case setting
  • Loading branch information
cyber-pioneer authored Feb 24, 2023
1 parent 2be69d0 commit 016f5ec
Show file tree
Hide file tree
Showing 6 changed files with 111 additions and 7 deletions.
11 changes: 11 additions & 0 deletions python/paddle/fluid/framework.py
Original file line number Diff line number Diff line change
Expand Up @@ -2886,6 +2886,8 @@ def __init__(
self._type = type
self.attrs = attrs if attrs else {}
else:
self.legacy_attrs = attrs if attrs else {}

self.block = block
self.desc = desc
# note: not add self.attrs here:
Expand Down Expand Up @@ -3083,13 +3085,22 @@ def find_name(var_list, name):
)

self.desc.check_attrs()

# Record all attrs needed when creating the op.
for item in self.desc.attr_names():
self.legacy_attrs[item] = self.desc.attr(item)

if self._has_kernel(type):
self.desc.infer_var_type(self.block.desc)
self.desc.infer_shape(self.block.desc)

def _has_kernel(self, op_type):
    """Return True when *op_type* is backed by a compute kernel.

    Ops listed in ``OP_WITHOUT_KERNEL_SET`` have no kernel, and callers
    use this check to skip kernel-only steps such as ``infer_var_type``
    and ``infer_shape``.
    """
    kernel_free_ops = self.OP_WITHOUT_KERNEL_SET
    return not (op_type in kernel_free_ops)

def _get_runtime_attrs(self):
    """Return every attr recorded when this op was created.

    Only intended for the ``to_prim`` process, which needs the full set
    of creation-time attrs (``desc.attr_names`` is presumably narrower
    at that point — see the recording loop in ``__init__``).
    """
    recorded_attrs = self.legacy_attrs
    return recorded_attrs

def to_string(self, throw_on_error):
"""
Get debug string.
Expand Down
1 change: 1 addition & 0 deletions python/paddle/fluid/tests/unittests/prim/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -11,3 +11,4 @@ endforeach()
add_subdirectory(prim)
add_subdirectory(model)
add_subdirectory(composite_ops)
add_subdirectory(process)
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,7 @@ file(
RELATIVE "${CMAKE_CURRENT_SOURCE_DIR}"
"test_*.py")

file(
GLOB TEST_OPS_GRAD
RELATIVE "${CMAKE_CURRENT_SOURCE_DIR}"
"test_*_grad.py")
string(REPLACE ".py" "" TEST_OPS "${TEST_OPS}")
string(REPLACE ".py" "" TEST_OPS_GRAD "${TEST_OPS_GRAD}")

foreach(TEST_OP ${TEST_OPS})
py_test_modules(${TEST_OP} MODULES ${TEST_OP} ENVS ${GC_ENVS})
Expand Down
10 changes: 10 additions & 0 deletions python/paddle/fluid/tests/unittests/prim/process/CMakeLists.txt
Original file line number Diff line number Diff line change
@@ -0,0 +1,10 @@
# Discover every test_*.py file in this directory (paths kept relative so
# the module name below is just the file stem).
file(
GLOB TEST_OPS
RELATIVE "${CMAKE_CURRENT_SOURCE_DIR}"
"test_*.py")

# Strip the ".py" suffix so each entry can serve as a test/module name.
string(REPLACE ".py" "" TEST_OPS "${TEST_OPS}")

# Register one py_test target per discovered test module, passing along the
# shared garbage-collection environment variables.
foreach(TEST_OP ${TEST_OPS})
py_test_modules(${TEST_OP} MODULES ${TEST_OP} ENVS ${GC_ENVS})
endforeach()
83 changes: 83 additions & 0 deletions python/paddle/fluid/tests/unittests/prim/process/test_copy_op.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,83 @@
# Copyright (c) 2023 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

import numpy as np

import paddle
from paddle.fluid import core

paddle.framework.random._manual_program_seed(2023)


def fn(x):
    """Feed ``x`` through two stacked dropout layers (p=0.5, then p=0.6)."""
    first_drop = paddle.nn.Dropout(p=0.5)
    second_drop = paddle.nn.Dropout(p=0.6)
    return second_drop(first_drop(x))


class TestCompositeCopyOp(unittest.TestCase):
    """Checks that a blacklisted op is copied intact by to_prim, even though
    some attrs of the origin op were blocked while the program was built."""

    def cal_composite(self, inputs):
        """Build a static program for ``fn``, run to_prim, execute, and
        return the fetched results as a list of numpy arrays."""
        paddle.enable_static()
        core._set_prim_forward_enabled(True)
        startup_program = paddle.static.Program()
        main_program = paddle.static.Program()
        with paddle.static.program_guard(main_program, startup_program):
            feed_var = paddle.static.data(
                'x', shape=inputs.shape, dtype=str(inputs.dtype)
            )
            out = fn(feed_var)
            program_blocks = main_program.blocks

            ops_before = [op.type for op in program_blocks[0].ops]
            # dropout must be present in the original block
            self.assertTrue('dropout' in ops_before)

            paddle.incubate.autograd.to_prim(program_blocks)

            ops_after = [op.type for op in program_blocks[0].ops]
            # dropout is blacklisted, so it must survive the lowering
            # un-split (i.e. still appear as a whole op)
            self.assertTrue('dropout' in ops_after)

        exe = paddle.static.Executor()
        exe.run(startup_program)
        res = exe.run(main_program, feed={'x': inputs}, fetch_list=[out])
        # Restore global state for later tests.
        paddle.disable_static()
        core._set_prim_forward_enabled(False)
        return res

    def test_forward(self):
        # Block dropout from being lowered so the op-copy path is exercised.
        core._set_prim_forward_blacklist("dropout")
        np_data = np.random.random([16, 64, 128, 128]).astype("float32")
        tensor_data = paddle.to_tensor(np_data)

        expect = fn(tensor_data).numpy()
        actual = self.cal_composite(np_data)[0]

        assert expect.dtype == actual.dtype
        # Exact equality is required: both paths run the same seeded op.
        np.testing.assert_allclose(
            expect,
            actual,
            rtol=0,
            atol=0,
        )


if __name__ == '__main__':
unittest.main()
8 changes: 6 additions & 2 deletions python/paddle/incubate/autograd/primx.py
Original file line number Diff line number Diff line change
Expand Up @@ -656,8 +656,12 @@ def expand_nested_list(xs):
outputs[op.output_names[i]] = op.output(op.output_names[i])

attrs = {}
for name in sorted(op.attr_names):
attrs[name] = op.attr(name)
# When copying an op, all attrs defined in its api should be kept, but op.attr_names is not complete here.
# Thus, all attrs should be taken from the init attrs of the origin op.
runtime_attrs = op._get_runtime_attrs()
for name in runtime_attrs.keys():
attrs[name] = runtime_attrs[name]

from paddle.fluid.dygraph.base import param_guard

new_op_desc = block.desc.append_op()
Expand Down

0 comments on commit 016f5ec

Please sign in to comment.