From 8a2041cd627569480e1801876110f660f9ad179e Mon Sep 17 00:00:00 2001 From: Ryan <44900829+DrRyanHuang@users.noreply.github.com> Date: Thu, 26 Oct 2023 13:02:12 +0000 Subject: [PATCH 1/8] tanhshrink, thresholded_relu, Selu, RRelu --- python/paddle/nn/functional/activation.py | 10 +++++----- test/legacy_test/test_activation_op.py | 6 +++++- test/legacy_test/test_maxout_op.py | 8 ++++++-- test/legacy_test/test_rrelu_op.py | 21 +++++++++++++++------ test/legacy_test/test_selu_op.py | 14 ++++++++++---- 5 files changed, 41 insertions(+), 18 deletions(-) diff --git a/python/paddle/nn/functional/activation.py b/python/paddle/nn/functional/activation.py index fa2b447860903..ba7138bc7573c 100644 --- a/python/paddle/nn/functional/activation.py +++ b/python/paddle/nn/functional/activation.py @@ -715,7 +715,7 @@ def rrelu(x, lower=1.0 / 8.0, upper=1.0 / 3.0, training=True, name=None): is_test = not training - if in_dynamic_mode(): + if in_dynamic_or_pir_mode(): return _C_ops.rrelu(x, lower, upper, is_test) else: check_variable_and_dtype( @@ -889,7 +889,7 @@ def maxout(x, groups, axis=1, name=None): [0.42400089, 0.40641287, 0.97020894, 0.74437362], [0.51785129, 0.73292869, 0.97786582, 0.92382854]]]]) """ - if in_dynamic_mode(): + if in_dynamic_or_pir_mode(): return _C_ops.maxout(x, groups, axis) else: check_variable_and_dtype( @@ -1010,7 +1010,7 @@ def selu( f"The alpha must be no less than zero. Received: {alpha}." ) - if in_dynamic_mode(): + if in_dynamic_or_pir_mode(): return _C_ops.selu(x, scale, alpha) else: check_variable_and_dtype( @@ -1536,7 +1536,7 @@ def tanhshrink(x, name=None): Tensor(shape=[4], dtype=float32, place=Place(cpu), stop_gradient=True, [-0.02005100, -0.00262472, 0.00033201, 0.00868741]) """ - if in_dynamic_mode(): + if in_dynamic_or_pir_mode(): return _C_ops.tanh_shrink(x) else: check_variable_and_dtype( @@ -1586,7 +1586,7 @@ def thresholded_relu(x, threshold=1.0, name=None): [2., 0., 0.]) """ - if in_dynamic_mode(): + if in_dynamic_or_pir_mode(): return _C_ops.thresholded_relu(x, threshold) else: check_variable_and_dtype( diff --git a/test/legacy_test/test_activation_op.py b/test/legacy_test/test_activation_op.py index 5cfdcca9983d0..4e67bf54811e9 100644 --- a/test/legacy_test/test_activation_op.py +++ b/test/legacy_test/test_activation_op.py @@ -1135,7 +1135,7 @@ def setUp(self): def test_check_grad(self): if self.dtype == np.float16: return - self.check_grad(['X'], 'Out') + self.check_grad(['X'], 'Out', check_pir=True) class TestTanhshrink_ZeroDim(TestTanhshrink): @@ -1154,6 +1154,7 @@ def setUp(self): else paddle.CPUPlace() ) + @test_with_pir_api def test_static_api(self): with static_guard(): with paddle.static.program_guard(paddle.static.Program()): @@ -1177,6 +1178,7 @@ def test_dygraph_api(self): for r in [out1, out2]: np.testing.assert_allclose(out_ref, r.numpy(), rtol=1e-05) + @test_with_pir_api def test_errors(self): with static_guard(): with paddle.static.program_guard(paddle.static.Program()): @@ -4227,6 +4229,7 @@ def setUp(self): else paddle.CPUPlace() ) + @test_with_pir_api def test_static_api(self): with static_guard(): with paddle.static.program_guard(paddle.static.Program()): @@ -4250,6 +4253,7 @@ def test_dygraph_api(self): for r in [out1, out2]: np.testing.assert_allclose(out_ref, r.numpy(), rtol=1e-05) + @test_with_pir_api def test_errors(self): with static_guard(): with paddle.static.program_guard(paddle.static.Program()): diff --git a/test/legacy_test/test_maxout_op.py b/test/legacy_test/test_maxout_op.py index 7f067ef62e058..f9220cbc6d0d7 100644 --- 
a/test/legacy_test/test_maxout_op.py +++ b/test/legacy_test/test_maxout_op.py @@ -20,6 +20,7 @@ import paddle import paddle.nn.functional as F from paddle.base import core +from paddle.pir_utils import test_with_pir_api paddle.enable_static() np.random.seed(1) @@ -57,10 +58,10 @@ def set_attrs(self): pass def test_check_output(self): - self.check_output() + self.check_output(check_pir=True) def test_check_grad(self): - self.check_grad(['X'], 'Out') + self.check_grad(['X'], 'Out', check_pir=True) class TestMaxOutOpAxis0(TestMaxOutOp): @@ -95,6 +96,7 @@ def setUp(self): else paddle.CPUPlace() ) + @test_with_pir_api def test_static_api(self): with paddle.static.program_guard(paddle.static.Program()): x = paddle.static.data('X', self.x_np.shape, self.x_np.dtype) @@ -122,6 +124,7 @@ def test_dygraph_api(self): np.testing.assert_allclose(out3_ref, out3.numpy(), rtol=1e-05) paddle.enable_static() + @test_with_pir_api def test_errors(self): with paddle.static.program_guard(paddle.static.Program()): # The input type must be Variable. @@ -161,6 +164,7 @@ def setUp(self): self.axis = 1 self.place = paddle.CUDAPlace(0) + @test_with_pir_api def test_static_api(self): with paddle.static.program_guard(paddle.static.Program()): x = paddle.static.data('X', self.x_np.shape, self.x_np.dtype) diff --git a/test/legacy_test/test_rrelu_op.py b/test/legacy_test/test_rrelu_op.py index e3b5e073d719a..4d205ba0e5358 100644 --- a/test/legacy_test/test_rrelu_op.py +++ b/test/legacy_test/test_rrelu_op.py @@ -21,6 +21,7 @@ import paddle.nn.functional as F from paddle import base from paddle.base import core, dygraph +from paddle.pir_utils import test_with_pir_api paddle.seed(102) np.random.seed(102) @@ -87,10 +88,12 @@ def check_static_result(self, place): ) np.testing.assert_allclose(fetches[0], res_np2, rtol=1e-05) + @test_with_pir_api def test_static(self): for place in self.places: self.check_static_result(place=place) + @test_with_pir_api def test_static_graph_functional(self): '''test_static_graph_functional''' @@ -134,6 +137,7 @@ def test_static_graph_functional(self): check_output(self.x_np, res_3[0], self.lower_1, self.upper_1) ) + @test_with_pir_api def test_static_graph_layer(self): '''test_static_graph_layer''' @@ -214,6 +218,7 @@ def test_dygraph(self): ) paddle.enable_static() + @test_with_pir_api def test_error_functional(self): paddle.enable_static() with paddle.static.program_guard(paddle.static.Program()): @@ -351,10 +356,10 @@ def convert_input_output(self): pass def test_check_output(self): - self.check_output(no_check_set=['Noise']) + self.check_output(no_check_set=['Noise'], check_pir=True) def test_check_grad(self): - self.check_grad(['X'], 'Out') + self.check_grad(['X'], 'Out', check_pir=True) class RReluTrainingTest(RReluTest): @@ -394,11 +399,13 @@ def convert_input_output(self): def test_check_output(self): place = core.CUDAPlace(0) - self.check_output_with_place(place, no_check_set=['Noise']) + self.check_output_with_place( + place, no_check_set=['Noise'], check_pir=True + ) def test_check_grad(self): place = core.CUDAPlace(0) - self.check_grad_with_place(place, ['X'], 'Out') + self.check_grad_with_place(place, ['X'], 'Out', check_pir=True) class RReluTrainingTestFP16OP(RReluTrainingTest): @@ -425,11 +432,13 @@ def convert_input_output(self): def test_check_output(self): place = core.CUDAPlace(0) - self.check_output_with_place(place, no_check_set=['Noise']) + self.check_output_with_place( + place, no_check_set=['Noise'], check_pir=True + ) def test_check_grad(self): place = core.CUDAPlace(0) - 
self.check_grad_with_place(place, ['X'], 'Out') + self.check_grad_with_place(place, ['X'], 'Out', check_pir=True) if __name__ == "__main__": diff --git a/test/legacy_test/test_selu_op.py b/test/legacy_test/test_selu_op.py index 52162111b85d8..93f1daf81a0a6 100644 --- a/test/legacy_test/test_selu_op.py +++ b/test/legacy_test/test_selu_op.py @@ -21,6 +21,7 @@ import paddle.nn.functional as F from paddle import base from paddle.base import core +from paddle.pir_utils import test_with_pir_api def ref_selu( @@ -79,10 +80,10 @@ def init_dtype(self): self.dtype = np.float64 def test_check_output(self): - self.check_output() + self.check_output(check_pir=True) def test_check_grad(self): - self.check_grad(['X'], 'Out') + self.check_grad(['X'], 'Out', check_pir=True) class SeluTestFP16OP(SeluTest): @@ -100,10 +101,12 @@ def init_dtype(self): self.dtype = np.uint16 def test_check_output(self): - self.check_output_with_place(core.CUDAPlace(0)) + self.check_output_with_place(core.CUDAPlace(0), check_pir=True) def test_check_grad(self): - self.check_grad_with_place(core.CUDAPlace(0), ['X'], 'Out') + self.check_grad_with_place( + core.CUDAPlace(0), ['X'], 'Out', check_pir=True + ) class TestSeluAPI(unittest.TestCase): @@ -121,6 +124,7 @@ def setUp(self): else paddle.CPUPlace() ) + @test_with_pir_api def test_static_api(self): with paddle.static.program_guard(paddle.static.Program()): x = paddle.static.data('X', self.x_np.shape, self.x_np.dtype) @@ -144,6 +148,7 @@ def test_dygraph_api(self): np.testing.assert_allclose(out_ref, r.numpy(), rtol=1e-05) paddle.enable_static() + @test_with_pir_api def test_base_api(self): with base.program_guard(base.Program()): x = paddle.static.data('X', self.x_np.shape, self.x_np.dtype) @@ -153,6 +158,7 @@ def test_base_api(self): out_ref = ref_selu(self.x_np, self.scale, self.alpha) np.testing.assert_allclose(out_ref, res[0], rtol=1e-05) + @test_with_pir_api def test_errors(self): with paddle.static.program_guard(paddle.static.Program()): # The input type must be Variable. 
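PATCH 1/8 above switches the eager-mode check in python/paddle/nn/functional/activation.py from in_dynamic_mode() to in_dynamic_or_pir_mode(), so rrelu, maxout, selu, tanhshrink and thresholded_relu take the _C_ops fast path under the new PIR static graph as well as in dynamic mode, and it turns on check_pir / @test_with_pir_api in the matching op and API tests. A condensed sketch of the dispatch pattern, with tanhshrink as the example (the LayerHelper fallback and the dtype checks are abridged from the real function, and the exact import paths here are assumptions):

    from paddle import _C_ops
    from paddle.base.framework import in_dynamic_or_pir_mode
    from paddle.base.layer_helper import LayerHelper

    def tanhshrink(x, name=None):
        # Dynamic graph and the new PIR static graph both dispatch straight
        # to the C++ kernel.
        if in_dynamic_or_pir_mode():
            return _C_ops.tanh_shrink(x)
        # The legacy static graph keeps the old append_op route.
        helper = LayerHelper('tanh_shrink', **locals())
        out = helper.create_variable_for_type_inference(dtype=x.dtype)
        helper.append_op(
            type='tanh_shrink', inputs={'X': x}, outputs={'Out': out}
        )
        return out

The other four activations follow the same shape: only the first branch of the if/else changes in this patch, the legacy branch is untouched.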
From f9743da5c2eabe1d0b3805e3c93decd06df78704 Mon Sep 17 00:00:00 2001 From: drryanhuang Date: Tue, 31 Oct 2023 14:35:27 +0000 Subject: [PATCH 2/8] remove test_with_pir_api --- test/legacy_test/test_activation_op.py | 2 -- test/legacy_test/test_maxout_op.py | 1 - test/legacy_test/test_rrelu_op.py | 1 - test/legacy_test/test_selu_op.py | 1 - 4 files changed, 5 deletions(-) diff --git a/test/legacy_test/test_activation_op.py b/test/legacy_test/test_activation_op.py index 4e67bf54811e9..c0fc9ab34a84e 100644 --- a/test/legacy_test/test_activation_op.py +++ b/test/legacy_test/test_activation_op.py @@ -1178,7 +1178,6 @@ def test_dygraph_api(self): for r in [out1, out2]: np.testing.assert_allclose(out_ref, r.numpy(), rtol=1e-05) - @test_with_pir_api def test_errors(self): with static_guard(): with paddle.static.program_guard(paddle.static.Program()): @@ -4253,7 +4252,6 @@ def test_dygraph_api(self): for r in [out1, out2]: np.testing.assert_allclose(out_ref, r.numpy(), rtol=1e-05) - @test_with_pir_api def test_errors(self): with static_guard(): with paddle.static.program_guard(paddle.static.Program()): diff --git a/test/legacy_test/test_maxout_op.py b/test/legacy_test/test_maxout_op.py index f9220cbc6d0d7..4b7f0cbfba6a1 100644 --- a/test/legacy_test/test_maxout_op.py +++ b/test/legacy_test/test_maxout_op.py @@ -124,7 +124,6 @@ def test_dygraph_api(self): np.testing.assert_allclose(out3_ref, out3.numpy(), rtol=1e-05) paddle.enable_static() - @test_with_pir_api def test_errors(self): with paddle.static.program_guard(paddle.static.Program()): # The input type must be Variable. diff --git a/test/legacy_test/test_rrelu_op.py b/test/legacy_test/test_rrelu_op.py index 4d205ba0e5358..14bfdc4cd1def 100644 --- a/test/legacy_test/test_rrelu_op.py +++ b/test/legacy_test/test_rrelu_op.py @@ -218,7 +218,6 @@ def test_dygraph(self): ) paddle.enable_static() - @test_with_pir_api def test_error_functional(self): paddle.enable_static() with paddle.static.program_guard(paddle.static.Program()): diff --git a/test/legacy_test/test_selu_op.py b/test/legacy_test/test_selu_op.py index 93f1daf81a0a6..4a3c86fba4bf9 100644 --- a/test/legacy_test/test_selu_op.py +++ b/test/legacy_test/test_selu_op.py @@ -158,7 +158,6 @@ def test_base_api(self): out_ref = ref_selu(self.x_np, self.scale, self.alpha) np.testing.assert_allclose(out_ref, res[0], rtol=1e-05) - @test_with_pir_api def test_errors(self): with paddle.static.program_guard(paddle.static.Program()): # The input type must be Variable. 
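PATCH 2/8 above walks back part of the first patch: @test_with_pir_api is removed from the error-path tests (test_errors in the tanhshrink/thresholded_relu/maxout/selu suites and test_error_functional for rrelu), leaving those bodies on the legacy IR only, presumably because they exercise legacy-only type checks. For orientation, a rough sketch of what a helper like paddle.pir_utils.test_with_pir_api provides; this is an approximation rather than the actual implementation, and the guard name is an assumption:

    import functools
    import paddle

    def run_also_under_pir(test_func):          # hypothetical name for the sketch
        @functools.wraps(test_func)
        def wrapper(self, *args, **kwargs):
            test_func(self, *args, **kwargs)    # legacy static graph run
            with paddle.pir_utils.IrGuard():    # assumed guard that enables PIR
                test_func(self, *args, **kwargs)  # same body re-run under PIR
        return wrapper

Because the decorated body runs twice, any test it stays on has to build a program that is valid under both IRs, which motivates the per-case Program isolation in the later patches of this series.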
From f098c799658f9c69ecb8cb05bc33a351f79e1c1b Mon Sep 17 00:00:00 2001 From: drryanhuang Date: Tue, 31 Oct 2023 15:59:41 +0000 Subject: [PATCH 3/8] remove test_with_pir_api --- test/legacy_test/test_rrelu_op.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/test/legacy_test/test_rrelu_op.py b/test/legacy_test/test_rrelu_op.py index 14bfdc4cd1def..354d1d005fbe4 100644 --- a/test/legacy_test/test_rrelu_op.py +++ b/test/legacy_test/test_rrelu_op.py @@ -21,7 +21,6 @@ import paddle.nn.functional as F from paddle import base from paddle.base import core, dygraph -from paddle.pir_utils import test_with_pir_api paddle.seed(102) np.random.seed(102) @@ -88,12 +87,10 @@ def check_static_result(self, place): ) np.testing.assert_allclose(fetches[0], res_np2, rtol=1e-05) - @test_with_pir_api def test_static(self): for place in self.places: self.check_static_result(place=place) - @test_with_pir_api def test_static_graph_functional(self): '''test_static_graph_functional''' @@ -137,7 +134,6 @@ def test_static_graph_functional(self): check_output(self.x_np, res_3[0], self.lower_1, self.upper_1) ) - @test_with_pir_api def test_static_graph_layer(self): '''test_static_graph_layer''' From cf7502aacde9c3006cfba6d13588661ddbc4fe2a Mon Sep 17 00:00:00 2001 From: drryanhuang Date: Sun, 5 Nov 2023 03:09:30 +0000 Subject: [PATCH 4/8] add some test --- test/legacy_test/test_activation_op.py | 12 +++++++++--- test/legacy_test/test_rrelu_op.py | 12 +++++------- 2 files changed, 14 insertions(+), 10 deletions(-) diff --git a/test/legacy_test/test_activation_op.py b/test/legacy_test/test_activation_op.py index 4a86f22a70e08..036d418f1ce4c 100644 --- a/test/legacy_test/test_activation_op.py +++ b/test/legacy_test/test_activation_op.py @@ -1143,6 +1143,9 @@ def test_check_grad(self): return self.check_grad(['X'], 'Out', check_pir=True) + def test_check_output(self): + self.check_output(check_pir=True) + class TestTanhshrink_ZeroDim(TestTanhshrink): def init_shape(self): @@ -4224,7 +4227,10 @@ def init_shape(self): def test_check_grad(self): if self.dtype == np.float16: return - self.check_grad(['X'], 'Out') + self.check_grad(['X'], 'Out', check_pir=True) + + def test_check_output(self): + self.check_output(check_pir=True) class TestThresholdedRelu_ZeroDim(TestThresholdedRelu): @@ -4700,7 +4706,7 @@ def test_check_grad(self): create_test_act_fp16_class( TestTanh, check_prim=True, check_prim_pir=True, enable_cinn=True ) -create_test_act_fp16_class(TestTanhshrink) +create_test_act_fp16_class(TestTanhshrink, check_pir=True) create_test_act_fp16_class(TestHardShrink) create_test_act_fp16_class(TestSoftshrink) create_test_act_fp16_class( @@ -4868,7 +4874,7 @@ def test_check_grad(self): create_test_act_bf16_class(TestSilu, check_prim=True, check_prim_pir=True) create_test_act_bf16_class(TestLogSigmoid) create_test_act_bf16_class(TestTanh, check_prim=True, check_prim_pir=True) -create_test_act_bf16_class(TestTanhshrink) +create_test_act_bf16_class(TestTanhshrink, check_pir=True) create_test_act_bf16_class(TestHardShrink) create_test_act_bf16_class(TestSoftshrink) create_test_act_bf16_class( diff --git a/test/legacy_test/test_rrelu_op.py b/test/legacy_test/test_rrelu_op.py index 354d1d005fbe4..8ce22c02cc1de 100644 --- a/test/legacy_test/test_rrelu_op.py +++ b/test/legacy_test/test_rrelu_op.py @@ -21,6 +21,7 @@ import paddle.nn.functional as F from paddle import base from paddle.base import core, dygraph +from paddle.pir_utils import test_with_pir_api paddle.seed(102) np.random.seed(102) @@ -56,6 +57,7 @@ def setUp(self): else 
base.CPUPlace() ] + @test_with_pir_api def check_static_result(self, place): with base.program_guard(base.Program(), base.Program()): input = paddle.static.data( @@ -72,7 +74,6 @@ def check_static_result(self, place): res_np1 = ref_rrelu(in_np, self.lower_0, self.upper_0) exe = base.Executor(place) fetches = exe.run( - base.default_main_program(), feed={"input": in_np}, fetch_list=[res1], ) @@ -81,7 +82,6 @@ def check_static_result(self, place): res_np2 = ref_rrelu(in_np, self.lower_1, self.upper_1) fetches = exe.run( - base.default_main_program(), feed={"input": in_np}, fetch_list=[res2], ) @@ -91,6 +91,7 @@ def test_static(self): for place in self.places: self.check_static_result(place=place) + @test_with_pir_api def test_static_graph_functional(self): '''test_static_graph_functional''' @@ -108,19 +109,16 @@ def test_static_graph_functional(self): exe = paddle.static.Executor(place=place) (res_1,) = exe.run( - base.default_main_program(), feed={"x": self.x_np}, fetch_list=out_1, use_prune=True, ) (res_2,) = exe.run( - base.default_main_program(), feed={"x2": self.x_np}, fetch_list=out_2, use_prune=True, ) (res_3,) = exe.run( - base.default_main_program(), feed={"x2": self.x_np}, fetch_list=out_3, use_prune=True, @@ -134,6 +132,7 @@ def test_static_graph_functional(self): check_output(self.x_np, res_3[0], self.lower_1, self.upper_1) ) + @test_with_pir_api def test_static_graph_layer(self): '''test_static_graph_layer''' @@ -153,13 +152,11 @@ def test_static_graph_layer(self): exe = paddle.static.Executor(place=place) res_1 = exe.run( - base.default_main_program(), feed={"x": self.x_np}, fetch_list=out_1, use_prune=True, ) res_2 = exe.run( - base.default_main_program(), feed={"x2": self.x_np}, fetch_list=out_2, use_prune=True, @@ -214,6 +211,7 @@ def test_dygraph(self): ) paddle.enable_static() + @test_with_pir_api def test_error_functional(self): paddle.enable_static() with paddle.static.program_guard(paddle.static.Program()): From a52ba2db260d4b3204c366fb04933a0c5e1c6d2e Mon Sep 17 00:00:00 2001 From: drryanhuang Date: Sun, 5 Nov 2023 08:12:31 +0000 Subject: [PATCH 5/8] add program_guard context manager --- test/legacy_test/test_rrelu_op.py | 144 ++++++++++++++++-------------- 1 file changed, 78 insertions(+), 66 deletions(-) diff --git a/test/legacy_test/test_rrelu_op.py b/test/legacy_test/test_rrelu_op.py index 8ce22c02cc1de..838bbcd2522b3 100644 --- a/test/legacy_test/test_rrelu_op.py +++ b/test/legacy_test/test_rrelu_op.py @@ -95,79 +95,91 @@ def test_static(self): def test_static_graph_functional(self): '''test_static_graph_functional''' - for place in self.places: - paddle.enable_static() - x_1 = paddle.static.data( - name="x", shape=self.x_np.shape, dtype="float64" - ) - x_2 = paddle.static.data( - name="x2", shape=self.x_np.shape, dtype="float64" - ) - out_1 = F.rrelu(x_1, self.lower_0, self.upper_0, training=False) - out_2 = F.rrelu(x_2, self.lower_1, self.upper_1, training=False) - out_3 = F.rrelu(x_2, self.lower_1, self.upper_1, training=True) - - exe = paddle.static.Executor(place=place) - (res_1,) = exe.run( - feed={"x": self.x_np}, - fetch_list=out_1, - use_prune=True, - ) - (res_2,) = exe.run( - feed={"x2": self.x_np}, - fetch_list=out_2, - use_prune=True, - ) - (res_3,) = exe.run( - feed={"x2": self.x_np}, - fetch_list=out_3, - use_prune=True, - ) + main = paddle.static.Program() + startup = paddle.static.Program() + with paddle.static.program_guard(main, startup): + for place in self.places: + paddle.enable_static() + x_1 = paddle.static.data( + name="x", 
shape=self.x_np.shape, dtype="float64" + ) + x_2 = paddle.static.data( + name="x2", shape=self.x_np.shape, dtype="float64" + ) + out_1 = F.rrelu(x_1, self.lower_0, self.upper_0, training=False) + out_2 = F.rrelu(x_2, self.lower_1, self.upper_1, training=False) + out_3 = F.rrelu(x_2, self.lower_1, self.upper_1, training=True) + + exe = paddle.static.Executor(place=place) + (res_1,) = exe.run( + feed={"x": self.x_np}, + fetch_list=out_1, + use_prune=True, + ) + (res_2,) = exe.run( + feed={"x2": self.x_np}, + fetch_list=out_2, + use_prune=True, + ) + (res_3,) = exe.run( + feed={"x2": self.x_np}, + fetch_list=out_3, + use_prune=True, + ) - out_ref_1 = ref_rrelu(self.x_np, self.lower_0, self.upper_0) - out_ref_2 = ref_rrelu(self.x_np, self.lower_1, self.upper_1) - np.testing.assert_allclose(out_ref_1, res_1, rtol=1e-05) - np.testing.assert_allclose(out_ref_2, res_2, rtol=1e-05) - self.assertTrue( - check_output(self.x_np, res_3[0], self.lower_1, self.upper_1) - ) + out_ref_1 = ref_rrelu(self.x_np, self.lower_0, self.upper_0) + out_ref_2 = ref_rrelu(self.x_np, self.lower_1, self.upper_1) + np.testing.assert_allclose(out_ref_1, res_1, rtol=1e-05) + np.testing.assert_allclose(out_ref_2, res_2, rtol=1e-05) + self.assertTrue( + check_output( + self.x_np, res_3[0], self.lower_1, self.upper_1 + ) + ) @test_with_pir_api def test_static_graph_layer(self): '''test_static_graph_layer''' - for place in self.places: - paddle.enable_static() - x_1 = paddle.static.data( - name="x", shape=self.x_np.shape, dtype="float64" - ) - x_2 = paddle.static.data( - name="x2", shape=self.x_np.shape, dtype="float64" - ) - # init instance - rrelu_1 = paddle.nn.RReLU(self.lower_0, self.upper_0) - rrelu_2 = paddle.nn.RReLU(self.lower_1, self.upper_1) - out_1 = rrelu_1(x_1) - out_2 = rrelu_2(x_2) - - exe = paddle.static.Executor(place=place) - res_1 = exe.run( - feed={"x": self.x_np}, - fetch_list=out_1, - use_prune=True, - ) - res_2 = exe.run( - feed={"x2": self.x_np}, - fetch_list=out_2, - use_prune=True, - ) + main = paddle.static.Program() + startup = paddle.static.Program() + with paddle.static.program_guard(main, startup): + for place in self.places: + paddle.enable_static() + x_1 = paddle.static.data( + name="x", shape=self.x_np.shape, dtype="float64" + ) + x_2 = paddle.static.data( + name="x2", shape=self.x_np.shape, dtype="float64" + ) + # init instance + rrelu_1 = paddle.nn.RReLU(self.lower_0, self.upper_0) + rrelu_2 = paddle.nn.RReLU(self.lower_1, self.upper_1) + out_1 = rrelu_1(x_1) + out_2 = rrelu_2(x_2) + + exe = paddle.static.Executor(place=place) + res_1 = exe.run( + feed={"x": self.x_np}, + fetch_list=out_1, + use_prune=True, + ) + res_2 = exe.run( + feed={"x2": self.x_np}, + fetch_list=out_2, + use_prune=True, + ) - self.assertTrue( - check_output(self.x_np, res_1[0], self.lower_0, self.upper_0) - ) - self.assertTrue( - check_output(self.x_np, res_2[0], self.lower_1, self.upper_1) - ) + self.assertTrue( + check_output( + self.x_np, res_1[0], self.lower_0, self.upper_0 + ) + ) + self.assertTrue( + check_output( + self.x_np, res_2[0], self.lower_1, self.upper_1 + ) + ) def dygraph_check(self, lower, upper): for place in self.places: From f0fec98130c4f8f3bc5e7ac9c7cea517d049926d Mon Sep 17 00:00:00 2001 From: drryanhuang Date: Sun, 5 Nov 2023 14:24:20 +0000 Subject: [PATCH 6/8] add static.Program --- test/legacy_test/test_rrelu_op.py | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/test/legacy_test/test_rrelu_op.py b/test/legacy_test/test_rrelu_op.py index 838bbcd2522b3..d24c7794ec257 100644 
--- a/test/legacy_test/test_rrelu_op.py +++ b/test/legacy_test/test_rrelu_op.py @@ -59,7 +59,9 @@ def setUp(self): @test_with_pir_api def check_static_result(self, place): - with base.program_guard(base.Program(), base.Program()): + main = paddle.static.Program() + startup = paddle.static.Program() + with paddle.static.program_guard(main, startup): input = paddle.static.data( name="input", shape=[2, 3, 4, 5], dtype="float32" ) From cb00908641e3b97c12f8be56f8e665507ec501ff Mon Sep 17 00:00:00 2001 From: DrRyanHuang Date: Fri, 10 Nov 2023 01:50:55 +0000 Subject: [PATCH 7/8] mv for-loop out && rm @test_with_pir_api && split program_guard --- test/legacy_test/test_rrelu_op.py | 107 +++++++++++++++++++++--------- 1 file changed, 74 insertions(+), 33 deletions(-) diff --git a/test/legacy_test/test_rrelu_op.py b/test/legacy_test/test_rrelu_op.py index d24c7794ec257..7c8067914fd91 100644 --- a/test/legacy_test/test_rrelu_op.py +++ b/test/legacy_test/test_rrelu_op.py @@ -59,18 +59,15 @@ def setUp(self): @test_with_pir_api def check_static_result(self, place): - main = paddle.static.Program() - startup = paddle.static.Program() - with paddle.static.program_guard(main, startup): + with paddle.static.program_guard( + paddle.static.Program(), paddle.static.Program() + ): input = paddle.static.data( name="input", shape=[2, 3, 4, 5], dtype="float32" ) res1 = F.rrelu( x=input, lower=self.lower_0, upper=self.upper_0, training=False ) - res2 = F.rrelu( - x=input, lower=self.lower_1, upper=self.upper_1, training=False - ) in_np = np.random.uniform(-1.0, 1.0, [2, 3, 4, 5]).astype("float32") res_np1 = ref_rrelu(in_np, self.lower_0, self.upper_0) @@ -82,6 +79,20 @@ def check_static_result(self, place): np.testing.assert_allclose(fetches[0], res_np1, rtol=1e-05) + with paddle.static.program_guard( + paddle.static.Program(), paddle.static.Program() + ): + input = paddle.static.data( + name="input", shape=[2, 3, 4, 5], dtype="float32" + ) + + res2 = F.rrelu( + x=input, lower=self.lower_1, upper=self.upper_1, training=False + ) + in_np = np.random.uniform(-1.0, 1.0, [2, 3, 4, 5]).astype("float32") + + exe = base.Executor(place) + res_np2 = ref_rrelu(in_np, self.lower_1, self.upper_1) fetches = exe.run( feed={"input": in_np}, @@ -99,18 +110,13 @@ def test_static_graph_functional(self): main = paddle.static.Program() startup = paddle.static.Program() - with paddle.static.program_guard(main, startup): - for place in self.places: + for place in self.places: + with paddle.static.program_guard(main, startup): paddle.enable_static() x_1 = paddle.static.data( name="x", shape=self.x_np.shape, dtype="float64" ) - x_2 = paddle.static.data( - name="x2", shape=self.x_np.shape, dtype="float64" - ) out_1 = F.rrelu(x_1, self.lower_0, self.upper_0, training=False) - out_2 = F.rrelu(x_2, self.lower_1, self.upper_1, training=False) - out_3 = F.rrelu(x_2, self.lower_1, self.upper_1, training=True) exe = paddle.static.Executor(place=place) (res_1,) = exe.run( @@ -118,21 +124,47 @@ def test_static_graph_functional(self): fetch_list=out_1, use_prune=True, ) + + out_ref_1 = ref_rrelu(self.x_np, self.lower_0, self.upper_0) + np.testing.assert_allclose(out_ref_1, res_1, rtol=1e-05) + + with paddle.static.program_guard(main, startup): + paddle.enable_static() + + x_2 = paddle.static.data( + name="x2", shape=self.x_np.shape, dtype="float64" + ) + out_2 = F.rrelu(x_2, self.lower_1, self.upper_1, training=False) + + exe = paddle.static.Executor(place=place) + (res_2,) = exe.run( feed={"x2": self.x_np}, fetch_list=out_2, use_prune=True, ) + + 
out_ref_2 = ref_rrelu(self.x_np, self.lower_1, self.upper_1) + + np.testing.assert_allclose(out_ref_2, res_2, rtol=1e-05) + + with paddle.static.program_guard(main, startup): + paddle.enable_static() + + x_2 = paddle.static.data( + name="x2", shape=self.x_np.shape, dtype="float64" + ) + + out_3 = F.rrelu(x_2, self.lower_1, self.upper_1, training=True) + + exe = paddle.static.Executor(place=place) + (res_3,) = exe.run( feed={"x2": self.x_np}, fetch_list=out_3, use_prune=True, ) - out_ref_1 = ref_rrelu(self.x_np, self.lower_0, self.upper_0) - out_ref_2 = ref_rrelu(self.x_np, self.lower_1, self.upper_1) - np.testing.assert_allclose(out_ref_1, res_1, rtol=1e-05) - np.testing.assert_allclose(out_ref_2, res_2, rtol=1e-05) self.assertTrue( check_output( self.x_np, res_3[0], self.lower_1, self.upper_1 @@ -143,22 +175,18 @@ def test_static_graph_functional(self): def test_static_graph_layer(self): '''test_static_graph_layer''' - main = paddle.static.Program() - startup = paddle.static.Program() - with paddle.static.program_guard(main, startup): - for place in self.places: - paddle.enable_static() + paddle.enable_static() + for place in self.places: + with paddle.static.program_guard( + paddle.static.Program(), paddle.static.Program() + ): x_1 = paddle.static.data( name="x", shape=self.x_np.shape, dtype="float64" ) - x_2 = paddle.static.data( - name="x2", shape=self.x_np.shape, dtype="float64" - ) + # init instance rrelu_1 = paddle.nn.RReLU(self.lower_0, self.upper_0) - rrelu_2 = paddle.nn.RReLU(self.lower_1, self.upper_1) out_1 = rrelu_1(x_1) - out_2 = rrelu_2(x_2) exe = paddle.static.Executor(place=place) res_1 = exe.run( @@ -166,17 +194,31 @@ def test_static_graph_layer(self): fetch_list=out_1, use_prune=True, ) - res_2 = exe.run( - feed={"x2": self.x_np}, - fetch_list=out_2, - use_prune=True, - ) self.assertTrue( check_output( self.x_np, res_1[0], self.lower_0, self.upper_0 ) ) + + with paddle.static.program_guard( + paddle.static.Program(), paddle.static.Program() + ): + x_2 = paddle.static.data( + name="x2", shape=self.x_np.shape, dtype="float64" + ) + # init instance + rrelu_2 = paddle.nn.RReLU(self.lower_1, self.upper_1) + out_2 = rrelu_2(x_2) + + exe = paddle.static.Executor(place=place) + + res_2 = exe.run( + feed={"x2": self.x_np}, + fetch_list=out_2, + use_prune=True, + ) + self.assertTrue( check_output( self.x_np, res_2[0], self.lower_1, self.upper_1 @@ -225,7 +267,6 @@ def test_dygraph(self): ) paddle.enable_static() - @test_with_pir_api def test_error_functional(self): paddle.enable_static() with paddle.static.program_guard(paddle.static.Program()): From b0f98df745226689e7aabe455530559544a82ec7 Mon Sep 17 00:00:00 2001 From: DrRyanHuang Date: Sat, 11 Nov 2023 10:25:58 +0000 Subject: [PATCH 8/8] replace paddle.static.Program() --- test/legacy_test/test_rrelu_op.py | 14 +++++++++----- 1 file changed, 9 insertions(+), 5 deletions(-) diff --git a/test/legacy_test/test_rrelu_op.py b/test/legacy_test/test_rrelu_op.py index 7c8067914fd91..79ab7a65f3e03 100644 --- a/test/legacy_test/test_rrelu_op.py +++ b/test/legacy_test/test_rrelu_op.py @@ -108,10 +108,10 @@ def test_static(self): def test_static_graph_functional(self): '''test_static_graph_functional''' - main = paddle.static.Program() - startup = paddle.static.Program() for place in self.places: - with paddle.static.program_guard(main, startup): + with paddle.static.program_guard( + paddle.static.Program(), paddle.static.Program() + ): paddle.enable_static() x_1 = paddle.static.data( name="x", shape=self.x_np.shape, dtype="float64" @@ 
-128,7 +128,9 @@ def test_static_graph_functional(self): out_ref_1 = ref_rrelu(self.x_np, self.lower_0, self.upper_0) np.testing.assert_allclose(out_ref_1, res_1, rtol=1e-05) - with paddle.static.program_guard(main, startup): + with paddle.static.program_guard( + paddle.static.Program(), paddle.static.Program() + ): paddle.enable_static() x_2 = paddle.static.data( @@ -148,7 +150,9 @@ def test_static_graph_functional(self): np.testing.assert_allclose(out_ref_2, res_2, rtol=1e-05) - with paddle.static.program_guard(main, startup): + with paddle.static.program_guard( + paddle.static.Program(), paddle.static.Program() + ): paddle.enable_static() x_2 = paddle.static.data(
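Patches 5/8 through 8/8 converge the rrelu static-graph tests on one pattern: drop the explicit base.default_main_program() argument to exe.run, move the place loop outside the guard, and give every sub-case its own pair of fresh paddle.static.Program() objects instead of a shared main/startup pair, presumably so nothing built for the legacy-IR run leaks into the PIR re-run triggered by @test_with_pir_api. A minimal sketch of the resulting per-case shape, using the same F.rrelu call as the tests (the input values and the CPU place are illustrative):

    import numpy as np
    import paddle
    import paddle.nn.functional as F

    paddle.enable_static()
    x_np = np.random.uniform(-1.0, 1.0, [2, 3, 4, 5]).astype("float32")

    # Each sub-case builds and runs inside its own main/startup Program pair.
    with paddle.static.program_guard(
        paddle.static.Program(), paddle.static.Program()
    ):
        x = paddle.static.data("x", x_np.shape, x_np.dtype)
        out = F.rrelu(x, lower=1.0 / 8.0, upper=1.0 / 3.0, training=False)
        exe = paddle.static.Executor(paddle.CPUPlace())
        # No program argument: exe.run picks up the guarded main program.
        (res,) = exe.run(feed={"x": x_np}, fetch_list=[out])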