From f0c00d042e5a56bf7557e3609951210cc7c06d33 Mon Sep 17 00:00:00 2001
From: ooooo <3164076421@qq.com>
Date: Mon, 6 Nov 2023 21:23:19 +0800
Subject: [PATCH 1/8] fix

---
 python/paddle/vision/ops.py                   | 6 +++---
 test/legacy_test/test_nms_op.py               | 2 +-
 test/legacy_test/test_nn_functional_hot_op.py | 8 ++++----
 test/legacy_test/test_one_hot_v2_op.py        | 8 ++++----
 test/legacy_test/test_ops_nms.py              | 3 +++
 5 files changed, 15 insertions(+), 12 deletions(-)

diff --git a/python/paddle/vision/ops.py b/python/paddle/vision/ops.py
index 5a8b433cea52e..47ed79f9818c7 100755
--- a/python/paddle/vision/ops.py
+++ b/python/paddle/vision/ops.py
@@ -20,7 +20,7 @@
 from ..base import core
 from ..base.data_feeder import check_type, check_variable_and_dtype
-from ..base.framework import Variable, in_dygraph_mode
+from ..base.framework import Variable, in_dygraph_mode, in_dynamic_or_pir_mode
 from ..base.layer_helper import LayerHelper
 from ..framework import _current_expected_place
 from ..nn import BatchNorm2D, Conv2D, Layer, ReLU, Sequential
@@ -1937,7 +1937,7 @@ def nms(
     """

     def _nms(boxes, iou_threshold):
-        if in_dygraph_mode():
+        if in_dynamic_or_pir_mode():
             return _C_ops.nms(boxes, iou_threshold)

         else:
@@ -2017,7 +2017,7 @@ def _nms(boxes, iou_threshold):
     if top_k is None:
         return keep_boxes_idxs[sorted_sub_indices]

-    if in_dygraph_mode():
+    if in_dynamic_or_pir_mode():
         top_k = shape if shape < top_k else top_k
         _, topk_sub_indices = paddle.topk(scores[keep_boxes_idxs], top_k)
         return keep_boxes_idxs[topk_sub_indices]

diff --git a/test/legacy_test/test_nms_op.py b/test/legacy_test/test_nms_op.py
index 68715db80d578..2c1951039ce72 100755
--- a/test/legacy_test/test_nms_op.py
+++ b/test/legacy_test/test_nms_op.py
@@ -90,7 +90,7 @@ def init_dtype_type(self):
         pass

     def test_check_output(self):
-        self.check_output()
+        self.check_output(check_pir=True)


 if __name__ == "__main__":

diff --git a/test/legacy_test/test_nn_functional_hot_op.py b/test/legacy_test/test_nn_functional_hot_op.py
index 89a140a1f031c..6ae5249eda992 100755
--- a/test/legacy_test/test_nn_functional_hot_op.py
+++ b/test/legacy_test/test_nn_functional_hot_op.py
@@ -43,7 +43,7 @@ def setUp(self):
         self.outputs = {'Out': (out, x_lod)}

     def test_check_output(self):
-        self.check_output(check_dygraph=False)
+        self.check_output(check_dygraph=False, check_pir=True)


 class TestOneHotOp_attr(OpTest):
@@ -67,7 +67,7 @@ def setUp(self):
         self.outputs = {'Out': (out, x_lod)}

     def test_check_output(self):
-        self.check_output(check_dygraph=False)
+        self.check_output(check_dygraph=False, check_pir=True)


 class TestOneHotOp_default_dtype(OpTest):
@@ -90,7 +90,7 @@ def setUp(self):
         self.outputs = {'Out': (out, x_lod)}

     def test_check_output(self):
-        self.check_output(check_dygraph=False)
+        self.check_output(check_dygraph=False, check_pir=True)


 class TestOneHotOp_default_dtype_attr(OpTest):
@@ -114,7 +114,7 @@ def setUp(self):
         self.outputs = {'Out': (out, x_lod)}

     def test_check_output(self):
-        self.check_output(check_dygraph=False)
+        self.check_output(check_dygraph=False, check_pir=True)


 class TestOneHotOpApi(unittest.TestCase):

diff --git a/test/legacy_test/test_one_hot_v2_op.py b/test/legacy_test/test_one_hot_v2_op.py
index 2116b2d494dc2..9e30a9133c0ec 100644
--- a/test/legacy_test/test_one_hot_v2_op.py
+++ b/test/legacy_test/test_one_hot_v2_op.py
@@ -48,7 +48,7 @@ def setUp(self):
         self.outputs = {'Out': (out, x_lod)}

     def test_check_output(self):
-        self.check_output(check_cinn=True)
+        self.check_output(check_cinn=True, check_pir=True)


 class TestOneHotOp_attr(OpTest):
@@ -74,7 +74,7 @@ def setUp(self):
         self.outputs = {'Out': (out, x_lod)}

     def test_check_output(self):
-        self.check_output(check_cinn=True)
+        self.check_output(check_cinn=True, check_pir=True)


 class TestOneHotOp_default_dtype(OpTest):
@@ -98,7 +98,7 @@ def setUp(self):
         self.outputs = {'Out': (out, x_lod)}

     def test_check_output(self):
-        self.check_output(check_cinn=True)
+        self.check_output(check_cinn=True, check_pir=True)


 class TestOneHotOp_default_dtype_attr(OpTest):
@@ -124,7 +124,7 @@ def setUp(self):
         self.outputs = {'Out': (out, x_lod)}

     def test_check_output(self):
-        self.check_output()
+        self.check_output(check_pir=True)


 class TestOneHotOpApi(unittest.TestCase):

diff --git a/test/legacy_test/test_ops_nms.py b/test/legacy_test/test_ops_nms.py
index 0e6a5d9545543..984d957366419 100644
--- a/test/legacy_test/test_ops_nms.py
+++ b/test/legacy_test/test_ops_nms.py
@@ -20,6 +20,7 @@
 from test_nms_op import nms

 import paddle
+from paddle.pir_utils import test_with_pir_api


 def _find(condition):
@@ -138,6 +139,7 @@ def test_multiclass_nms_dynamic(self):
             err_msg=f'paddle out: {out}\n py out: {out_py}\n',
         )

+    @test_with_pir_api
     def test_multiclass_nms_static(self):
         for device in self.devices:
             for dtype in self.dtypes:
@@ -189,6 +191,7 @@ def test_multiclass_nms_static(self):
             err_msg=f'paddle out: {out}\n py out: {out_py}\n',
         )

+    @test_with_pir_api
     def test_multiclass_nms_dynamic_to_static(self):
         for device in self.devices:
             for dtype in self.dtypes:

From c8c8c2554904cfc876f5e86975b07030ad314b2f Mon Sep 17 00:00:00 2001
From: ooooo <3164076421@qq.com>
Date: Thu, 9 Nov 2023 12:49:49 +0800
Subject: [PATCH 2/8] update setup

---
 test/legacy_test/test_nn_functional_hot_op.py | 8 ++++++++
 1 file changed, 8 insertions(+)

diff --git a/test/legacy_test/test_nn_functional_hot_op.py b/test/legacy_test/test_nn_functional_hot_op.py
index 6ae5249eda992..85ad617ee9f3a 100644
--- a/test/legacy_test/test_nn_functional_hot_op.py
+++ b/test/legacy_test/test_nn_functional_hot_op.py
@@ -23,9 +23,14 @@
 from paddle.nn import functional


+def one_hot_wrapper(x, depth_tensor, **keargs):
+    return paddle.nn.functional.one_hot(x, depth_tensor)
+
+
 class TestOneHotOp(OpTest):
     def setUp(self):
         self.op_type = 'one_hot_v2'
+        self.python_api = one_hot_wrapper
         depth = 10
         depth_np = np.array(10).astype('int32')
         dimension = 12
@@ -49,6 +54,7 @@ def test_check_output(self):
 class TestOneHotOp_attr(OpTest):
     def setUp(self):
         self.op_type = 'one_hot_v2'
+        self.python_api = one_hot_wrapper
         depth = 10
         dimension = 12
         x_lod = [[4, 1, 3, 3]]
@@ -73,6 +79,7 @@ def setUp(self):
 class TestOneHotOp_default_dtype(OpTest):
     def setUp(self):
         self.op_type = 'one_hot_v2'
+        self.python_api = one_hot_wrapper
         depth = 10
         depth_np = np.array(10).astype('int32')
         dimension = 12
@@ -96,6 +103,7 @@ def setUp(self):
 class TestOneHotOp_default_dtype_attr(OpTest):
     def setUp(self):
         self.op_type = 'one_hot_v2'
+        self.python_api = one_hot_wrapper
         depth = 10
         dimension = 12
         x_lod = [[4, 1, 3, 3]]

From 7dbb662a74be1fc2e4f4f38b3db69e803c547f1d Mon Sep 17 00:00:00 2001
From: ooooo <3164076421@qq.com>
Date: Thu, 9 Nov 2023 20:29:26 +0800
Subject: [PATCH 3/8] fix

---
 test/legacy_test/test_nn_functional_hot_op.py | 18 +++++++++------
 test/legacy_test/test_one_hot_v2_op.py        | 22 +++++++++++--------
 2 files changed, 24 insertions(+), 16 deletions(-)

diff --git a/test/legacy_test/test_nn_functional_hot_op.py b/test/legacy_test/test_nn_functional_hot_op.py
index 85ad617ee9f3a..4ae321a128974 100644
--- a/test/legacy_test/test_nn_functional_hot_op.py
+++ b/test/legacy_test/test_nn_functional_hot_op.py
@@ -21,6 +21,7 @@
 from paddle import base
 from paddle.base import core
 from paddle.nn import functional
+from paddle.pir_utils import test_with_pir_api


 def one_hot_wrapper(x, depth_tensor, **keargs):
@@ -48,7 +49,7 @@ def setUp(self):
         self.outputs = {'Out': (out, x_lod)}

     def test_check_output(self):
-        self.check_output(check_dygraph=False, check_pir=True)
+        self.check_output(check_dygraph=False)


 class TestOneHotOp_attr(OpTest):
@@ -73,7 +74,7 @@ def setUp(self):
         self.outputs = {'Out': (out, x_lod)}

     def test_check_output(self):
-        self.check_output(check_dygraph=False, check_pir=True)
+        self.check_output(check_dygraph=False)


 class TestOneHotOp_default_dtype(OpTest):
@@ -97,7 +98,7 @@ def setUp(self):
         self.outputs = {'Out': (out, x_lod)}

     def test_check_output(self):
-        self.check_output(check_dygraph=False, check_pir=True)
+        self.check_output(check_dygraph=False)


 class TestOneHotOp_default_dtype_attr(OpTest):
@@ -122,14 +123,16 @@ def setUp(self):
         self.outputs = {'Out': (out, x_lod)}

     def test_check_output(self):
-        self.check_output(check_dygraph=False, check_pir=True)
+        self.check_output(check_dygraph=False)


 class TestOneHotOpApi(unittest.TestCase):
+    @test_with_pir_api
     def test_api(self):
         num_classes = 10
         self._run(num_classes)

+    @test_with_pir_api
     def test_api_with_depthTensor(self):
         num_classes = paddle.assign(np.array([10], dtype=np.int32))
         self._run(num_classes)
@@ -146,7 +149,7 @@ def test_api_with_dygraph(self):
     def _run(self, num_classes):
         label = paddle.static.data(name="label", shape=[-1, 1], dtype="int64")
-        label.desc.set_need_check_feed(False)
+        # label.desc.set_need_check_feed(False)
         one_hot_label = functional.one_hot(x=label, num_classes=num_classes)

         place = base.CPUPlace()
@@ -155,7 +158,7 @@ def _run(self, num_classes):
         ).reshape([6, 1])

         exe = base.Executor(place)
-        exe.run(base.default_startup_program())
+        exe.run(paddle.static.default_startup_program())
         ret = exe.run(
             feed={
                 'label': label_data,
@@ -166,6 +169,7 @@
 class BadInputTestOnehotV2(unittest.TestCase):
+    @test_with_pir_api
     def test_error(self):
         with base.program_guard(base.Program()):
@@ -175,7 +179,7 @@ def test_bad_x():
                 shape=[4],
                 dtype="float32",
             )
-            label.desc.set_need_check_feed(False)
+            # label.desc.set_need_check_feed(False)
             one_hot_label = functional.one_hot(x=label, num_classes=4)

         self.assertRaises(TypeError, test_bad_x)

diff --git a/test/legacy_test/test_one_hot_v2_op.py b/test/legacy_test/test_one_hot_v2_op.py
index 9e30a9133c0ec..5265562327d1b 100644
--- a/test/legacy_test/test_one_hot_v2_op.py
+++ b/test/legacy_test/test_one_hot_v2_op.py
@@ -20,6 +20,7 @@
 import paddle
 from paddle import base
 from paddle.base import core
+from paddle.pir_utils import test_with_pir_api


 def one_hot_wrapper(x, depth_tensor, **keargs):
@@ -48,7 +49,7 @@ def setUp(self):
         self.outputs = {'Out': (out, x_lod)}

     def test_check_output(self):
-        self.check_output(check_cinn=True, check_pir=True)
+        self.check_output(check_cinn=True)


 class TestOneHotOp_attr(OpTest):
@@ -74,7 +75,7 @@ def setUp(self):
         self.outputs = {'Out': (out, x_lod)}

     def test_check_output(self):
-        self.check_output(check_cinn=True, check_pir=True)
+        self.check_output(check_cinn=True)


 class TestOneHotOp_default_dtype(OpTest):
@@ -98,7 +99,7 @@ def setUp(self):
         self.outputs = {'Out': (out, x_lod)}

     def test_check_output(self):
-        self.check_output(check_cinn=True, check_pir=True)
+        self.check_output(check_cinn=True)


 class TestOneHotOp_default_dtype_attr(OpTest):
@@ -124,14 +125,16 @@ def
setUp(self): self.outputs = {'Out': (out, x_lod)} def test_check_output(self): - self.check_output(check_pir=True) + self.check_output() class TestOneHotOpApi(unittest.TestCase): + @test_with_pir_api def test_api(self): depth = 10 self._run(depth) + @test_with_pir_api def test_api_with_depthTensor(self): depth = paddle.assign(np.array([10], dtype=np.int32)) self._run(depth) @@ -151,16 +154,16 @@ def test_api_with_dygraph(self): def _run(self, depth): label = paddle.static.data(name="label", shape=[-1, 1], dtype="int64") - label.desc.set_need_check_feed(False) + # label.desc.set_need_check_feed(False) one_hot_label = paddle.nn.functional.one_hot(x=label, num_classes=depth) - place = base.CPUPlace() + place = paddle.CPUPlace() label_data = np.array( [np.random.randint(0, 10 - 1) for i in range(6)] ).reshape([6, 1]) exe = base.Executor(place) - exe.run(base.default_startup_program()) + exe.run(paddle.static.default_startup_program()) ret = exe.run( feed={ 'label': label_data, @@ -171,8 +174,9 @@ def _run(self, depth): class BadInputTestOnehotV2(unittest.TestCase): + @test_with_pir_api def test_error(self): - with base.program_guard(base.Program()): + with paddle.static.program_guard(paddle.static.Program()): def test_bad_x(): label = paddle.static.data( @@ -180,7 +184,7 @@ def test_bad_x(): shape=[-1, 4], dtype="float32", ) - label.desc.set_need_check_feed(False) + # label.desc.set_need_check_feed(False) one_hot_label = paddle.nn.functional.one_hot( x=label, num_classes=4 ) From a6f8e70c823932830faeae895b00daba32513649 Mon Sep 17 00:00:00 2001 From: ooooo <3164076421@qq.com> Date: Sat, 25 Nov 2023 11:44:22 +0000 Subject: [PATCH 4/8] fix --- test/legacy_test/test_nn_functional_hot_op.py | 40 ++++++++++--------- test/legacy_test/test_one_hot_v2_op.py | 40 ++++++++++--------- 2 files changed, 42 insertions(+), 38 deletions(-) diff --git a/test/legacy_test/test_nn_functional_hot_op.py b/test/legacy_test/test_nn_functional_hot_op.py index 4ae321a128974..004993b5574c5 100644 --- a/test/legacy_test/test_nn_functional_hot_op.py +++ b/test/legacy_test/test_nn_functional_hot_op.py @@ -148,28 +148,30 @@ def test_api_with_dygraph(self): ) def _run(self, num_classes): - label = paddle.static.data(name="label", shape=[-1, 1], dtype="int64") - # label.desc.set_need_check_feed(False) - one_hot_label = functional.one_hot(x=label, num_classes=num_classes) - - place = base.CPUPlace() - label_data = np.array( - [np.random.randint(0, 10 - 1) for i in range(6)] - ).reshape([6, 1]) - - exe = base.Executor(place) - exe.run(paddle.static.default_startup_program()) - ret = exe.run( - feed={ - 'label': label_data, - }, - fetch_list=[one_hot_label], - return_numpy=False, - ) + main = paddle.static.Program() + startup = paddle.static.Program() + with paddle.static.program_guard(main, startup): + label = paddle.static.data(name="label", shape=[-1, 1], dtype="int64") + # label.desc.set_need_check_feed(False) + one_hot_label = functional.one_hot(x=label, num_classes=num_classes) + + place = base.CPUPlace() + label_data = np.array( + [np.random.randint(0, 10 - 1) for i in range(6)] + ).reshape([6, 1]) + + exe = base.Executor(place) + exe.run(startup) + ret = exe.run( + feed={ + 'label': label_data, + }, + fetch_list=[one_hot_label], + return_numpy=False, + ) class BadInputTestOnehotV2(unittest.TestCase): - @test_with_pir_api def test_error(self): with base.program_guard(base.Program()): diff --git a/test/legacy_test/test_one_hot_v2_op.py b/test/legacy_test/test_one_hot_v2_op.py index 5265562327d1b..02592b78e2828 100644 --- 
a/test/legacy_test/test_one_hot_v2_op.py +++ b/test/legacy_test/test_one_hot_v2_op.py @@ -153,28 +153,30 @@ def test_api_with_dygraph(self): ) def _run(self, depth): - label = paddle.static.data(name="label", shape=[-1, 1], dtype="int64") - # label.desc.set_need_check_feed(False) - one_hot_label = paddle.nn.functional.one_hot(x=label, num_classes=depth) - - place = paddle.CPUPlace() - label_data = np.array( - [np.random.randint(0, 10 - 1) for i in range(6)] - ).reshape([6, 1]) - - exe = base.Executor(place) - exe.run(paddle.static.default_startup_program()) - ret = exe.run( - feed={ - 'label': label_data, - }, - fetch_list=[one_hot_label], - return_numpy=False, - ) + main = paddle.static.Program() + startup = paddle.static.Program() + with paddle.static.program_guard(main, startup): + label = paddle.static.data(name="label", shape=[-1, 1], dtype="int64") + # label.desc.set_need_check_feed(False) + one_hot_label = paddle.nn.functional.one_hot(x=label, num_classes=depth) + + place = paddle.CPUPlace() + label_data = np.array( + [np.random.randint(0, 10 - 1) for i in range(6)] + ).reshape([6, 1]) + + exe = base.Executor(place) + exe.run(startup) + ret = exe.run( + feed={ + 'label': label_data, + }, + fetch_list=[one_hot_label], + return_numpy=False, + ) class BadInputTestOnehotV2(unittest.TestCase): - @test_with_pir_api def test_error(self): with paddle.static.program_guard(paddle.static.Program()): From 6d2452d349493fd0d8f3384db8214bc631cfe9dc Mon Sep 17 00:00:00 2001 From: ooo oo <3164076421@qq.com> Date: Thu, 30 Nov 2023 12:57:56 +0800 Subject: [PATCH 5/8] fix --- python/paddle/tensor/search.py | 2 +- python/paddle/vision/ops.py | 10 +- test/legacy_test/test_nn_functional_hot_op.py | 73 ++++++++----- test/legacy_test/test_one_hot_v2_op.py | 77 +++++++++----- test/legacy_test/test_ops_nms.py | 100 +++++++++--------- test/legacy_test/test_zero_dim_tensor.py | 2 + 6 files changed, 160 insertions(+), 104 deletions(-) diff --git a/python/paddle/tensor/search.py b/python/paddle/tensor/search.py index 51f09119ef2e4..758d65b065334 100755 --- a/python/paddle/tensor/search.py +++ b/python/paddle/tensor/search.py @@ -99,7 +99,7 @@ def argsort(x, axis=-1, descending=False, name=None): [1, 1, 0, 2], [0, 2, 1, 1]]]) """ - if in_dynamic_mode(): + if in_dynamic_or_pir_mode(): _, ids = _C_ops.argsort(x, axis, descending) return ids else: diff --git a/python/paddle/vision/ops.py b/python/paddle/vision/ops.py index 47ed79f9818c7..bfefe220bfb4a 100755 --- a/python/paddle/vision/ops.py +++ b/python/paddle/vision/ops.py @@ -1971,10 +1971,12 @@ def _nms(boxes, iou_threshold): categories is not None ), "if category_idxs is given, categories which is a list of unique id of all categories is necessary" - mask = paddle.zeros_like(scores, dtype=paddle.int32) + mask = paddle.zeros_like(scores, dtype='int32') for category_id in categories: - cur_category_boxes_idxs = paddle.where(category_idxs == category_id)[0] + cur_category_boxes_idxs = paddle.where( + paddle.equal(category_idxs, category_id) + )[0] shape = cur_category_boxes_idxs.shape[0] cur_category_boxes_idxs = paddle.reshape( cur_category_boxes_idxs, [shape] @@ -1999,7 +2001,7 @@ def _nms(boxes, iou_threshold): updates = paddle.ones_like( cur_category_boxes_idxs[cur_category_keep_boxes_sub_idxs], - dtype=paddle.int32, + dtype='int32', ) mask = paddle.scatter( mask, @@ -2017,7 +2019,7 @@ def _nms(boxes, iou_threshold): if top_k is None: return keep_boxes_idxs[sorted_sub_indices] - if in_dynamic_or_pir_mode(): + if in_dygraph_mode(): top_k = shape if shape < top_k 
else top_k _, topk_sub_indices = paddle.topk(scores[keep_boxes_idxs], top_k) return keep_boxes_idxs[topk_sub_indices] diff --git a/test/legacy_test/test_nn_functional_hot_op.py b/test/legacy_test/test_nn_functional_hot_op.py index 4ae321a128974..d8c1ec132bae3 100644 --- a/test/legacy_test/test_nn_functional_hot_op.py +++ b/test/legacy_test/test_nn_functional_hot_op.py @@ -129,13 +129,57 @@ def test_check_output(self): class TestOneHotOpApi(unittest.TestCase): @test_with_pir_api def test_api(self): - num_classes = 10 - self._run(num_classes) + main = paddle.static.Program() + startup = paddle.static.Program() + with paddle.static.program_guard(main, startup): + num_classes = 10 + label = paddle.static.data( + name="label", shape=[-1, 1], dtype="int64" + ) + # label.desc.set_need_check_feed(False) + one_hot_label = functional.one_hot(x=label, num_classes=num_classes) + + place = base.CPUPlace() + label_data = np.array( + [np.random.randint(0, 10 - 1) for i in range(6)] + ).reshape([6, 1]) + + exe = base.Executor(place) + exe.run(startup) + ret = exe.run( + feed={ + 'label': label_data, + }, + fetch_list=[one_hot_label], + return_numpy=False, + ) @test_with_pir_api def test_api_with_depthTensor(self): - num_classes = paddle.assign(np.array([10], dtype=np.int32)) - self._run(num_classes) + main = paddle.static.Program() + startup = paddle.static.Program() + with paddle.static.program_guard(main, startup): + num_classes = paddle.assign(np.array([10], dtype=np.int32)) + label = paddle.static.data( + name="label", shape=[-1, 1], dtype="int64" + ) + # label.desc.set_need_check_feed(False) + one_hot_label = functional.one_hot(x=label, num_classes=num_classes) + + place = base.CPUPlace() + label_data = np.array( + [np.random.randint(0, 10 - 1) for i in range(6)] + ).reshape([6, 1]) + + exe = base.Executor(place) + exe.run(startup) + ret = exe.run( + feed={ + 'label': label_data, + }, + fetch_list=[one_hot_label], + return_numpy=False, + ) def test_api_with_dygraph(self): num_classes = 10 @@ -147,29 +191,8 @@ def test_api_with_dygraph(self): x=base.dygraph.to_variable(label), num_classes=num_classes ) - def _run(self, num_classes): - label = paddle.static.data(name="label", shape=[-1, 1], dtype="int64") - # label.desc.set_need_check_feed(False) - one_hot_label = functional.one_hot(x=label, num_classes=num_classes) - - place = base.CPUPlace() - label_data = np.array( - [np.random.randint(0, 10 - 1) for i in range(6)] - ).reshape([6, 1]) - - exe = base.Executor(place) - exe.run(paddle.static.default_startup_program()) - ret = exe.run( - feed={ - 'label': label_data, - }, - fetch_list=[one_hot_label], - return_numpy=False, - ) - class BadInputTestOnehotV2(unittest.TestCase): - @test_with_pir_api def test_error(self): with base.program_guard(base.Program()): diff --git a/test/legacy_test/test_one_hot_v2_op.py b/test/legacy_test/test_one_hot_v2_op.py index 5265562327d1b..f904f8f57a338 100644 --- a/test/legacy_test/test_one_hot_v2_op.py +++ b/test/legacy_test/test_one_hot_v2_op.py @@ -131,13 +131,61 @@ def test_check_output(self): class TestOneHotOpApi(unittest.TestCase): @test_with_pir_api def test_api(self): - depth = 10 - self._run(depth) + main = paddle.static.Program() + startup = paddle.static.Program() + with paddle.static.program_guard(main, startup): + depth = 10 + label = paddle.static.data( + name="label", shape=[-1, 1], dtype="int64" + ) + # label.desc.set_need_check_feed(False) + one_hot_label = paddle.nn.functional.one_hot( + x=label, num_classes=depth + ) + + place = paddle.CPUPlace() + 
label_data = np.array( + [np.random.randint(0, 10 - 1) for i in range(6)] + ).reshape([6, 1]) + + exe = base.Executor(place) + exe.run(startup) + ret = exe.run( + feed={ + 'label': label_data, + }, + fetch_list=[one_hot_label], + return_numpy=False, + ) @test_with_pir_api def test_api_with_depthTensor(self): - depth = paddle.assign(np.array([10], dtype=np.int32)) - self._run(depth) + main = paddle.static.Program() + startup = paddle.static.Program() + with paddle.static.program_guard(main, startup): + depth = paddle.assign(np.array([10], dtype=np.int32)) + label = paddle.static.data( + name="label", shape=[-1, 1], dtype="int64" + ) + # label.desc.set_need_check_feed(False) + one_hot_label = paddle.nn.functional.one_hot( + x=label, num_classes=depth + ) + + place = paddle.CPUPlace() + label_data = np.array( + [np.random.randint(0, 10 - 1) for i in range(6)] + ).reshape([6, 1]) + + exe = base.Executor(place) + exe.run(startup) + ret = exe.run( + feed={ + 'label': label_data, + }, + fetch_list=[one_hot_label], + return_numpy=False, + ) def test_api_with_dygraph(self): depth = 10 @@ -152,29 +200,8 @@ def test_api_with_dygraph(self): paddle.to_tensor(label), depth ) - def _run(self, depth): - label = paddle.static.data(name="label", shape=[-1, 1], dtype="int64") - # label.desc.set_need_check_feed(False) - one_hot_label = paddle.nn.functional.one_hot(x=label, num_classes=depth) - - place = paddle.CPUPlace() - label_data = np.array( - [np.random.randint(0, 10 - 1) for i in range(6)] - ).reshape([6, 1]) - - exe = base.Executor(place) - exe.run(paddle.static.default_startup_program()) - ret = exe.run( - feed={ - 'label': label_data, - }, - fetch_list=[one_hot_label], - return_numpy=False, - ) - class BadInputTestOnehotV2(unittest.TestCase): - @test_with_pir_api def test_error(self): with paddle.static.program_guard(paddle.static.Program()): diff --git a/test/legacy_test/test_ops_nms.py b/test/legacy_test/test_ops_nms.py index 984d957366419..9358961ad3f75 100644 --- a/test/legacy_test/test_ops_nms.py +++ b/test/legacy_test/test_ops_nms.py @@ -143,55 +143,57 @@ def test_multiclass_nms_dynamic(self): def test_multiclass_nms_static(self): for device in self.devices: for dtype in self.dtypes: - paddle.enable_static() - boxes, scores, category_idxs, categories = gen_args( - self.num_boxes, dtype - ) - boxes_static = paddle.static.data( - shape=boxes.shape, dtype=boxes.dtype, name="boxes" - ) - scores_static = paddle.static.data( - shape=scores.shape, dtype=scores.dtype, name="scores" - ) - category_idxs_static = paddle.static.data( - shape=category_idxs.shape, - dtype=category_idxs.dtype, - name="category_idxs", - ) - out = paddle.vision.ops.nms( - boxes_static, - self.threshold, - scores_static, - category_idxs_static, - categories, - self.topk, - ) - place = paddle.CPUPlace() - if device == 'gpu': - place = paddle.CUDAPlace(0) - exe = paddle.static.Executor(place) - out = exe.run( - paddle.static.default_main_program(), - feed={ - 'boxes': boxes, - 'scores': scores, - 'category_idxs': category_idxs, - }, - fetch_list=[out], - ) - paddle.disable_static() - out_py = multiclass_nms( - boxes, scores, category_idxs, self.threshold, self.topk - ) - out = np.array(out) - out = np.squeeze(out) - np.testing.assert_array_equal( - out, - out_py, - err_msg=f'paddle out: {out}\n py out: {out_py}\n', - ) + with paddle.static.program_guard( + paddle.static.Program(), paddle.static.Program() + ): + paddle.enable_static() + boxes, scores, category_idxs, categories = gen_args( + self.num_boxes, dtype + ) + boxes_static 
= paddle.static.data( + shape=boxes.shape, dtype=boxes.dtype, name="boxes" + ) + scores_static = paddle.static.data( + shape=scores.shape, dtype=scores.dtype, name="scores" + ) + category_idxs_static = paddle.static.data( + shape=category_idxs.shape, + dtype=category_idxs.dtype, + name="category_idxs", + ) + out = paddle.vision.ops.nms( + boxes_static, + self.threshold, + scores_static, + category_idxs_static, + categories, + self.topk, + ) + place = paddle.CPUPlace() + if device == 'gpu': + place = paddle.CUDAPlace(0) + exe = paddle.static.Executor(place) + out = exe.run( + paddle.static.default_main_program(), + feed={ + 'boxes': boxes, + 'scores': scores, + 'category_idxs': category_idxs, + }, + fetch_list=[out], + ) + paddle.disable_static() + out_py = multiclass_nms( + boxes, scores, category_idxs, self.threshold, self.topk + ) + out = np.array(out) + out = np.squeeze(out) + np.testing.assert_array_equal( + out, + out_py, + err_msg=f'paddle out: {out}\n py out: {out_py}\n', + ) - @test_with_pir_api def test_multiclass_nms_dynamic_to_static(self): for device in self.devices: for dtype in self.dtypes: @@ -206,7 +208,7 @@ def fun(x): 0.1, paddle.to_tensor(scores), paddle.to_tensor(category_idxs), - categories, + paddle.to_tensor(categories), 10, ) return out diff --git a/test/legacy_test/test_zero_dim_tensor.py b/test/legacy_test/test_zero_dim_tensor.py index ed6eb3786e077..ee42d56e08831 100644 --- a/test/legacy_test/test_zero_dim_tensor.py +++ b/test/legacy_test/test_zero_dim_tensor.py @@ -25,6 +25,7 @@ import paddle import paddle.nn.functional as F +from paddle.pir_utils import test_with_pir_api unary_api_list = [ paddle.nn.functional.elu, @@ -5914,6 +5915,7 @@ def test_static_embedding(self): res = self.exe.run(prog, fetch_list=[emb]) self.assertEqual(res[0].shape, (3,)) + @test_with_pir_api def test_one_hot_label(self): label = paddle.full(shape=[], fill_value=2, dtype='int64') one_hot_label = paddle.nn.functional.one_hot(label, num_classes=4) From e15dea21c8e143ae2c767a2498ed8e8b45c52111 Mon Sep 17 00:00:00 2001 From: ooo oo <3164076421@qq.com> Date: Fri, 1 Dec 2023 22:06:09 +0800 Subject: [PATCH 6/8] fix --- test/legacy_test/test_nn_functional_hot_op.py | 6 +++--- test/legacy_test/test_one_hot_v2_op.py | 6 +++--- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/test/legacy_test/test_nn_functional_hot_op.py b/test/legacy_test/test_nn_functional_hot_op.py index d8c1ec132bae3..1b81802b53625 100644 --- a/test/legacy_test/test_nn_functional_hot_op.py +++ b/test/legacy_test/test_nn_functional_hot_op.py @@ -136,13 +136,13 @@ def test_api(self): label = paddle.static.data( name="label", shape=[-1, 1], dtype="int64" ) - # label.desc.set_need_check_feed(False) one_hot_label = functional.one_hot(x=label, num_classes=num_classes) place = base.CPUPlace() label_data = np.array( [np.random.randint(0, 10 - 1) for i in range(6)] ).reshape([6, 1]) + label_data = label_data.astype('int64') exe = base.Executor(place) exe.run(startup) @@ -163,13 +163,13 @@ def test_api_with_depthTensor(self): label = paddle.static.data( name="label", shape=[-1, 1], dtype="int64" ) - # label.desc.set_need_check_feed(False) one_hot_label = functional.one_hot(x=label, num_classes=num_classes) place = base.CPUPlace() label_data = np.array( [np.random.randint(0, 10 - 1) for i in range(6)] ).reshape([6, 1]) + label_data = label_data.astype('int64') exe = base.Executor(place) exe.run(startup) @@ -202,7 +202,7 @@ def test_bad_x(): shape=[4], dtype="float32", ) - # label.desc.set_need_check_feed(False) + 
label.desc.set_need_check_feed(False) one_hot_label = functional.one_hot(x=label, num_classes=4) self.assertRaises(TypeError, test_bad_x) diff --git a/test/legacy_test/test_one_hot_v2_op.py b/test/legacy_test/test_one_hot_v2_op.py index f904f8f57a338..7d8cca60b60cb 100644 --- a/test/legacy_test/test_one_hot_v2_op.py +++ b/test/legacy_test/test_one_hot_v2_op.py @@ -138,7 +138,6 @@ def test_api(self): label = paddle.static.data( name="label", shape=[-1, 1], dtype="int64" ) - # label.desc.set_need_check_feed(False) one_hot_label = paddle.nn.functional.one_hot( x=label, num_classes=depth ) @@ -147,6 +146,7 @@ def test_api(self): label_data = np.array( [np.random.randint(0, 10 - 1) for i in range(6)] ).reshape([6, 1]) + label_data = label_data.astype('int64') exe = base.Executor(place) exe.run(startup) @@ -167,7 +167,6 @@ def test_api_with_depthTensor(self): label = paddle.static.data( name="label", shape=[-1, 1], dtype="int64" ) - # label.desc.set_need_check_feed(False) one_hot_label = paddle.nn.functional.one_hot( x=label, num_classes=depth ) @@ -176,6 +175,7 @@ def test_api_with_depthTensor(self): label_data = np.array( [np.random.randint(0, 10 - 1) for i in range(6)] ).reshape([6, 1]) + label_data = label_data.astype('int64') exe = base.Executor(place) exe.run(startup) @@ -211,7 +211,7 @@ def test_bad_x(): shape=[-1, 4], dtype="float32", ) - # label.desc.set_need_check_feed(False) + label.desc.set_need_check_feed(False) one_hot_label = paddle.nn.functional.one_hot( x=label, num_classes=4 ) From c64cde9c81170666f692a8ccabbe02376d4322ef Mon Sep 17 00:00:00 2001 From: ooo oo <3164076421@qq.com> Date: Sat, 2 Dec 2023 08:43:36 +0800 Subject: [PATCH 7/8] update --- test/legacy_test/test_nn_functional_hot_op.py | 8 -------- 1 file changed, 8 deletions(-) diff --git a/test/legacy_test/test_nn_functional_hot_op.py b/test/legacy_test/test_nn_functional_hot_op.py index 1b81802b53625..0d7a489eb223b 100644 --- a/test/legacy_test/test_nn_functional_hot_op.py +++ b/test/legacy_test/test_nn_functional_hot_op.py @@ -24,14 +24,9 @@ from paddle.pir_utils import test_with_pir_api -def one_hot_wrapper(x, depth_tensor, **keargs): - return paddle.nn.functional.one_hot(x, depth_tensor) - - class TestOneHotOp(OpTest): def setUp(self): self.op_type = 'one_hot_v2' - self.python_api = one_hot_wrapper depth = 10 depth_np = np.array(10).astype('int32') dimension = 12 @@ -55,7 +50,6 @@ def test_check_output(self): class TestOneHotOp_attr(OpTest): def setUp(self): self.op_type = 'one_hot_v2' - self.python_api = one_hot_wrapper depth = 10 dimension = 12 x_lod = [[4, 1, 3, 3]] @@ -80,7 +74,6 @@ def test_check_output(self): class TestOneHotOp_default_dtype(OpTest): def setUp(self): self.op_type = 'one_hot_v2' - self.python_api = one_hot_wrapper depth = 10 depth_np = np.array(10).astype('int32') dimension = 12 @@ -104,7 +97,6 @@ def test_check_output(self): class TestOneHotOp_default_dtype_attr(OpTest): def setUp(self): self.op_type = 'one_hot_v2' - self.python_api = one_hot_wrapper depth = 10 dimension = 12 x_lod = [[4, 1, 3, 3]] From 55a4b4f9473dcebb36c064f9b0bb03d5f955cedb Mon Sep 17 00:00:00 2001 From: ooo oo <3164076421@qq.com> Date: Mon, 4 Dec 2023 17:51:06 +0800 Subject: [PATCH 8/8] fix --- python/paddle/vision/ops.py | 2 +- test/legacy_test/test_ops_nms.py | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/python/paddle/vision/ops.py b/python/paddle/vision/ops.py index f5c8eec8b9c04..a9d67c8104e13 100755 --- a/python/paddle/vision/ops.py +++ b/python/paddle/vision/ops.py @@ -1975,7 +1975,7 @@ def 
_nms(boxes, iou_threshold):
     for category_id in categories:
         cur_category_boxes_idxs = paddle.where(
-            paddle.equal(category_idxs, category_id)
+            paddle.equal(category_idxs, paddle.to_tensor(category_id))
         )[0]
         shape = cur_category_boxes_idxs.shape[0]
         cur_category_boxes_idxs = paddle.reshape(
             cur_category_boxes_idxs, [shape]

diff --git a/test/legacy_test/test_ops_nms.py b/test/legacy_test/test_ops_nms.py
index 9358961ad3f75..a10640f964a84 100644
--- a/test/legacy_test/test_ops_nms.py
+++ b/test/legacy_test/test_ops_nms.py
@@ -208,7 +208,7 @@ def fun(x):
                     0.1,
                     paddle.to_tensor(scores),
                     paddle.to_tensor(category_idxs),
-                    paddle.to_tensor(categories),
+                    categories,
                     10,
                 )
                 return out