[PIR]Open more TestCase UTs (PaddlePaddle#57977)
0x45f authored Oct 20, 2023
1 parent 5ff89c7 commit 1877039
Showing 6 changed files with 50 additions and 32 deletions.
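The common thread across these files is the `test_with_pir_api` decorator imported from `paddle.pir_utils`, which lets an existing static-graph unit test also exercise the new PIR program. As rough orientation only, the sketch below shows the general shape such a decorator can take; it is an assumption for illustration, not the actual Paddle implementation, and the `paddle.pir_utils.IrGuard` context manager it leans on is likewise assumed here.

```python
# Illustrative sketch only -- not the real paddle.pir_utils.test_with_pir_api.
# Assumes paddle.pir_utils.IrGuard is a context manager that switches the
# static-graph machinery to the new PIR while the block is active.
import functools

import paddle


def test_with_pir_api(func):
    """Run the wrapped unittest method twice: legacy program IR, then PIR."""

    @functools.wraps(func)
    def wrapper(self, *args, **kwargs):
        func(self, *args, **kwargs)  # first pass: legacy program IR
        with paddle.pir_utils.IrGuard():  # second pass: PIR enabled
            func(self, *args, **kwargs)

    return wrapper
```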
2 changes: 1 addition & 1 deletion python/paddle/base/dygraph/base.py
@@ -595,7 +595,7 @@ def guard(place=None):
     if place is not None:
         expected_place = _get_paddle_place(place)
     else:
-        expected_place = framework._current_expected_place()
+        expected_place = framework._current_expected_place_()

     with framework.program_guard(train, startup):
         with framework.unique_name.guard():

1 change: 1 addition & 0 deletions test/legacy_test/test_activation_op.py
@@ -2383,6 +2383,7 @@ def setUp(self):
     def executed_api(self):
         self.relu = F.relu

+    @test_with_pir_api
     def test_static_api(self):
         with static_guard():
             with paddle.static.program_guard(paddle.static.Program()):

2 changes: 2 additions & 0 deletions test/legacy_test/test_allclose_op.py
@@ -19,6 +19,7 @@

 import paddle
 from paddle.base import core
+from paddle.pir_utils import test_with_pir_api


 class TestAllcloseOp(OpTest):
@@ -174,6 +175,7 @@ def test_equal_nan():


 class TestAllcloseOpFp16(unittest.TestCase):
+    @test_with_pir_api
     def test_fp16(self):
         x_data = np.random.rand(10, 10).astype('float16')
         y_data = np.random.rand(10, 10).astype('float16')

53 changes: 29 additions & 24 deletions test/legacy_test/test_clip_op.py
@@ -20,6 +20,7 @@
 import paddle
 from paddle import base
 from paddle.base import Program, core, program_guard
+from paddle.pir_utils import test_with_pir_api


 class TestClipOp(OpTest):
@@ -266,40 +267,43 @@ class TestClipAPI(unittest.TestCase):
     def _executed_api(self, x, min=None, max=None):
         return paddle.clip(x, min, max)

+    @test_with_pir_api
     def test_clip(self):
         paddle.enable_static()
         data_shape = [1, 9, 9, 4]
         data = np.random.random(data_shape).astype('float32')
-        images = paddle.static.data(
-            name='image', shape=data_shape, dtype='float32'
-        )
-        min = paddle.static.data(name='min', shape=[1], dtype='float32')
-        max = paddle.static.data(name='max', shape=[1], dtype='float32')

         place = (
             base.CUDAPlace(0)
             if base.core.is_compiled_with_cuda()
             else base.CPUPlace()
         )
         exe = base.Executor(place)

-        out_1 = self._executed_api(images, min=min, max=max)
-        out_2 = self._executed_api(images, min=0.2, max=0.9)
-        out_3 = self._executed_api(images, min=0.3)
-        out_4 = self._executed_api(images, max=0.7)
-        out_5 = self._executed_api(images, min=min)
-        out_6 = self._executed_api(images, max=max)
-        out_7 = self._executed_api(images, max=-1.0)
-        out_8 = self._executed_api(images)
-        out_9 = self._executed_api(
-            paddle.cast(images, 'float64'), min=0.2, max=0.9
-        )
-        out_10 = self._executed_api(
-            paddle.cast(images * 10, 'int32'), min=2, max=8
-        )
-        out_11 = self._executed_api(
-            paddle.cast(images * 10, 'int64'), min=2, max=8
-        )
+        main = paddle.static.Program()
+        startup = paddle.static.Program()
+        with paddle.static.program_guard(main, startup):
+            images = paddle.static.data(
+                name='image', shape=data_shape, dtype='float32'
+            )
+            min = paddle.static.data(name='min', shape=[1], dtype='float32')
+            max = paddle.static.data(name='max', shape=[1], dtype='float32')
+            out_1 = self._executed_api(images, min=min, max=max)
+            out_2 = self._executed_api(images, min=0.2, max=0.9)
+            out_3 = self._executed_api(images, min=0.3)
+            out_4 = self._executed_api(images, max=0.7)
+            out_5 = self._executed_api(images, min=min)
+            out_6 = self._executed_api(images, max=max)
+            out_7 = self._executed_api(images, max=-1.0)
+            out_8 = self._executed_api(images)
+            out_9 = self._executed_api(
+                paddle.cast(images, 'float64'), min=0.2, max=0.9
+            )
+            out_10 = self._executed_api(
+                paddle.cast(images * 10, 'int32'), min=2, max=8
+            )
+            out_11 = self._executed_api(
+                paddle.cast(images * 10, 'int64'), min=2, max=8
+            )

         (
             res1,
@@ -314,7 +318,7 @@ def test_clip(self):
             res10,
             res11,
         ) = exe.run(
-            base.default_main_program(),
+            main,
             feed={
                 "image": data,
                 "min": np.array([0.2]).astype('float32'),
@@ -430,6 +434,7 @@ def test_errors(self):


 class TestClipOpFp16(unittest.TestCase):
+    @test_with_pir_api
     def test_fp16(self):
         paddle.enable_static()
         data_shape = [1, 9, 9, 4]

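The recurring edit in this file (and in test_reduce_op.py below) is to stop relying on the process-wide `base.default_main_program()` and instead build the graph inside an explicitly constructed `Program` pair that is then passed to `Executor.run`. A minimal standalone sketch of that pattern follows; the tensor name, shape, and clip bounds are placeholders for illustration.

```python
# Minimal sketch of the explicit-Program pattern used throughout this commit;
# tensor names, shapes, and clip bounds are placeholders for illustration.
import numpy as np

import paddle

paddle.enable_static()

main = paddle.static.Program()
startup = paddle.static.Program()
with paddle.static.program_guard(main, startup):
    x = paddle.static.data(name='x', shape=[2, 3], dtype='float32')
    out = paddle.clip(x, min=0.2, max=0.9)

exe = paddle.static.Executor(paddle.CPUPlace())
exe.run(startup)
(res,) = exe.run(
    main,  # run the explicit Program instead of default_main_program()
    feed={'x': np.random.rand(2, 3).astype('float32')},
    fetch_list=[out],
)
```

Running the explicit `main` program keeps each test self-contained, which is what allows the same body to be re-executed under PIR by the decorator.
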
5 changes: 4 additions & 1 deletion test/legacy_test/test_flatten_contiguous_range_op.py
@@ -19,6 +19,7 @@

 import paddle
 from paddle.base import core
+from paddle.pir_utils import test_with_pir_api


 class TestFlattenOp(OpTest):
@@ -461,6 +462,7 @@ class TestStaticFlattenPythonAPI(unittest.TestCase):
     def execute_api(self, x, start_axis=0, stop_axis=-1):
         return paddle.flatten(x, start_axis, stop_axis)

+    @test_with_pir_api
     def test_static_api(self):
         paddle.enable_static()
         np_x = np.random.rand(2, 3, 4, 4).astype('float32')
@@ -481,6 +483,7 @@ class TestStaticFlattenInferShapePythonAPI(unittest.TestCase):
     def execute_api(self, x, start_axis=0, stop_axis=-1):
         return paddle.flatten(x, start_axis, stop_axis)

+    @test_with_pir_api
     def test_static_api(self):
         paddle.enable_static()
         main_prog = paddle.static.Program()
@@ -489,7 +492,7 @@ def test_static_api(self):
                 name="x", shape=[-1, 3, -1, -1], dtype='float32'
             )
             out = self.execute_api(x, start_axis=2, stop_axis=3)
-        self.assertTrue((-1, 3, -1) == out.shape)
+        self.assertTrue((-1, 3, -1) == tuple(out.shape))


 class TestStaticInplaceFlattenPythonAPI(TestStaticFlattenPythonAPI):

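The `tuple(out.shape)` change above is a small compatibility tweak: under PIR the inferred static shape is apparently reported as a list rather than a tuple, so casting before the comparison keeps the assertion valid under both IRs.
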
19 changes: 13 additions & 6 deletions test/legacy_test/test_reduce_op.py
@@ -1713,21 +1713,25 @@ def setUp(self):
             self.places.append(base.CUDAPlace(0))

     def check_static_result(self, place):
-        with base.program_guard(base.Program(), base.Program()):
+        main = paddle.static.Program()
+        startup = paddle.static.Program()
+        with base.program_guard(main, startup):
             input = paddle.static.data(name="input", shape=[4, 4], dtype="bool")
             result = paddle.all(x=input)
             input_np = np.random.randint(0, 2, [4, 4]).astype("bool")

             exe = base.Executor(place)
             fetches = exe.run(
-                base.default_main_program(),
+                main,
                 feed={"input": input_np},
                 fetch_list=[result],
             )
             self.assertTrue((fetches[0] == np.all(input_np)).all())

     def check_static_float_result(self, place):
-        with base.program_guard(base.Program(), base.Program()):
+        main = paddle.static.Program()
+        startup = paddle.static.Program()
+        with base.program_guard(main, startup):
             input = paddle.static.data(
                 name="input", shape=[4, 4], dtype="float"
             )
@@ -1736,26 +1740,29 @@ def check_static_float_result(self, place):

             exe = base.Executor(place)
             fetches = exe.run(
-                base.default_main_program(),
+                main,
                 feed={"input": input_np},
                 fetch_list=[result],
             )
             self.assertTrue((fetches[0] == np.all(input_np)).all())

     def check_static_int_result(self, place):
-        with base.program_guard(base.Program(), base.Program()):
+        main = paddle.static.Program()
+        startup = paddle.static.Program()
+        with base.program_guard(main, startup):
             input = paddle.static.data(name="input", shape=[4, 4], dtype="int")
             result = paddle.all(x=input)
             input_np = np.random.randint(0, 2, [4, 4]).astype("int")

             exe = base.Executor(place)
             fetches = exe.run(
-                base.default_main_program(),
+                main,
                 feed={"input": input_np},
                 fetch_list=[result],
             )
             self.assertTrue((fetches[0] == np.all(input_np)).all())

+    @test_with_pir_api
     def test_static(self):
         for place in self.places:
             self.check_static_result(place=place)
