From db6dde8ae103bd85ef1434e9af4f9e69c80a8502 Mon Sep 17 00:00:00 2001 From: 0x45f Date: Tue, 31 Oct 2023 03:59:29 +0000 Subject: [PATCH 1/5] [PIR]Migrate CrossEntropyLoss into pir --- python/paddle/nn/functional/common.py | 2 +- python/paddle/nn/functional/input.py | 4 +-- python/paddle/nn/functional/loss.py | 2 +- test/legacy_test/test_cross_entropy_loss.py | 33 ++++++++++++++++++++- 4 files changed, 36 insertions(+), 5 deletions(-) diff --git a/python/paddle/nn/functional/common.py b/python/paddle/nn/functional/common.py index 62050410b9c1a..9674e5bc8e01f 100644 --- a/python/paddle/nn/functional/common.py +++ b/python/paddle/nn/functional/common.py @@ -2050,7 +2050,7 @@ def label_smooth(label, prior_dist=None, epsilon=0.1, name=None): if epsilon > 1.0 or epsilon < 0.0: raise ValueError("The value of epsilon must be between 0 and 1.") - if in_dynamic_mode(): + if in_dynamic_or_pir_mode(): return _C_ops.label_smooth(label, prior_dist, float(epsilon)) check_variable_and_dtype( diff --git a/python/paddle/nn/functional/input.py b/python/paddle/nn/functional/input.py index e38797a1115ae..c13ebf986e475 100644 --- a/python/paddle/nn/functional/input.py +++ b/python/paddle/nn/functional/input.py @@ -17,7 +17,7 @@ from ...base.data_feeder import check_variable_and_dtype from ...base.layer_helper import LayerHelper from ...common_ops_import import Variable -from ...framework import in_dynamic_mode, in_dynamic_or_pir_mode +from ...framework import in_dynamic_or_pir_mode __all__ = [] @@ -89,7 +89,7 @@ def one_hot(x, num_classes, name=None): """ - if in_dynamic_mode(): + if in_dynamic_or_pir_mode(): return _C_ops.one_hot(x, num_classes) else: check_variable_and_dtype(x, 'input', ['int32', 'int64'], 'one_hot_v2') diff --git a/python/paddle/nn/functional/loss.py b/python/paddle/nn/functional/loss.py index f9bb57d616a5d..ad5cd6745c2d9 100644 --- a/python/paddle/nn/functional/loss.py +++ b/python/paddle/nn/functional/loss.py @@ -2889,7 +2889,7 @@ def cross_entropy( # 2. 
else # numerator: loss's weighted sum # denominator: cal the sum of weight where the sample's class_index!=ignore_index - is_ignore = label == ignore_index + is_ignore = paddle.equal(label, ignore_index) mask = ~is_ignore if paddle.count_nonzero(is_ignore) > 0: # ignore label out_sum = _C_ops.sum(out, [], None, False) diff --git a/test/legacy_test/test_cross_entropy_loss.py b/test/legacy_test/test_cross_entropy_loss.py index 78901bb75bf1b..5c50ceab5dbe6 100644 --- a/test/legacy_test/test_cross_entropy_loss.py +++ b/test/legacy_test/test_cross_entropy_loss.py @@ -21,6 +21,7 @@ import paddle from paddle import base from paddle.base import Program, program_guard +from paddle.pir_utils import test_with_pir_api def label_smooth(label, C, epsilon, is_onehot=True): @@ -272,6 +273,7 @@ def test_softmax_with_cross_entropy(self): # soft_label test start # soft_label test 1 + @test_with_pir_api def test_cross_entropy_loss_soft_1d(self): self.numeric_stable_mode = False self.soft_label = True @@ -360,6 +362,7 @@ def test_cross_entropy_loss_soft_1d(self): np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05) # soft_label test 2 + @test_with_pir_api def test_cross_entropy_loss_soft_1d_weight(self): self.numeric_stable_mode = False self.soft_label = True @@ -460,6 +463,7 @@ def test_cross_entropy_loss_soft_1d_weight(self): np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05) # soft_label test 3 + @test_with_pir_api def test_cross_entropy_loss_soft_1d_mean(self): self.numeric_stable_mode = False self.soft_label = True @@ -544,6 +548,7 @@ def test_cross_entropy_loss_soft_1d_mean(self): np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05) # soft_label test 4 + @test_with_pir_api def test_cross_entropy_loss_soft_1d_weight_mean(self): self.numeric_stable_mode = False self.soft_label = True @@ -634,6 +639,7 @@ def test_cross_entropy_loss_soft_1d_weight_mean(self): np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05) # soft_label test 5 + @test_with_pir_api def test_cross_entropy_loss_soft_2d(self): def inner_cross_entropy_loss_soft_2d(soft_label): self.numeric_stable_mode = False @@ -739,6 +745,7 @@ def inner_cross_entropy_loss_soft_2d(soft_label): inner_cross_entropy_loss_soft_2d(False) # soft_label test 6 + @test_with_pir_api def test_cross_entropy_loss_soft_2d_weight_mean(self): self.numeric_stable_mode = False self.soft_label = True @@ -840,6 +847,7 @@ def test_cross_entropy_loss_soft_2d_weight_mean(self): # soft_label test end # label_smoothing test 1 + @test_with_pir_api def test_cross_entropy_loss_onehot_label_smoothing_1d(self): self.numeric_stable_mode = False self.soft_label = True @@ -937,6 +945,7 @@ def test_cross_entropy_loss_onehot_label_smoothing_1d(self): paddle.enable_static() # label_smoothing test 2 + @test_with_pir_api def test_cross_entropy_loss_onehot_label_smoothing_1d_weight_mean(self): self.numeric_stable_mode = False self.soft_label = True @@ -1036,6 +1045,7 @@ def test_cross_entropy_loss_onehot_label_smoothing_1d_weight_mean(self): np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05) # label_smoothing test 3 + @test_with_pir_api def test_cross_entropy_loss_onehot_label_smoothing_2d(self): self.numeric_stable_mode = False self.soft_label = True @@ -1143,6 +1153,7 @@ def test_cross_entropy_loss_onehot_label_smoothing_2d(self): np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05) # label_smoothing test 4 + @test_with_pir_api def test_cross_entropy_loss_onehot_label_smoothing_2d_weight_mean(self): self.numeric_stable_mode = False 
self.soft_label = True @@ -1253,6 +1264,7 @@ def test_cross_entropy_loss_onehot_label_smoothing_2d_weight_mean(self): np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05) # label_smoothing test 5 + @test_with_pir_api def test_cross_entropy_loss_integer_label_smoothing_1d(self): self.numeric_stable_mode = False self.soft_label = True @@ -1350,6 +1362,7 @@ def test_cross_entropy_loss_integer_label_smoothing_1d(self): np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05) # label_smoothing test 6 + @test_with_pir_api def test_cross_entropy_loss_integer_label_smoothing_1d_weight_mean(self): self.numeric_stable_mode = False self.soft_label = True @@ -1452,6 +1465,7 @@ def test_cross_entropy_loss_integer_label_smoothing_1d_weight_mean(self): np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05) # label_smoothing test 7 + @test_with_pir_api def test_cross_entropy_loss_integer_label_smoothing_2d(self): self.numeric_stable_mode = False self.soft_label = True @@ -1557,6 +1571,7 @@ def test_cross_entropy_loss_integer_label_smoothing_2d(self): np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05) # label_smoothing test 8 + @test_with_pir_api def test_cross_entropy_loss_integer_label_smoothing_2d_weight_mean(self): self.numeric_stable_mode = False self.soft_label = True @@ -1667,7 +1682,7 @@ def test_cross_entropy_loss_integer_label_smoothing_2d_weight_mean(self): np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05) # label_smoothing test end - + @test_with_pir_api def test_cross_entropy_loss_1d_with_mean_ignore(self): input_np = np.random.random([2, 4]).astype(self.dtype) label_np = np.random.randint(0, 4, size=(2)).astype(np.int64) @@ -1714,6 +1729,7 @@ def test_cross_entropy_loss_1d_with_mean_ignore(self): np.testing.assert_allclose(static_ret[0], expected, rtol=1e-05) np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05) + @test_with_pir_api def test_cross_entropy_loss_1d_with_mean_ignore_negative(self): N = 100 C = 200 @@ -1763,6 +1779,7 @@ def test_cross_entropy_loss_1d_with_mean_ignore_negative(self): np.testing.assert_allclose(static_ret[0], expected, rtol=1e-05) np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05) + @test_with_pir_api def test_cross_entropy_loss_1d_with_weight_mean_ignore(self): N = 100 C = 200 @@ -1846,6 +1863,7 @@ def test_cross_entropy_loss_1d_with_weight_mean_ignore_exceedlabel(self): np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05) + @test_with_pir_api def test_cross_entropy_loss_1d_with_weight_mean(self): input_np = np.random.random([2, 4]).astype(self.dtype) label_np = np.random.randint(0, 4, size=(2)).astype(np.int64) @@ -1901,6 +1919,7 @@ def test_cross_entropy_loss_1d_with_weight_mean(self): np.testing.assert_allclose(static_ret[0], expected, rtol=1e-05) np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05) + @test_with_pir_api def test_cross_entropy_loss_1d_with_weight_sum(self): input_np = np.random.random([100, 200]).astype(self.dtype) # N,C label_np = np.random.randint(0, 100, size=(100)).astype(np.int64) # N,1 @@ -1954,6 +1973,7 @@ def test_cross_entropy_loss_1d_with_weight_sum(self): np.testing.assert_allclose(static_ret[0], expected, rtol=1e-05) np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05) + @test_with_pir_api def test_cross_entropy_loss_1d_with_weight_none(self): input_np = np.random.random([100, 200]).astype(self.dtype) # N,C label_np = np.random.randint(0, 100, size=(100)).astype(np.int64) # N,1 @@ -2011,6 +2031,7 @@ def 
test_cross_entropy_loss_1d_with_weight_none(self): np.testing.assert_allclose(static_ret, expected, rtol=1e-05) np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05) + @test_with_pir_api def test_cross_entropy_loss_1d_with_weight_none_func(self): input_np = np.random.random([100, 200]).astype(self.dtype) # N,C label_np = np.random.randint(0, 100, size=(100)).astype(np.int64) # N @@ -2064,6 +2085,7 @@ def test_cross_entropy_loss_1d_with_weight_none_func(self): np.testing.assert_allclose(static_ret, expected, rtol=1e-05) np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05) + @test_with_pir_api def test_cross_entropy_loss_1d_mean(self): input_np = np.random.random([100, 200]).astype(self.dtype) # N,C label_np = np.random.randint(0, 100, size=(100)).astype(np.int64) # N,1 @@ -2102,6 +2124,7 @@ def test_cross_entropy_loss_1d_mean(self): np.testing.assert_allclose(static_ret[0], expected, rtol=1e-05) np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05) + @test_with_pir_api def test_cross_entropy_loss_1d_sum(self): input_np = np.random.random([100, 200]).astype(self.dtype) # N,C label_np = np.random.randint(0, 100, size=(100)).astype(np.int64) # N,1 @@ -2144,6 +2167,7 @@ def test_cross_entropy_loss_1d_sum(self): np.testing.assert_allclose(static_ret[0], expected, rtol=1e-05) np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05) + @test_with_pir_api def test_cross_entropy_loss_1d_none(self): input_np = np.random.random([100, 200]).astype(self.dtype) # N,C label_np = np.random.randint(0, 100, size=(100)).astype(np.int64) # N,1 @@ -2188,6 +2212,7 @@ def test_cross_entropy_loss_1d_none(self): np.testing.assert_allclose(static_ret, expected, rtol=1e-05) np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05) + @test_with_pir_api def test_cross_entropy_loss_2d_with_weight_none(self): input_np = np.random.random(size=(2, 2, 2, 3)).astype( self.dtype @@ -2250,6 +2275,7 @@ def test_cross_entropy_loss_2d_with_weight_none(self): np.testing.assert_allclose(static_ret, expected, rtol=1e-05) np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05) + @test_with_pir_api def test_cross_entropy_loss_2d_with_weight_axis_change_mean(self): input_np = np.random.random(size=(2, 3, 2, 2)).astype( self.dtype @@ -2341,6 +2367,7 @@ def test_cross_entropy_loss_2d_with_weight_mean_ignore_exceedlabel(self): )[0] np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05) + @test_with_pir_api def test_cross_entropy_loss_2d_with_weight_mean(self): input_np = np.random.random(size=(2, 2, 2, 3)).astype( self.dtype @@ -2400,6 +2427,7 @@ def test_cross_entropy_loss_2d_with_weight_mean(self): np.testing.assert_allclose(static_ret[0], expected, rtol=1e-05) np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05) + @test_with_pir_api def test_cross_entropy_loss_2d_with_weight_sum(self): input_np = np.random.random(size=(2, 2, 2, 3)).astype( self.dtype @@ -2460,6 +2488,7 @@ def test_cross_entropy_loss_2d_with_weight_sum(self): np.testing.assert_allclose(static_ret[0], expected, rtol=1e-05) np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05) + @test_with_pir_api def test_cross_entropy_loss_2d_none(self): input_np = np.random.random(size=(2, 2, 2, 3)).astype( self.dtype @@ -2513,6 +2542,7 @@ def test_cross_entropy_loss_2d_none(self): np.testing.assert_allclose(static_ret, expected, rtol=1e-05) np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05) + @test_with_pir_api def test_cross_entropy_loss_2d_mean(self): input_np = np.random.random(size=(2, 2, 2, 
3)).astype( self.dtype @@ -2567,6 +2597,7 @@ def test_cross_entropy_loss_2d_mean(self): np.testing.assert_allclose(static_ret[0], expected, rtol=1e-05) np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05) + @test_with_pir_api def test_cross_entropy_loss_2d_sum(self): input_np = np.random.random(size=(2, 2, 2, 3)).astype( self.dtype From 76c431bb9f925c4830edfdf8bf72fffbb12a0799 Mon Sep 17 00:00:00 2001 From: 0x45f Date: Tue, 31 Oct 2023 06:58:39 +0000 Subject: [PATCH 2/5] Refine code --- python/paddle/nn/functional/loss.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/python/paddle/nn/functional/loss.py b/python/paddle/nn/functional/loss.py index ad5cd6745c2d9..583a19f1f7997 100644 --- a/python/paddle/nn/functional/loss.py +++ b/python/paddle/nn/functional/loss.py @@ -17,7 +17,7 @@ # TODO: define loss functions of neural network import paddle from paddle import _C_ops, base, in_dynamic_mode -from paddle.framework import core +from paddle.framework import core, in_dynamic_or_pir_mode from paddle.static.nn.control_flow import Assert from paddle.utils import deprecated @@ -2806,7 +2806,7 @@ def cross_entropy( label = label.astype(input.dtype) label_dims = len(list(label.shape)) - if in_dynamic_mode(): + if in_dynamic_or_pir_mode(): if not soft_label: valid_label = ( paddle.cast(label != ignore_index, dtype=label.dtype) * label From e450eb1b122405d5f00074884e3df155fa014f8b Mon Sep 17 00:00:00 2001 From: 0x45f Date: Tue, 31 Oct 2023 09:43:43 +0000 Subject: [PATCH 3/5] Refine == --- python/paddle/nn/functional/loss.py | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/python/paddle/nn/functional/loss.py b/python/paddle/nn/functional/loss.py index 583a19f1f7997..0307f0d6e47fb 100644 --- a/python/paddle/nn/functional/loss.py +++ b/python/paddle/nn/functional/loss.py @@ -2899,21 +2899,23 @@ def cross_entropy( if weight is None: mask = paddle.cast(mask, dtype=out_sum.dtype) count = _C_ops.sum(mask, [], None, False) - ret = out_sum / (count + (count == 0.0)) + ret = out_sum / (count + paddle.equal(count, 0.0)) else: mask = paddle.cast(mask, weight_gather_reshape.dtype) weight_ignored = _C_ops.multiply( mask, weight_gather_reshape ) weight_sum = _C_ops.sum(weight_ignored, [], None, False) - ret = out_sum / (weight_sum + (weight_sum == 0.0)) + ret = out_sum / (weight_sum + paddle.equal(weight_sum, 0.0)) return ret elif weight is not None: out_sum = _C_ops.sum(out, [], None, False) total_weight = _C_ops.sum( weight_gather_reshape, [], None, False ) - return out_sum / (total_weight + (total_weight == 0.0)) + return out_sum / ( + total_weight + paddle.equal(total_weight, 0.0) + ) else: return _C_ops.mean_all(out) From 419474c6abdd01ac0875876b542efcfa4a526749 Mon Sep 17 00:00:00 2001 From: 0x45f Date: Wed, 1 Nov 2023 12:16:20 +0000 Subject: [PATCH 4/5] Fix code --- python/paddle/nn/functional/loss.py | 65 ++++++++++++--------- test/legacy_test/test_cross_entropy_loss.py | 1 + 2 files changed, 37 insertions(+), 29 deletions(-) diff --git a/python/paddle/nn/functional/loss.py b/python/paddle/nn/functional/loss.py index 0307f0d6e47fb..d64a65e868de5 100644 --- a/python/paddle/nn/functional/loss.py +++ b/python/paddle/nn/functional/loss.py @@ -17,12 +17,12 @@ # TODO: define loss functions of neural network import paddle from paddle import _C_ops, base, in_dynamic_mode -from paddle.framework import core, in_dynamic_or_pir_mode +from paddle.framework import core from paddle.static.nn.control_flow import Assert from paddle.utils import deprecated from 
...base.data_feeder import check_variable_and_dtype -from ...base.framework import _current_expected_place +from ...base.framework import _current_expected_place, in_pir_mode from ...base.layer_helper import LayerHelper from ...common_ops_import import Variable from ...tensor.manipulation import reshape @@ -2806,7 +2806,7 @@ def cross_entropy( label = label.astype(input.dtype) label_dims = len(list(label.shape)) - if in_dynamic_or_pir_mode(): + if in_dynamic_mode(): if not soft_label: valid_label = ( paddle.cast(label != ignore_index, dtype=label.dtype) * label @@ -2889,7 +2889,7 @@ def cross_entropy( # 2. else # numerator: loss's weighted sum # denominator: cal the sum of weight where the sample's class_index!=ignore_index - is_ignore = paddle.equal(label, ignore_index) + is_ignore = label == ignore_index mask = ~is_ignore if paddle.count_nonzero(is_ignore) > 0: # ignore label out_sum = _C_ops.sum(out, [], None, False) @@ -2899,23 +2899,21 @@ def cross_entropy( if weight is None: mask = paddle.cast(mask, dtype=out_sum.dtype) count = _C_ops.sum(mask, [], None, False) - ret = out_sum / (count + paddle.equal(count, 0.0)) + ret = out_sum / (count + (count == 0.0)) else: mask = paddle.cast(mask, weight_gather_reshape.dtype) weight_ignored = _C_ops.multiply( mask, weight_gather_reshape ) weight_sum = _C_ops.sum(weight_ignored, [], None, False) - ret = out_sum / (weight_sum + paddle.equal(weight_sum, 0.0)) + ret = out_sum / (weight_sum + (weight_sum == 0.0)) return ret elif weight is not None: out_sum = _C_ops.sum(out, [], None, False) total_weight = _C_ops.sum( weight_gather_reshape, [], None, False ) - return out_sum / ( - total_weight + paddle.equal(total_weight, 0.0) - ) + return out_sum / (total_weight + (total_weight == 0.0)) else: return _C_ops.mean_all(out) @@ -2937,24 +2935,31 @@ def cross_entropy( ['uint8', 'int8', 'int16', 'int32', 'int64', 'float32', 'float64'], 'softmax_cross_entropy', ) - attrs = { - 'soft_label': soft_label, - 'ignore_index': ignore_index, - 'numeric_stable_mode': True, - 'axis': axis, - 'use_softmax': use_softmax, - } - helper = LayerHelper('softmax_with_cross_entropy', **locals()) - softmax = helper.create_variable_for_type_inference(dtype=input.dtype) - out = helper.create_variable_for_type_inference(dtype=input.dtype) + if in_pir_mode(): + softmax, out = _C_ops.cross_entropy_with_softmax( + input, label, soft_label, use_softmax, True, ignore_index, axis + ) + else: + attrs = { + 'soft_label': soft_label, + 'ignore_index': ignore_index, + 'numeric_stable_mode': True, + 'axis': axis, + 'use_softmax': use_softmax, + } + helper = LayerHelper('softmax_with_cross_entropy', **locals()) + softmax = helper.create_variable_for_type_inference( + dtype=input.dtype + ) + out = helper.create_variable_for_type_inference(dtype=input.dtype) - outputs = {'Softmax': softmax, 'Loss': out} - helper.append_op( - type='softmax_with_cross_entropy', - inputs={'Logits': input, 'Label': label}, - outputs=outputs, - attrs=attrs, - ) + outputs = {'Softmax': softmax, 'Loss': out} + helper.append_op( + type='softmax_with_cross_entropy', + inputs={'Logits': input, 'Label': label}, + outputs=outputs, + attrs=attrs, + ) if weight is not None: check_variable_and_dtype( @@ -3038,19 +3043,21 @@ def cross_entropy( if weight is None: mask = paddle.cast(mask, dtype=out_sum.dtype) count = paddle.sum(mask, name=name) - ret = out_sum / (count + (count == 0.0)) + ret = out_sum / (count + paddle.equal(count, 0.0)) else: mask = paddle.cast(mask, weight_gather_reshape.dtype) weight_ignored = 
paddle.multiply( mask, weight_gather_reshape ) weight_sum = paddle.sum(weight_ignored, name=name) - ret = out_sum / (weight_sum + (weight_sum == 0.0)) + ret = out_sum / (weight_sum + paddle.equal(weight_sum, 0.0)) return ret elif weight is not None: out_sum = paddle.sum(out, name=name) total_weight = paddle.sum(weight_gather_reshape) - return out_sum / (total_weight + (total_weight == 0.0)) + return out_sum / ( + total_weight + paddle.equal(total_weight, 0.0) + ) else: return paddle.mean(out, name=name) diff --git a/test/legacy_test/test_cross_entropy_loss.py b/test/legacy_test/test_cross_entropy_loss.py index 5c50ceab5dbe6..f9a2bda323514 100644 --- a/test/legacy_test/test_cross_entropy_loss.py +++ b/test/legacy_test/test_cross_entropy_loss.py @@ -544,6 +544,7 @@ def test_cross_entropy_loss_soft_1d_mean(self): self.assertIsNotNone(static_ret) paddle.disable_static() + breakpoint() np.testing.assert_allclose(static_ret[0], expected, rtol=1e-05) np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05) From 9adb7c4b58a4a17d9e917566e6b40955485d34f4 Mon Sep 17 00:00:00 2001 From: 0x45f Date: Wed, 1 Nov 2023 12:28:36 +0000 Subject: [PATCH 5/5] Refine code --- test/legacy_test/test_cross_entropy_loss.py | 1 - 1 file changed, 1 deletion(-) diff --git a/test/legacy_test/test_cross_entropy_loss.py b/test/legacy_test/test_cross_entropy_loss.py index f9a2bda323514..5c50ceab5dbe6 100644 --- a/test/legacy_test/test_cross_entropy_loss.py +++ b/test/legacy_test/test_cross_entropy_loss.py @@ -544,7 +544,6 @@ def test_cross_entropy_loss_soft_1d_mean(self): self.assertIsNotNone(static_ret) paddle.disable_static() - breakpoint() np.testing.assert_allclose(static_ret[0], expected, rtol=1e-05) np.testing.assert_allclose(dy_ret_value, expected, rtol=1e-05)
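
Series note: the churn across patches 2-4 is easy to misread, so to spell it
out. Patch 2 routes cross_entropy through in_dynamic_or_pir_mode(), patch 3
swaps the `==` comparisons for paddle.equal, and patch 4 walks both back for
the dynamic branch (a dygraph Tensor's `==` overload already performs an
elementwise equal, so in_dynamic_mode() and plain `==` return there), while
adding a dedicated in_pir_mode() branch that calls
_C_ops.cross_entropy_with_softmax directly and keeping paddle.equal in the
static/PIR reduction code, presumably because plain `==` is not guaranteed
to lower to an equal op on a pir Value. Patch 5 only drops the stray
breakpoint() that slipped into patch 4.

The recurring `x + (x == 0.0)` / `x + paddle.equal(x, 0.0)` expression is a
divide-by-zero guard for the weighted mean. A minimal standalone sketch
(values invented for illustration; the explicit cast is defensive, whereas
the patched code adds the boolean mask directly):

    import paddle

    out_sum = paddle.to_tensor(0.0)     # weighted loss sum: all samples ignored
    weight_sum = paddle.to_tensor(0.0)  # total weight of non-ignored samples

    # equal(weight_sum, 0.0) is 1 exactly when the denominator would be 0,
    # turning 0/0 into 0/1 = 0 instead of nan.
    guard = paddle.cast(paddle.equal(weight_sum, 0.0), weight_sum.dtype)
    print(out_sum / (weight_sum + guard))  # Tensor(0.)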
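
On the test side, every migrated case gains @test_with_pir_api from
paddle.pir_utils. As I read it, the decorator re-runs the static-graph
portion of the wrapped test under the PIR program translator; the decorator
itself is outside this series, so its internals are assumed here, and usage
is just:

    import unittest

    from paddle.pir_utils import test_with_pir_api

    class TestCrossEntropyLoss(unittest.TestCase):
        @test_with_pir_api
        def test_cross_entropy_loss_1d_mean(self):
            ...  # builds and runs a static program; executed under both IRs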