add autogen code support for margin_cross_entropy (#52130)
GreatV authored Mar 28, 2023
1 parent ad9b88a · commit 8c8c6d9
Showing 7 changed files with 28 additions and 221 deletions.
146 changes: 0 additions & 146 deletions paddle/fluid/operators/margin_cross_entropy_op.cc

This file was deleted.

11 changes: 11 additions & 0 deletions paddle/phi/api/yaml/backward.yaml
@@ -932,6 +932,17 @@
kernel :
func : lu_unpack_grad

- backward_op : margin_cross_entropy_grad
forward : margin_cross_entropy (Tensor logits, Tensor label, bool return_softmax=false, int ring_id=0, int rank=0, int nranks=1, float margin1=1.0f, float margin2=0.5f, float margin3=0.0f, float scale=64.0f) -> Tensor(softmax), Tensor(loss)
args : (Tensor logits, Tensor label, Tensor softmax, Tensor loss_grad, bool return_softmax, int ring_id, int rank, int nranks, float margin1, float margin2, float margin3, float scale)
output : Tensor(logits_grad)
infer_meta :
func : MarginCrossEntropyGradInferMeta
kernel :
func : margin_cross_entropy_grad
data_type : softmax
inplace : (softmax -> logits_grad)

- backward_op : masked_select_grad
forward : masked_select (Tensor x, Tensor mask) -> Tensor(out)
args : (Tensor x, Tensor mask, Tensor out_grad)
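For context (not part of the diff itself): margin1, margin2, margin3 and scale parameterize the combined angular-margin softmax that margin_cross_entropy is documented to implement. A sketch of the per-sample loss, writing s for scale, m1/m2/m3 for the margins, and theta_j for the angle between the feature and the class-j weight:

    L_i = -\log \frac{e^{\,s\,(\cos(m_1 \theta_{y_i} + m_2) - m_3)}}
                     {e^{\,s\,(\cos(m_1 \theta_{y_i} + m_2) - m_3)} + \sum_{j \neq y_i} e^{\,s \cos \theta_j}}

The defaults in the YAML above (margin1=1.0, margin2=0.5, margin3=0.0, scale=64.0) correspond to the ArcFace setting; margin2=0 with a nonzero margin3 gives a CosFace-style margin instead.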
11 changes: 0 additions & 11 deletions paddle/phi/api/yaml/legacy_backward.yaml
@@ -651,17 +651,6 @@
kernel :
func : lu_grad

- backward_op : margin_cross_entropy_grad
forward : margin_cross_entropy (Tensor logits, Tensor label, bool return_softmax, int ring_id, int rank, int nranks, float margin1, float margin2, float margin3, float scale) -> Tensor(softmax), Tensor(loss)
args : (Tensor logits, Tensor label, Tensor softmax, Tensor loss_grad, bool return_softmax, int ring_id, int rank, int nranks, float margin1, float margin2, float margin3, float scale)
output : Tensor(logits_grad)
infer_meta :
func : MarginCrossEntropyGradInferMeta
kernel :
func : margin_cross_entropy_grad
data_type : softmax
inplace : (softmax -> logits_grad)

- backward_op : matmul_double_grad
forward : matmul_grad (Tensor x, Tensor y, Tensor grad_out, bool transpose_x=false, bool transpose_y=false) -> Tensor(grad_x), Tensor(grad_y)
args : (Tensor x, Tensor y, Tensor grad_out, Tensor grad_x_grad, Tensor grad_y_grad, bool transpose_x=false, bool transpose_y=false)
10 changes: 0 additions & 10 deletions paddle/phi/api/yaml/legacy_ops.yaml
@@ -972,16 +972,6 @@
func : lu
backward : lu_grad

- op : margin_cross_entropy
args : (Tensor logits, Tensor label, bool return_softmax, int ring_id, int rank, int nranks, float margin1, float margin2, float margin3, float scale)
output : Tensor(softmax), Tensor(loss)
infer_meta :
func : MarginCrossEntropyInferMeta
kernel :
func : margin_cross_entropy
data_type : logits
backward : margin_cross_entropy_grad

- op : matmul
args : (Tensor x, Tensor y, bool transpose_x = false, bool transpose_y = false)
output : Tensor
7 changes: 7 additions & 0 deletions paddle/phi/api/yaml/op_compat.yaml
@@ -1137,6 +1137,13 @@
outputs :
{pmat : Pmat, l : L, u : U}

- op : margin_cross_entropy
backward : margin_cross_entropy_grad
inputs:
{logits : Logits, label : Label}
outputs:
{softmax : Softmax, loss : Loss}

- op : masked_select
inputs :
{x : X, mask : Mask}
10 changes: 10 additions & 0 deletions paddle/phi/api/yaml/ops.yaml
@@ -933,6 +933,16 @@
data_type : x
backward : lu_unpack_grad

- op : margin_cross_entropy
args : (Tensor logits, Tensor label, bool return_softmax = false, int ring_id = 0, int rank = 0, int nranks = 1, float margin1 = 1.0f, float margin2 = 0.5f, float margin3 = 0.0f, float scale = 64.0f)
output : Tensor(softmax), Tensor(loss)
infer_meta :
func : MarginCrossEntropyInferMeta
kernel :
func : margin_cross_entropy
data_type : logits
backward : margin_cross_entropy_grad

- op : masked_select
args : (Tensor x, Tensor mask)
output : Tensor (out)
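A minimal single-card usage sketch of the op from Python, assuming the public API paddle.nn.functional.margin_cross_entropy with the argument names and defaults shown in the YAML above (the exact signature may differ between releases):

    import paddle
    import paddle.nn.functional as F

    num_classes = 8
    # In practice logits are usually cosine similarities from normalized
    # features and weights; random values are fine for a smoke test.
    logits = paddle.randn([4, num_classes])
    label = paddle.randint(0, num_classes, [4])    # integer class ids

    # Keyword defaults mirror the YAML entry:
    # margin1=1.0, margin2=0.5, margin3=0.0, scale=64.0
    loss, softmax = F.margin_cross_entropy(
        logits,
        label,
        margin1=1.0,
        margin2=0.5,
        margin3=0.0,
        scale=64.0,
        return_softmax=True,
        reduction=None)        # per-sample loss; use 'mean' to reduce

    print(loss.shape, softmax.shape)

In the model-parallel case the class dimension is sharded across devices, which is what the ring_id / rank / nranks attributes in the op definition support; that path requires an initialized distributed group and is not shown here.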
54 changes: 0 additions & 54 deletions paddle/phi/ops/compat/margin_cross_entropy_sig.cc

This file was deleted.
