Skip to content

Commit

Permalink
[Auto Parallel] Add Embedding flops (#47978)
Browse files Browse the repository at this point in the history
* c_embedding

* add annotations

* add annotations

* revision

* revise attrs
Loading branch information
CjhHa1 authored Dec 1, 2022
1 parent 9218e74 commit 08c5f4c
Show file tree
Hide file tree
Showing 2 changed files with 13 additions and 0 deletions.
4 changes: 4 additions & 0 deletions python/paddle/fluid/tests/unittests/test_profiler.py
Original file line number Diff line number Diff line change
Expand Up @@ -297,6 +297,10 @@ def test_flops(self):
self.assertTrue(
flops('softmax', {'X': [[12, 12, 12]]}, {}) == 3 * 12 * 12 * 12
)
self.assertTrue(
flops('c_embedding', {'Ids': [[12, 12]], 'W': [[12, 12, 3]]}, {})
== 0
)


if __name__ == '__main__':
Expand Down
9 changes: 9 additions & 0 deletions python/paddle/utils/flops.py
Original file line number Diff line number Diff line change
Expand Up @@ -60,6 +60,15 @@ def register(func):
return register


@register_flops("c_embedding")
def _c_embedding_flops(input_shapes, attrs):
    """FLOPs computation for the c_embedding op.

    An embedding is a pure table lookup (gather) — it performs no
    multiply-add operations, so its FLOP count is defined as zero.

    Args:
        input_shapes (dict): maps input names ('Ids', 'W') to shape lists.
        attrs (dict): operator attributes (unused).

    Returns:
        int: always 0.
    """
    return 0


@register_flops("dropout")
def _dropout_flops(input_shapes, attrs):
"""FLOPs computation for dropout op.
Expand Down

0 comments on commit 08c5f4c

Please sign in to comment.