
[Keras][Bugfix] Fix a bug with the alpha attribute in LeakyReLU that leads to a conflict between passes #14707

Merged: 10 commits, Apr 28, 2023
python/tvm/relay/frontend/keras.py (2 additions, 0 deletions)
@@ -156,6 +156,8 @@ def _convert_advanced_activation(inexpr, keras_layer, etab, data_layout, input_s
             return _op.multiply(negative_slope, _op.subtract(inexpr, threshold))
         return _op.nn.relu(inexpr)
     if act_type == "LeakyReLU":
+        if np.isnan(keras_layer.alpha).any():
+            raise tvm.error.OpAttributeInvalid("The alpha value of a LeakyReLU cannot be None.")
         return _op.nn.leaky_relu(inexpr, alpha=float(keras_layer.alpha))
     if act_type == "ELU":
         alpha = keras_layer.alpha if hasattr(keras_layer, "alpha") else 1.0
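A side note on why the NaN check above catches alpha=None: older Keras casts the LeakyReLU constructor argument to a float array, so alpha=None reaches the TVM frontend as NaN rather than as a Python None. A minimal sketch of that behaviour, assuming the cast is equivalent to np.asarray(..., dtype="float32"); this is illustrative, not TVM code:

import numpy as np

# Assumption: older Keras stores LeakyReLU's alpha roughly as
# np.asarray(alpha, dtype="float32"), so alpha=None arrives as NaN.
alpha_none = np.asarray(None, dtype="float32")  # array(nan, dtype=float32)
alpha_ok = np.asarray(0.3, dtype="float32")     # array(0.3, dtype=float32)

print(np.isnan(alpha_none).any())  # True  -> the frontend raises OpAttributeInvalid
print(np.isnan(alpha_ok).any())    # False -> float(alpha_ok) is passed to leaky_relu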
tests/python/frontend/keras/test_forward.py (19 additions, 0 deletions)
@@ -15,11 +15,13 @@
 # specific language governing permissions and limitations
 # under the License.
 """Unit tests for various models and operators"""
+from packaging import version as package_version
 import numpy as np
 import tvm
 from tvm import relay
 from tvm.contrib import graph_executor
 import tvm.testing
+import pytest

 try:
     import tensorflow.compat.v1 as tf
@@ -210,6 +212,22 @@ def test_forward_activations(self, keras_mod):
             verify_keras_frontend(keras_model)
             verify_keras_frontend(keras_model, need_transpose=False, layout="NHWC")

+    def test_forward_activations_except(self, keras_mod):
+        """
+        Test the invalid attribute alpha=None for LeakyReLU.
+        Keras versions after 2.3.1 reject the invalid API call LeakyReLU(alpha=None)
+        (see https://github.com/tensorflow/tensorflow/pull/47017), so the Keras version
+        must be checked to avoid a crash at LeakyReLU(alpha=None) itself.
+        """
+        if package_version.parse(keras_mod.__version__.split("-tf")[0]) <= package_version.parse(
+            "2.3.1"
+        ):
+            data = keras_mod.layers.Input(shape=(2, 3, 4))
+            layer = keras_mod.layers.LeakyReLU(alpha=None)(data)
+            keras_model = keras_mod.models.Model(data, layer)
+            with pytest.raises(tvm.error.OpAttributeInvalid):
+                verify_keras_frontend(keras_model)
+
     def test_forward_dense(self, keras_mod):
         """test_forward_dense"""
         data = keras_mod.layers.Input(shape=(32, 32, 1))
@@ -749,6 +767,7 @@ def test_forward_time_distributed(self, keras_mod):
         sut.test_forward_merge_dot(keras_mod=k)
         sut.test_forward_merge(keras_mod=k)
         sut.test_forward_activations(keras_mod=k)
+        sut.test_forward_activations_except(keras_mod=k)
         sut.test_forward_dense(keras_mod=k)
         sut.test_forward_permute(keras_mod=k)
         sut.test_forward_sequential(keras_mod=k)
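A note on the version guard in the new test: tf.keras has historically reported version strings with a "-tf" suffix (for example "2.2.4-tf", an illustrative assumption here), so the test strips that suffix before comparing against 2.3.1 with packaging. A small sketch of the comparison, with made-up version strings:

from packaging import version as package_version

# Illustrative version strings (assumptions): standalone Keras and the tf.keras shim.
for raw in ("2.3.1", "2.2.4-tf", "2.9.0"):
    parsed = package_version.parse(raw.split("-tf")[0])
    run_negative_test = parsed <= package_version.parse("2.3.1")
    print(raw, "->", parsed, "| exercise LeakyReLU(alpha=None):", run_negative_test)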