From cfeeae1578e78123a49fcc08b2040fd4d31d0584 Mon Sep 17 00:00:00 2001 From: gabrieldemarmiesse Date: Sun, 1 Mar 2020 11:55:10 +0000 Subject: [PATCH 01/13] Added public functions to register everything. --- tensorflow_addons/BUILD | 14 ++++++ tensorflow_addons/__init__.py | 1 + tensorflow_addons/register.py | 70 ++++++++++++++++++++++++++++++ tensorflow_addons/register_test.py | 15 +++++++ 4 files changed, 100 insertions(+) create mode 100644 tensorflow_addons/register.py create mode 100644 tensorflow_addons/register_test.py diff --git a/tensorflow_addons/BUILD b/tensorflow_addons/BUILD index 6b0c7a4f72..9aeb1a513b 100644 --- a/tensorflow_addons/BUILD +++ b/tensorflow_addons/BUILD @@ -11,6 +11,7 @@ py_library( name = "tensorflow_addons", data = [ "__init__.py", + "register.py", "version.py", ], deps = [ @@ -24,5 +25,18 @@ py_library( "//tensorflow_addons/rnn", "//tensorflow_addons/seq2seq", "//tensorflow_addons/text", + "//tensorflow_addons/utils", + ], +) + +py_test( + name = "register_test", + size = "small", + srcs = [ + "register_test.py", + ], + main = "register_test.py", + deps = [ + ":tensorflow_addons", ], ) diff --git a/tensorflow_addons/__init__.py b/tensorflow_addons/__init__.py index c4706b0d87..d5527ecb6e 100644 --- a/tensorflow_addons/__init__.py +++ b/tensorflow_addons/__init__.py @@ -28,5 +28,6 @@ from tensorflow_addons import rnn from tensorflow_addons import seq2seq from tensorflow_addons import text +from tensorflow_addons.register import register_all from tensorflow_addons.version import __version__ diff --git a/tensorflow_addons/register.py b/tensorflow_addons/register.py new file mode 100644 index 0000000000..5f7202ca40 --- /dev/null +++ b/tensorflow_addons/register.py @@ -0,0 +1,70 @@ +import inspect +import glob +import tensorflow as tf +from tensorflow.keras.utils import register_keras_serializable +import warnings +from tensorflow_addons import ( + activations, + callbacks, + image, + layers, + losses, + metrics, + optimizers, + rnn, + seq2seq, +) +from tensorflow_addons.utils.resource_loader import get_project_root +import os + +SUBMODULES = [ + activations, + callbacks, + image, + layers, + losses, + metrics, + optimizers, + rnn, + seq2seq, +] + +already_registered = False + + +def register_all(keras_objects: bool = True, custom_kernels: bool = True) -> None: + if keras_objects: + register_keras_objects() + if custom_kernels: + register_custom_kernels() + + +def register_keras_objects() -> None: + global already_registered + if already_registered: + warnings.warn( + "Tensorflow Addons' functions and classes are already " + "registered in the Keras custom objects dictionary.", + UserWarning, + ) + for module in SUBMODULES: + for attribute in _get_attributes(module): + if inspect.isclass(attribute) or inspect.isfunction(attribute): + register_keras_serializable(package="Addons")(attribute) + + already_registered = True + + +def register_custom_kernels() -> None: + custom_ops_dir = os.path.join(get_project_root(), "custom_ops") + all_shared_objects = glob.glob(custom_ops_dir + "/**/*.so", recursive=True) + for shared_object in all_shared_objects: + tf.load_op_library(shared_object) + + +def _get_attributes(module): + for attr_name in dir(module): + if attr_name.startswith("_"): + continue + attr = getattr(module, attr_name) + yield attr diff --git a/tensorflow_addons/register_test.py b/tensorflow_addons/register_test.py new file mode 100644 index 0000000000..899f86b1c4 --- /dev/null +++ b/tensorflow_addons/register_test.py @@ -0,0 +1,15 @@ +import unittest +from 
tensorflow_addons.register import register_all + + +class AssertRNNCellTest(unittest.TestCase): + def setUp(self): + pass + + def test_multiple_register(self): + register_all() + register_all() + + +if __name__ == "__main__": + unittest.main() From ebea5bdb986b92613eeabef87f1dc9b4d90ebfee Mon Sep 17 00:00:00 2001 From: gabrieldemarmiesse Date: Sun, 1 Mar 2020 11:59:47 +0000 Subject: [PATCH 02/13] Removed decorator --- tensorflow_addons/activations/gelu.py | 1 - tensorflow_addons/activations/hardshrink.py | 1 - tensorflow_addons/activations/lisht.py | 1 - tensorflow_addons/activations/mish.py | 1 - tensorflow_addons/activations/rrelu.py | 1 - tensorflow_addons/activations/softshrink.py | 1 - tensorflow_addons/activations/sparsemax.py | 1 - tensorflow_addons/activations/tanhshrink.py | 1 - tensorflow_addons/callbacks/time_stopping.py | 2 -- tensorflow_addons/callbacks/tqdm_progress_bar.py | 2 -- tensorflow_addons/layers/gelu.py | 1 - tensorflow_addons/layers/maxout.py | 1 - tensorflow_addons/layers/normalizations.py | 2 -- tensorflow_addons/layers/optical_flow.py | 1 - tensorflow_addons/layers/poincare.py | 1 - tensorflow_addons/layers/polynomial.py | 1 - tensorflow_addons/layers/sparsemax.py | 1 - tensorflow_addons/layers/tlu.py | 1 - tensorflow_addons/layers/wrappers.py | 1 - tensorflow_addons/losses/contrastive.py | 2 -- tensorflow_addons/losses/focal_loss.py | 2 -- tensorflow_addons/losses/giou_loss.py | 2 -- tensorflow_addons/losses/lifted.py | 2 -- tensorflow_addons/losses/npairs.py | 4 ---- tensorflow_addons/losses/quantiles.py | 2 -- tensorflow_addons/losses/sparsemax_loss.py | 3 --- tensorflow_addons/losses/triplet.py | 4 ---- tensorflow_addons/metrics/cohens_kappa.py | 1 - tensorflow_addons/metrics/f_scores.py | 2 -- tensorflow_addons/metrics/matthews_correlation_coefficient.py | 1 - tensorflow_addons/optimizers/conditional_gradient.py | 1 - tensorflow_addons/optimizers/cyclical_learning_rate.py | 4 ---- tensorflow_addons/optimizers/lamb.py | 1 - tensorflow_addons/optimizers/lazy_adam.py | 1 - tensorflow_addons/optimizers/lookahead.py | 1 - tensorflow_addons/optimizers/moving_average.py | 1 - tensorflow_addons/optimizers/novograd.py | 1 - tensorflow_addons/optimizers/rectified_adam.py | 1 - tensorflow_addons/optimizers/stochastic_weight_averaging.py | 1 - tensorflow_addons/optimizers/weight_decay_optimizers.py | 2 -- tensorflow_addons/optimizers/yogi.py | 1 - tensorflow_addons/rnn/cell.py | 3 --- 42 files changed, 65 deletions(-) diff --git a/tensorflow_addons/activations/gelu.py b/tensorflow_addons/activations/gelu.py index e82c58a03d..54e4c8c9f8 100644 --- a/tensorflow_addons/activations/gelu.py +++ b/tensorflow_addons/activations/gelu.py @@ -22,7 +22,6 @@ _activation_so = LazySO("custom_ops/activations/_activation_ops.so") -@tf.keras.utils.register_keras_serializable(package="Addons") def gelu(x: types.TensorLike, approximate: bool = True) -> tf.Tensor: """Gaussian Error Linear Unit. 
diff --git a/tensorflow_addons/activations/hardshrink.py b/tensorflow_addons/activations/hardshrink.py index 7817e60241..c45eaedcf0 100644 --- a/tensorflow_addons/activations/hardshrink.py +++ b/tensorflow_addons/activations/hardshrink.py @@ -22,7 +22,6 @@ _activation_so = LazySO("custom_ops/activations/_activation_ops.so") -@tf.keras.utils.register_keras_serializable(package="Addons") def hardshrink( x: types.TensorLike, lower: Number = -0.5, upper: Number = 0.5 ) -> tf.Tensor: diff --git a/tensorflow_addons/activations/lisht.py b/tensorflow_addons/activations/lisht.py index a4cb7febde..3eb7c691aa 100644 --- a/tensorflow_addons/activations/lisht.py +++ b/tensorflow_addons/activations/lisht.py @@ -21,7 +21,6 @@ _activation_so = LazySO("custom_ops/activations/_activation_ops.so") -@tf.keras.utils.register_keras_serializable(package="Addons") def lisht(x: types.TensorLike) -> tf.Tensor: """LiSHT: Non-Parameteric Linearly Scaled Hyperbolic Tangent Activation Function. diff --git a/tensorflow_addons/activations/mish.py b/tensorflow_addons/activations/mish.py index 2a5e5a8f05..fae7a267b5 100644 --- a/tensorflow_addons/activations/mish.py +++ b/tensorflow_addons/activations/mish.py @@ -21,7 +21,6 @@ _activation_so = LazySO("custom_ops/activations/_activation_ops.so") -@tf.keras.utils.register_keras_serializable(package="Addons") def mish(x: types.TensorLike) -> tf.Tensor: """Mish: A Self Regularized Non-Monotonic Neural Activation Function. diff --git a/tensorflow_addons/activations/rrelu.py b/tensorflow_addons/activations/rrelu.py index 960bd7f2d1..c69c04f2f1 100644 --- a/tensorflow_addons/activations/rrelu.py +++ b/tensorflow_addons/activations/rrelu.py @@ -19,7 +19,6 @@ from typing import Optional -@tf.keras.utils.register_keras_serializable(package="Addons") def rrelu( x: types.TensorLike, lower: Number = 0.125, diff --git a/tensorflow_addons/activations/softshrink.py b/tensorflow_addons/activations/softshrink.py index 238cc19036..204f8f074c 100644 --- a/tensorflow_addons/activations/softshrink.py +++ b/tensorflow_addons/activations/softshrink.py @@ -22,7 +22,6 @@ _activation_so = LazySO("custom_ops/activations/_activation_ops.so") -@tf.keras.utils.register_keras_serializable(package="Addons") def softshrink( x: types.TensorLike, lower: Number = -0.5, upper: Number = 0.5 ) -> tf.Tensor: diff --git a/tensorflow_addons/activations/sparsemax.py b/tensorflow_addons/activations/sparsemax.py index 1dec26d58c..8c098d4100 100644 --- a/tensorflow_addons/activations/sparsemax.py +++ b/tensorflow_addons/activations/sparsemax.py @@ -18,7 +18,6 @@ from tensorflow_addons.utils import types -@tf.keras.utils.register_keras_serializable(package="Addons") @tf.function def sparsemax(logits: types.TensorLike, axis: int = -1) -> tf.Tensor: """Sparsemax activation function [1]. 
diff --git a/tensorflow_addons/activations/tanhshrink.py b/tensorflow_addons/activations/tanhshrink.py index 0c78b78074..217a10f9a3 100644 --- a/tensorflow_addons/activations/tanhshrink.py +++ b/tensorflow_addons/activations/tanhshrink.py @@ -21,7 +21,6 @@ _activation_so = LazySO("custom_ops/activations/_activation_ops.so") -@tf.keras.utils.register_keras_serializable(package="Addons") def tanhshrink(x: types.TensorLike) -> tf.Tensor: """Applies the element-wise function: x - tanh(x) diff --git a/tensorflow_addons/callbacks/time_stopping.py b/tensorflow_addons/callbacks/time_stopping.py index 5198a23322..823a58cfe2 100644 --- a/tensorflow_addons/callbacks/time_stopping.py +++ b/tensorflow_addons/callbacks/time_stopping.py @@ -18,11 +18,9 @@ import time from typeguard import typechecked -import tensorflow as tf from tensorflow.keras.callbacks import Callback -@tf.keras.utils.register_keras_serializable(package="Addons") class TimeStopping(Callback): """Stop training when a specified amount of time has passed. diff --git a/tensorflow_addons/callbacks/tqdm_progress_bar.py b/tensorflow_addons/callbacks/tqdm_progress_bar.py index 94151d3e5f..d2cf7974a4 100644 --- a/tensorflow_addons/callbacks/tqdm_progress_bar.py +++ b/tensorflow_addons/callbacks/tqdm_progress_bar.py @@ -15,14 +15,12 @@ """TQDM Progress Bar.""" import time -import tensorflow as tf from collections import defaultdict from typeguard import typechecked from tensorflow.keras.callbacks import Callback -@tf.keras.utils.register_keras_serializable(package="Addons") class TQDMProgressBar(Callback): """TQDM Progress Bar for Tensorflow Keras. diff --git a/tensorflow_addons/layers/gelu.py b/tensorflow_addons/layers/gelu.py index d9407cd87e..4a2b49c174 100644 --- a/tensorflow_addons/layers/gelu.py +++ b/tensorflow_addons/layers/gelu.py @@ -19,7 +19,6 @@ from typeguard import typechecked -@tf.keras.utils.register_keras_serializable(package="Addons") class GELU(tf.keras.layers.Layer): """Gaussian Error Linear Unit. diff --git a/tensorflow_addons/layers/maxout.py b/tensorflow_addons/layers/maxout.py index b9507e142a..b4fd5745cf 100644 --- a/tensorflow_addons/layers/maxout.py +++ b/tensorflow_addons/layers/maxout.py @@ -18,7 +18,6 @@ from typeguard import typechecked -@tf.keras.utils.register_keras_serializable(package="Addons") class Maxout(tf.keras.layers.Layer): """Applies Maxout to the input. diff --git a/tensorflow_addons/layers/normalizations.py b/tensorflow_addons/layers/normalizations.py index 7322701f5f..2895e5d647 100644 --- a/tensorflow_addons/layers/normalizations.py +++ b/tensorflow_addons/layers/normalizations.py @@ -22,7 +22,6 @@ from tensorflow_addons.utils import types -@tf.keras.utils.register_keras_serializable(package="Addons") class GroupNormalization(tf.keras.layers.Layer): """Group normalization layer. @@ -278,7 +277,6 @@ def _create_broadcast_shape(self, input_shape): return broadcast_shape -@tf.keras.utils.register_keras_serializable(package="Addons") class InstanceNormalization(GroupNormalization): """Instance normalization layer. diff --git a/tensorflow_addons/layers/optical_flow.py b/tensorflow_addons/layers/optical_flow.py index fe65f1d59c..4fee7a02ca 100644 --- a/tensorflow_addons/layers/optical_flow.py +++ b/tensorflow_addons/layers/optical_flow.py @@ -138,7 +138,6 @@ def _correlation_cost_grad(op, grad_output): return [grad_input_a, grad_input_b] -@tf.keras.utils.register_keras_serializable(package="Addons") class CorrelationCost(tf.keras.layers.Layer): """Correlation Cost Layer. 
diff --git a/tensorflow_addons/layers/poincare.py b/tensorflow_addons/layers/poincare.py index 4f96d92f0c..b6a3a11c6b 100644 --- a/tensorflow_addons/layers/poincare.py +++ b/tensorflow_addons/layers/poincare.py @@ -18,7 +18,6 @@ from typeguard import typechecked -@tf.keras.utils.register_keras_serializable(package="Addons") class PoincareNormalize(tf.keras.layers.Layer): """Project into the Poincare ball with norm <= 1.0 - epsilon. diff --git a/tensorflow_addons/layers/polynomial.py b/tensorflow_addons/layers/polynomial.py index 4b789ae0fc..5dc0470bb6 100644 --- a/tensorflow_addons/layers/polynomial.py +++ b/tensorflow_addons/layers/polynomial.py @@ -20,7 +20,6 @@ from tensorflow_addons.utils import types -@tf.keras.utils.register_keras_serializable(package="Addons") class PolynomialCrossing(tf.keras.layers.Layer): """Layer for Deep & Cross Network to learn explicit feature interactions. diff --git a/tensorflow_addons/layers/sparsemax.py b/tensorflow_addons/layers/sparsemax.py index fd3ccf8ba3..16f9c374f1 100644 --- a/tensorflow_addons/layers/sparsemax.py +++ b/tensorflow_addons/layers/sparsemax.py @@ -18,7 +18,6 @@ from typeguard import typechecked -@tf.keras.utils.register_keras_serializable(package="Addons") class Sparsemax(tf.keras.layers.Layer): """Sparsemax activation function [1]. diff --git a/tensorflow_addons/layers/tlu.py b/tensorflow_addons/layers/tlu.py index 7698e6d602..9b4e54dd48 100644 --- a/tensorflow_addons/layers/tlu.py +++ b/tensorflow_addons/layers/tlu.py @@ -20,7 +20,6 @@ from tensorflow_addons.utils import types -@tf.keras.utils.register_keras_serializable(package="Addons") class TLU(tf.keras.layers.Layer): """Thresholded Linear Unit. An activation function which is similar to ReLU but with a learned threshold that benefits models using FRN(Filter Response diff --git a/tensorflow_addons/layers/wrappers.py b/tensorflow_addons/layers/wrappers.py index 418cf1657b..0978e82568 100644 --- a/tensorflow_addons/layers/wrappers.py +++ b/tensorflow_addons/layers/wrappers.py @@ -19,7 +19,6 @@ from typeguard import typechecked -@tf.keras.utils.register_keras_serializable(package="Addons") class WeightNormalization(tf.keras.layers.Wrapper): """This wrapper reparameterizes a layer by decoupling the weight's magnitude and direction. diff --git a/tensorflow_addons/losses/contrastive.py b/tensorflow_addons/losses/contrastive.py index 3b4054c434..c4e67824fc 100644 --- a/tensorflow_addons/losses/contrastive.py +++ b/tensorflow_addons/losses/contrastive.py @@ -20,7 +20,6 @@ from typeguard import typechecked -@tf.keras.utils.register_keras_serializable(package="Addons") @tf.function def contrastive_loss( y_true: TensorLike, y_pred: TensorLike, margin: Number = 1.0 @@ -59,7 +58,6 @@ def contrastive_loss( ) -@tf.keras.utils.register_keras_serializable(package="Addons") class ContrastiveLoss(tf.keras.losses.Loss): r"""Computes the contrastive loss between `y_true` and `y_pred`. diff --git a/tensorflow_addons/losses/focal_loss.py b/tensorflow_addons/losses/focal_loss.py index 08f5ee9c17..74d60f3d60 100644 --- a/tensorflow_addons/losses/focal_loss.py +++ b/tensorflow_addons/losses/focal_loss.py @@ -21,7 +21,6 @@ from typeguard import typechecked -@tf.keras.utils.register_keras_serializable(package="Addons") class SigmoidFocalCrossEntropy(tf.keras.losses.Loss): """Implements the focal loss function. 
@@ -99,7 +98,6 @@ def get_config(self): return {**base_config, **config} -@tf.keras.utils.register_keras_serializable(package="Addons") @tf.function def sigmoid_focal_crossentropy( y_true: TensorLike, diff --git a/tensorflow_addons/losses/giou_loss.py b/tensorflow_addons/losses/giou_loss.py index fa7f63d125..103ca6a188 100644 --- a/tensorflow_addons/losses/giou_loss.py +++ b/tensorflow_addons/losses/giou_loss.py @@ -21,7 +21,6 @@ from typeguard import typechecked -@tf.keras.utils.register_keras_serializable(package="Addons") class GIoULoss(tf.keras.losses.Loss): """Implements the GIoU loss function. @@ -72,7 +71,6 @@ def call(self, y_true, y_pred): return giou_loss(y_true, y_pred, mode=self.mode) -@tf.keras.utils.register_keras_serializable(package="Addons") @tf.function def giou_loss(y_true: TensorLike, y_pred: TensorLike, mode: str = "giou") -> tf.Tensor: """ diff --git a/tensorflow_addons/losses/lifted.py b/tensorflow_addons/losses/lifted.py index b587cee99c..c3aa8fb4a3 100644 --- a/tensorflow_addons/losses/lifted.py +++ b/tensorflow_addons/losses/lifted.py @@ -22,7 +22,6 @@ from typing import Optional -@tf.keras.utils.register_keras_serializable(package="Addons") @tf.function def lifted_struct_loss( labels: TensorLike, embeddings: TensorLike, margin: FloatTensorLike = 1.0 @@ -111,7 +110,6 @@ def lifted_struct_loss( return lifted_loss -@tf.keras.utils.register_keras_serializable(package="Addons") class LiftedStructLoss(tf.keras.losses.Loss): """Computes the lifted structured loss. diff --git a/tensorflow_addons/losses/npairs.py b/tensorflow_addons/losses/npairs.py index 1693123505..a1163d082e 100644 --- a/tensorflow_addons/losses/npairs.py +++ b/tensorflow_addons/losses/npairs.py @@ -20,7 +20,6 @@ from typeguard import typechecked -@tf.keras.utils.register_keras_serializable(package="Addons") @tf.function def npairs_loss(y_true: TensorLike, y_pred: TensorLike) -> tf.Tensor: """Computes the npairs loss between `y_true` and `y_pred`. @@ -62,7 +61,6 @@ def npairs_loss(y_true: TensorLike, y_pred: TensorLike) -> tf.Tensor: return tf.math.reduce_mean(loss) -@tf.keras.utils.register_keras_serializable(package="Addons") @tf.function def npairs_multilabel_loss(y_true: TensorLike, y_pred: TensorLike) -> tf.Tensor: r"""Computes the npairs loss between multilabel data `y_true` and `y_pred`. @@ -127,7 +125,6 @@ def npairs_multilabel_loss(y_true: TensorLike, y_pred: TensorLike) -> tf.Tensor: return tf.math.reduce_mean(loss) -@tf.keras.utils.register_keras_serializable(package="Addons") class NpairsLoss(tf.keras.losses.Loss): """Computes the npairs loss between `y_true` and `y_pred`. @@ -158,7 +155,6 @@ def call(self, y_true, y_pred): return npairs_loss(y_true, y_pred) -@tf.keras.utils.register_keras_serializable(package="Addons") class NpairsMultilabelLoss(tf.keras.losses.Loss): r"""Computes the npairs loss between multilabel data `y_true` and `y_pred`. 
diff --git a/tensorflow_addons/losses/quantiles.py b/tensorflow_addons/losses/quantiles.py index 44e02304f9..f61fc9cc77 100644 --- a/tensorflow_addons/losses/quantiles.py +++ b/tensorflow_addons/losses/quantiles.py @@ -19,7 +19,6 @@ from tensorflow_addons.utils.types import TensorLike, FloatTensorLike -@tf.keras.utils.register_keras_serializable(package="Addons") @tf.function def pinball_loss( y_true: TensorLike, y_pred: TensorLike, tau: FloatTensorLike = 0.5 @@ -72,7 +71,6 @@ def pinball_loss( return tf.reduce_mean(tf.keras.backend.batch_flatten(pinball), axis=-1) -@tf.keras.utils.register_keras_serializable(package="Addons") class PinballLoss(tf.keras.losses.Loss): """Computes the pinball loss between `y_true` and `y_pred`. diff --git a/tensorflow_addons/losses/sparsemax_loss.py b/tensorflow_addons/losses/sparsemax_loss.py index 9cd6d4433a..b506e352bd 100644 --- a/tensorflow_addons/losses/sparsemax_loss.py +++ b/tensorflow_addons/losses/sparsemax_loss.py @@ -21,7 +21,6 @@ from typing import Optional -@tf.keras.utils.register_keras_serializable(package="Addons") def sparsemax_loss( logits: TensorLike, sparsemax: TensorLike, @@ -85,7 +84,6 @@ def sparsemax_loss( @tf.function -@tf.keras.utils.register_keras_serializable(package="Addons") def sparsemax_loss_from_logits( y_true: TensorLike, logits_pred: TensorLike ) -> tf.Tensor: @@ -94,7 +92,6 @@ def sparsemax_loss_from_logits( return loss -@tf.keras.utils.register_keras_serializable(package="Addons") class SparsemaxLoss(tf.keras.losses.Loss): """Sparsemax loss function. diff --git a/tensorflow_addons/losses/triplet.py b/tensorflow_addons/losses/triplet.py index c78d697298..41267535cf 100644 --- a/tensorflow_addons/losses/triplet.py +++ b/tensorflow_addons/losses/triplet.py @@ -65,7 +65,6 @@ def _masked_minimum(data, mask, dim=1): return masked_minimums -@tf.keras.utils.register_keras_serializable(package="Addons") @tf.function def triplet_semihard_loss( y_true: TensorLike, y_pred: TensorLike, margin: FloatTensorLike = 1.0 @@ -147,7 +146,6 @@ def triplet_semihard_loss( return triplet_loss -@tf.keras.utils.register_keras_serializable(package="Addons") @tf.function def triplet_hard_loss( y_true: TensorLike, @@ -203,7 +201,6 @@ def triplet_hard_loss( return triplet_loss -@tf.keras.utils.register_keras_serializable(package="Addons") class TripletSemiHardLoss(tf.keras.losses.Loss): """Computes the triplet loss with semi-hard negative mining. @@ -241,7 +238,6 @@ def get_config(self): return {**base_config, **config} -@tf.keras.utils.register_keras_serializable(package="Addons") class TripletHardLoss(tf.keras.losses.Loss): """Computes the triplet loss with hard negative and hard positive mining. diff --git a/tensorflow_addons/metrics/cohens_kappa.py b/tensorflow_addons/metrics/cohens_kappa.py index 48631e5f07..f250beece1 100644 --- a/tensorflow_addons/metrics/cohens_kappa.py +++ b/tensorflow_addons/metrics/cohens_kappa.py @@ -24,7 +24,6 @@ from typing import Optional -@tf.keras.utils.register_keras_serializable(package="Addons") class CohenKappa(Metric): """Computes Kappa score between two raters. diff --git a/tensorflow_addons/metrics/f_scores.py b/tensorflow_addons/metrics/f_scores.py index 189b2fecc9..853de0b7e3 100755 --- a/tensorflow_addons/metrics/f_scores.py +++ b/tensorflow_addons/metrics/f_scores.py @@ -21,7 +21,6 @@ from typing import Optional -@tf.keras.utils.register_keras_serializable(package="Addons") class FBetaScore(tf.keras.metrics.Metric): """Computes F-Beta score. 
@@ -189,7 +188,6 @@ def reset_states(self): self.weights_intermediate.assign(tf.zeros(self.init_shape, self.dtype)) -@tf.keras.utils.register_keras_serializable(package="Addons") class F1Score(FBetaScore): """Computes F-1 Score. diff --git a/tensorflow_addons/metrics/matthews_correlation_coefficient.py b/tensorflow_addons/metrics/matthews_correlation_coefficient.py index b49aff4066..621afe7aaa 100644 --- a/tensorflow_addons/metrics/matthews_correlation_coefficient.py +++ b/tensorflow_addons/metrics/matthews_correlation_coefficient.py @@ -20,7 +20,6 @@ from typeguard import typechecked -@tf.keras.utils.register_keras_serializable(package="Addons") class MatthewsCorrelationCoefficient(tf.keras.metrics.Metric): """Computes the Matthews Correlation Coefficient. diff --git a/tensorflow_addons/optimizers/conditional_gradient.py b/tensorflow_addons/optimizers/conditional_gradient.py index 1fdbc26a71..4a2bc85cb3 100644 --- a/tensorflow_addons/optimizers/conditional_gradient.py +++ b/tensorflow_addons/optimizers/conditional_gradient.py @@ -21,7 +21,6 @@ from typing import Union, Callable -@tf.keras.utils.register_keras_serializable(package="Addons") class ConditionalGradient(tf.keras.optimizers.Optimizer): """Optimizer that implements the Conditional Gradient optimization. diff --git a/tensorflow_addons/optimizers/cyclical_learning_rate.py b/tensorflow_addons/optimizers/cyclical_learning_rate.py index 9d9bdf1285..481ea4348c 100644 --- a/tensorflow_addons/optimizers/cyclical_learning_rate.py +++ b/tensorflow_addons/optimizers/cyclical_learning_rate.py @@ -21,7 +21,6 @@ from typing import Union, Callable -@tf.keras.utils.register_keras_serializable(package="Addons") class CyclicalLearningRate(tf.keras.optimizers.schedules.LearningRateSchedule): """A LearningRateSchedule that uses cyclical schedule.""" @@ -109,7 +108,6 @@ def get_config(self): } -@tf.keras.utils.register_keras_serializable(package="Addons") class TriangularCyclicalLearningRate(CyclicalLearningRate): @typechecked def __init__( @@ -170,7 +168,6 @@ def __init__( ) -@tf.keras.utils.register_keras_serializable(package="Addons") class Triangular2CyclicalLearningRate(CyclicalLearningRate): @typechecked def __init__( @@ -231,7 +228,6 @@ def __init__( ) -@tf.keras.utils.register_keras_serializable(package="Addons") class ExponentialCyclicalLearningRate(CyclicalLearningRate): @typechecked def __init__( diff --git a/tensorflow_addons/optimizers/lamb.py b/tensorflow_addons/optimizers/lamb.py index d5a5807048..597045de73 100644 --- a/tensorflow_addons/optimizers/lamb.py +++ b/tensorflow_addons/optimizers/lamb.py @@ -26,7 +26,6 @@ from tensorflow_addons.utils.types import FloatTensorLike -@tf.keras.utils.register_keras_serializable(package="Addons") class LAMB(tf.keras.optimizers.Optimizer): """Optimizer that implements the Layer-wise Adaptive Moments (LAMB). diff --git a/tensorflow_addons/optimizers/lazy_adam.py b/tensorflow_addons/optimizers/lazy_adam.py index 869cec5b08..0221ae7cba 100644 --- a/tensorflow_addons/optimizers/lazy_adam.py +++ b/tensorflow_addons/optimizers/lazy_adam.py @@ -27,7 +27,6 @@ from typing import Union, Callable -@tf.keras.utils.register_keras_serializable(package="Addons") class LazyAdam(tf.keras.optimizers.Adam): """Variant of the Adam optimizer that handles sparse updates more efficiently. 
diff --git a/tensorflow_addons/optimizers/lookahead.py b/tensorflow_addons/optimizers/lookahead.py index 7d63e757e0..721e3295ce 100644 --- a/tensorflow_addons/optimizers/lookahead.py +++ b/tensorflow_addons/optimizers/lookahead.py @@ -20,7 +20,6 @@ from typing import Union -@tf.keras.utils.register_keras_serializable(package="Addons") class Lookahead(tf.keras.optimizers.Optimizer): """This class allows to extend optimizers with the lookahead mechanism. diff --git a/tensorflow_addons/optimizers/moving_average.py b/tensorflow_addons/optimizers/moving_average.py index 5d3ccaa0e9..e7096b8b9a 100644 --- a/tensorflow_addons/optimizers/moving_average.py +++ b/tensorflow_addons/optimizers/moving_average.py @@ -22,7 +22,6 @@ from typeguard import typechecked -@tf.keras.utils.register_keras_serializable(package="Addons") class MovingAverage(AveragedOptimizerWrapper): """Optimizer that computes a moving average of the variables. diff --git a/tensorflow_addons/optimizers/novograd.py b/tensorflow_addons/optimizers/novograd.py index f9edb7f6e5..04453ac5cb 100644 --- a/tensorflow_addons/optimizers/novograd.py +++ b/tensorflow_addons/optimizers/novograd.py @@ -24,7 +24,6 @@ from tensorflow.python.training import training_ops -@tf.keras.utils.register_keras_serializable(package="Addons") class NovoGrad(tf.keras.optimizers.Optimizer): """The NovoGrad Optimizer was first proposed in [Stochastic Gradient Methods with Layerwise Adaptvie Moments for training of Deep diff --git a/tensorflow_addons/optimizers/rectified_adam.py b/tensorflow_addons/optimizers/rectified_adam.py index ee2c29efb7..8a3c63c39d 100644 --- a/tensorflow_addons/optimizers/rectified_adam.py +++ b/tensorflow_addons/optimizers/rectified_adam.py @@ -21,7 +21,6 @@ from typeguard import typechecked -@tf.keras.utils.register_keras_serializable(package="Addons") class RectifiedAdam(tf.keras.optimizers.Optimizer): """Variant of the Adam optimizer whose adaptive learning rate is rectified so as to have a consistent variance. diff --git a/tensorflow_addons/optimizers/stochastic_weight_averaging.py b/tensorflow_addons/optimizers/stochastic_weight_averaging.py index baad30027d..c0d76e1588 100644 --- a/tensorflow_addons/optimizers/stochastic_weight_averaging.py +++ b/tensorflow_addons/optimizers/stochastic_weight_averaging.py @@ -30,7 +30,6 @@ from typing import Union -@tf.keras.utils.register_keras_serializable(package="Addons") class SWA(AveragedOptimizerWrapper): """This class extends optimizers with Stochastic Weight Averaging (SWA). diff --git a/tensorflow_addons/optimizers/weight_decay_optimizers.py b/tensorflow_addons/optimizers/weight_decay_optimizers.py index 1f5dbee9c4..ec81df38d8 100644 --- a/tensorflow_addons/optimizers/weight_decay_optimizers.py +++ b/tensorflow_addons/optimizers/weight_decay_optimizers.py @@ -257,7 +257,6 @@ def __init__( return OptimizerWithDecoupledWeightDecay -@tf.keras.utils.register_keras_serializable(package="Addons") class SGDW(DecoupledWeightDecayExtension, tf.keras.optimizers.SGD): """Optimizer that implements the Momentum algorithm with weight_decay. @@ -335,7 +334,6 @@ def __init__( ) -@tf.keras.utils.register_keras_serializable(package="Addons") class AdamW(DecoupledWeightDecayExtension, tf.keras.optimizers.Adam): """Optimizer that implements the Adam algorithm with weight decay. 
diff --git a/tensorflow_addons/optimizers/yogi.py b/tensorflow_addons/optimizers/yogi.py index 09db073d1d..69d20c0b7a 100644 --- a/tensorflow_addons/optimizers/yogi.py +++ b/tensorflow_addons/optimizers/yogi.py @@ -49,7 +49,6 @@ def _solve(a, b, c): return w -@tf.keras.utils.register_keras_serializable(package="Addons") class Yogi(tf.keras.optimizers.Optimizer): """Optimizer that implements the Yogi algorithm in Keras. diff --git a/tensorflow_addons/rnn/cell.py b/tensorflow_addons/rnn/cell.py index a8b4c23840..4076e365a5 100644 --- a/tensorflow_addons/rnn/cell.py +++ b/tensorflow_addons/rnn/cell.py @@ -29,7 +29,6 @@ from typing import Optional -@tf.keras.utils.register_keras_serializable(package="Addons") class NASCell(keras.layers.AbstractRNNCell): """Neural Architecture Search (NAS) recurrent network cell. @@ -226,7 +225,6 @@ def get_config(self): return {**base_config, **config} -@tf.keras.utils.register_keras_serializable(package="Addons") class LayerNormLSTMCell(keras.layers.LSTMCell): """LSTM cell with layer normalization and recurrent dropout. @@ -387,7 +385,6 @@ def _create_norm_layer(self, name): ) -@tf.keras.utils.register_keras_serializable(package="Addons") class LayerNormSimpleRNNCell(keras.layers.SimpleRNNCell): """Cell class for LayerNormSimpleRNN. From 0f566af9a30ea5d0779ba174968240a4a3d52d06 Mon Sep 17 00:00:00 2001 From: gabrieldemarmiesse Date: Sun, 1 Mar 2020 12:06:31 +0000 Subject: [PATCH 03/13] Revert "Removed decorator" This reverts commit ebea5bdb986b92613eeabef87f1dc9b4d90ebfee. --- tensorflow_addons/activations/gelu.py | 1 + tensorflow_addons/activations/hardshrink.py | 1 + tensorflow_addons/activations/lisht.py | 1 + tensorflow_addons/activations/mish.py | 1 + tensorflow_addons/activations/rrelu.py | 1 + tensorflow_addons/activations/softshrink.py | 1 + tensorflow_addons/activations/sparsemax.py | 1 + tensorflow_addons/activations/tanhshrink.py | 1 + tensorflow_addons/callbacks/time_stopping.py | 2 ++ tensorflow_addons/callbacks/tqdm_progress_bar.py | 2 ++ tensorflow_addons/layers/gelu.py | 1 + tensorflow_addons/layers/maxout.py | 1 + tensorflow_addons/layers/normalizations.py | 2 ++ tensorflow_addons/layers/optical_flow.py | 1 + tensorflow_addons/layers/poincare.py | 1 + tensorflow_addons/layers/polynomial.py | 1 + tensorflow_addons/layers/sparsemax.py | 1 + tensorflow_addons/layers/tlu.py | 1 + tensorflow_addons/layers/wrappers.py | 1 + tensorflow_addons/losses/contrastive.py | 2 ++ tensorflow_addons/losses/focal_loss.py | 2 ++ tensorflow_addons/losses/giou_loss.py | 2 ++ tensorflow_addons/losses/lifted.py | 2 ++ tensorflow_addons/losses/npairs.py | 4 ++++ tensorflow_addons/losses/quantiles.py | 2 ++ tensorflow_addons/losses/sparsemax_loss.py | 3 +++ tensorflow_addons/losses/triplet.py | 4 ++++ tensorflow_addons/metrics/cohens_kappa.py | 1 + tensorflow_addons/metrics/f_scores.py | 2 ++ tensorflow_addons/metrics/matthews_correlation_coefficient.py | 1 + tensorflow_addons/optimizers/conditional_gradient.py | 1 + tensorflow_addons/optimizers/cyclical_learning_rate.py | 4 ++++ tensorflow_addons/optimizers/lamb.py | 1 + tensorflow_addons/optimizers/lazy_adam.py | 1 + tensorflow_addons/optimizers/lookahead.py | 1 + tensorflow_addons/optimizers/moving_average.py | 1 + tensorflow_addons/optimizers/novograd.py | 1 + tensorflow_addons/optimizers/rectified_adam.py | 1 + tensorflow_addons/optimizers/stochastic_weight_averaging.py | 1 + tensorflow_addons/optimizers/weight_decay_optimizers.py | 2 ++ tensorflow_addons/optimizers/yogi.py | 1 + tensorflow_addons/rnn/cell.py | 3 
+++ 42 files changed, 65 insertions(+) diff --git a/tensorflow_addons/activations/gelu.py b/tensorflow_addons/activations/gelu.py index 54e4c8c9f8..e82c58a03d 100644 --- a/tensorflow_addons/activations/gelu.py +++ b/tensorflow_addons/activations/gelu.py @@ -22,6 +22,7 @@ _activation_so = LazySO("custom_ops/activations/_activation_ops.so") +@tf.keras.utils.register_keras_serializable(package="Addons") def gelu(x: types.TensorLike, approximate: bool = True) -> tf.Tensor: """Gaussian Error Linear Unit. diff --git a/tensorflow_addons/activations/hardshrink.py b/tensorflow_addons/activations/hardshrink.py index c45eaedcf0..7817e60241 100644 --- a/tensorflow_addons/activations/hardshrink.py +++ b/tensorflow_addons/activations/hardshrink.py @@ -22,6 +22,7 @@ _activation_so = LazySO("custom_ops/activations/_activation_ops.so") +@tf.keras.utils.register_keras_serializable(package="Addons") def hardshrink( x: types.TensorLike, lower: Number = -0.5, upper: Number = 0.5 ) -> tf.Tensor: diff --git a/tensorflow_addons/activations/lisht.py b/tensorflow_addons/activations/lisht.py index 3eb7c691aa..a4cb7febde 100644 --- a/tensorflow_addons/activations/lisht.py +++ b/tensorflow_addons/activations/lisht.py @@ -21,6 +21,7 @@ _activation_so = LazySO("custom_ops/activations/_activation_ops.so") +@tf.keras.utils.register_keras_serializable(package="Addons") def lisht(x: types.TensorLike) -> tf.Tensor: """LiSHT: Non-Parameteric Linearly Scaled Hyperbolic Tangent Activation Function. diff --git a/tensorflow_addons/activations/mish.py b/tensorflow_addons/activations/mish.py index fae7a267b5..2a5e5a8f05 100644 --- a/tensorflow_addons/activations/mish.py +++ b/tensorflow_addons/activations/mish.py @@ -21,6 +21,7 @@ _activation_so = LazySO("custom_ops/activations/_activation_ops.so") +@tf.keras.utils.register_keras_serializable(package="Addons") def mish(x: types.TensorLike) -> tf.Tensor: """Mish: A Self Regularized Non-Monotonic Neural Activation Function. diff --git a/tensorflow_addons/activations/rrelu.py b/tensorflow_addons/activations/rrelu.py index c69c04f2f1..960bd7f2d1 100644 --- a/tensorflow_addons/activations/rrelu.py +++ b/tensorflow_addons/activations/rrelu.py @@ -19,6 +19,7 @@ from typing import Optional +@tf.keras.utils.register_keras_serializable(package="Addons") def rrelu( x: types.TensorLike, lower: Number = 0.125, diff --git a/tensorflow_addons/activations/softshrink.py b/tensorflow_addons/activations/softshrink.py index 204f8f074c..238cc19036 100644 --- a/tensorflow_addons/activations/softshrink.py +++ b/tensorflow_addons/activations/softshrink.py @@ -22,6 +22,7 @@ _activation_so = LazySO("custom_ops/activations/_activation_ops.so") +@tf.keras.utils.register_keras_serializable(package="Addons") def softshrink( x: types.TensorLike, lower: Number = -0.5, upper: Number = 0.5 ) -> tf.Tensor: diff --git a/tensorflow_addons/activations/sparsemax.py b/tensorflow_addons/activations/sparsemax.py index 8c098d4100..1dec26d58c 100644 --- a/tensorflow_addons/activations/sparsemax.py +++ b/tensorflow_addons/activations/sparsemax.py @@ -18,6 +18,7 @@ from tensorflow_addons.utils import types +@tf.keras.utils.register_keras_serializable(package="Addons") @tf.function def sparsemax(logits: types.TensorLike, axis: int = -1) -> tf.Tensor: """Sparsemax activation function [1]. 
diff --git a/tensorflow_addons/activations/tanhshrink.py b/tensorflow_addons/activations/tanhshrink.py index 217a10f9a3..0c78b78074 100644 --- a/tensorflow_addons/activations/tanhshrink.py +++ b/tensorflow_addons/activations/tanhshrink.py @@ -21,6 +21,7 @@ _activation_so = LazySO("custom_ops/activations/_activation_ops.so") +@tf.keras.utils.register_keras_serializable(package="Addons") def tanhshrink(x: types.TensorLike) -> tf.Tensor: """Applies the element-wise function: x - tanh(x) diff --git a/tensorflow_addons/callbacks/time_stopping.py b/tensorflow_addons/callbacks/time_stopping.py index 823a58cfe2..5198a23322 100644 --- a/tensorflow_addons/callbacks/time_stopping.py +++ b/tensorflow_addons/callbacks/time_stopping.py @@ -18,9 +18,11 @@ import time from typeguard import typechecked +import tensorflow as tf from tensorflow.keras.callbacks import Callback +@tf.keras.utils.register_keras_serializable(package="Addons") class TimeStopping(Callback): """Stop training when a specified amount of time has passed. diff --git a/tensorflow_addons/callbacks/tqdm_progress_bar.py b/tensorflow_addons/callbacks/tqdm_progress_bar.py index d2cf7974a4..94151d3e5f 100644 --- a/tensorflow_addons/callbacks/tqdm_progress_bar.py +++ b/tensorflow_addons/callbacks/tqdm_progress_bar.py @@ -15,12 +15,14 @@ """TQDM Progress Bar.""" import time +import tensorflow as tf from collections import defaultdict from typeguard import typechecked from tensorflow.keras.callbacks import Callback +@tf.keras.utils.register_keras_serializable(package="Addons") class TQDMProgressBar(Callback): """TQDM Progress Bar for Tensorflow Keras. diff --git a/tensorflow_addons/layers/gelu.py b/tensorflow_addons/layers/gelu.py index 4a2b49c174..d9407cd87e 100644 --- a/tensorflow_addons/layers/gelu.py +++ b/tensorflow_addons/layers/gelu.py @@ -19,6 +19,7 @@ from typeguard import typechecked +@tf.keras.utils.register_keras_serializable(package="Addons") class GELU(tf.keras.layers.Layer): """Gaussian Error Linear Unit. diff --git a/tensorflow_addons/layers/maxout.py b/tensorflow_addons/layers/maxout.py index b4fd5745cf..b9507e142a 100644 --- a/tensorflow_addons/layers/maxout.py +++ b/tensorflow_addons/layers/maxout.py @@ -18,6 +18,7 @@ from typeguard import typechecked +@tf.keras.utils.register_keras_serializable(package="Addons") class Maxout(tf.keras.layers.Layer): """Applies Maxout to the input. diff --git a/tensorflow_addons/layers/normalizations.py b/tensorflow_addons/layers/normalizations.py index 2895e5d647..7322701f5f 100644 --- a/tensorflow_addons/layers/normalizations.py +++ b/tensorflow_addons/layers/normalizations.py @@ -22,6 +22,7 @@ from tensorflow_addons.utils import types +@tf.keras.utils.register_keras_serializable(package="Addons") class GroupNormalization(tf.keras.layers.Layer): """Group normalization layer. @@ -277,6 +278,7 @@ def _create_broadcast_shape(self, input_shape): return broadcast_shape +@tf.keras.utils.register_keras_serializable(package="Addons") class InstanceNormalization(GroupNormalization): """Instance normalization layer. diff --git a/tensorflow_addons/layers/optical_flow.py b/tensorflow_addons/layers/optical_flow.py index 4fee7a02ca..fe65f1d59c 100644 --- a/tensorflow_addons/layers/optical_flow.py +++ b/tensorflow_addons/layers/optical_flow.py @@ -138,6 +138,7 @@ def _correlation_cost_grad(op, grad_output): return [grad_input_a, grad_input_b] +@tf.keras.utils.register_keras_serializable(package="Addons") class CorrelationCost(tf.keras.layers.Layer): """Correlation Cost Layer. 
diff --git a/tensorflow_addons/layers/poincare.py b/tensorflow_addons/layers/poincare.py index b6a3a11c6b..4f96d92f0c 100644 --- a/tensorflow_addons/layers/poincare.py +++ b/tensorflow_addons/layers/poincare.py @@ -18,6 +18,7 @@ from typeguard import typechecked +@tf.keras.utils.register_keras_serializable(package="Addons") class PoincareNormalize(tf.keras.layers.Layer): """Project into the Poincare ball with norm <= 1.0 - epsilon. diff --git a/tensorflow_addons/layers/polynomial.py b/tensorflow_addons/layers/polynomial.py index 5dc0470bb6..4b789ae0fc 100644 --- a/tensorflow_addons/layers/polynomial.py +++ b/tensorflow_addons/layers/polynomial.py @@ -20,6 +20,7 @@ from tensorflow_addons.utils import types +@tf.keras.utils.register_keras_serializable(package="Addons") class PolynomialCrossing(tf.keras.layers.Layer): """Layer for Deep & Cross Network to learn explicit feature interactions. diff --git a/tensorflow_addons/layers/sparsemax.py b/tensorflow_addons/layers/sparsemax.py index 16f9c374f1..fd3ccf8ba3 100644 --- a/tensorflow_addons/layers/sparsemax.py +++ b/tensorflow_addons/layers/sparsemax.py @@ -18,6 +18,7 @@ from typeguard import typechecked +@tf.keras.utils.register_keras_serializable(package="Addons") class Sparsemax(tf.keras.layers.Layer): """Sparsemax activation function [1]. diff --git a/tensorflow_addons/layers/tlu.py b/tensorflow_addons/layers/tlu.py index 9b4e54dd48..7698e6d602 100644 --- a/tensorflow_addons/layers/tlu.py +++ b/tensorflow_addons/layers/tlu.py @@ -20,6 +20,7 @@ from tensorflow_addons.utils import types +@tf.keras.utils.register_keras_serializable(package="Addons") class TLU(tf.keras.layers.Layer): """Thresholded Linear Unit. An activation function which is similar to ReLU but with a learned threshold that benefits models using FRN(Filter Response diff --git a/tensorflow_addons/layers/wrappers.py b/tensorflow_addons/layers/wrappers.py index 0978e82568..418cf1657b 100644 --- a/tensorflow_addons/layers/wrappers.py +++ b/tensorflow_addons/layers/wrappers.py @@ -19,6 +19,7 @@ from typeguard import typechecked +@tf.keras.utils.register_keras_serializable(package="Addons") class WeightNormalization(tf.keras.layers.Wrapper): """This wrapper reparameterizes a layer by decoupling the weight's magnitude and direction. diff --git a/tensorflow_addons/losses/contrastive.py b/tensorflow_addons/losses/contrastive.py index c4e67824fc..3b4054c434 100644 --- a/tensorflow_addons/losses/contrastive.py +++ b/tensorflow_addons/losses/contrastive.py @@ -20,6 +20,7 @@ from typeguard import typechecked +@tf.keras.utils.register_keras_serializable(package="Addons") @tf.function def contrastive_loss( y_true: TensorLike, y_pred: TensorLike, margin: Number = 1.0 @@ -58,6 +59,7 @@ def contrastive_loss( ) +@tf.keras.utils.register_keras_serializable(package="Addons") class ContrastiveLoss(tf.keras.losses.Loss): r"""Computes the contrastive loss between `y_true` and `y_pred`. diff --git a/tensorflow_addons/losses/focal_loss.py b/tensorflow_addons/losses/focal_loss.py index 74d60f3d60..08f5ee9c17 100644 --- a/tensorflow_addons/losses/focal_loss.py +++ b/tensorflow_addons/losses/focal_loss.py @@ -21,6 +21,7 @@ from typeguard import typechecked +@tf.keras.utils.register_keras_serializable(package="Addons") class SigmoidFocalCrossEntropy(tf.keras.losses.Loss): """Implements the focal loss function. 
@@ -98,6 +99,7 @@ def get_config(self): return {**base_config, **config} +@tf.keras.utils.register_keras_serializable(package="Addons") @tf.function def sigmoid_focal_crossentropy( y_true: TensorLike, diff --git a/tensorflow_addons/losses/giou_loss.py b/tensorflow_addons/losses/giou_loss.py index 103ca6a188..fa7f63d125 100644 --- a/tensorflow_addons/losses/giou_loss.py +++ b/tensorflow_addons/losses/giou_loss.py @@ -21,6 +21,7 @@ from typeguard import typechecked +@tf.keras.utils.register_keras_serializable(package="Addons") class GIoULoss(tf.keras.losses.Loss): """Implements the GIoU loss function. @@ -71,6 +72,7 @@ def call(self, y_true, y_pred): return giou_loss(y_true, y_pred, mode=self.mode) +@tf.keras.utils.register_keras_serializable(package="Addons") @tf.function def giou_loss(y_true: TensorLike, y_pred: TensorLike, mode: str = "giou") -> tf.Tensor: """ diff --git a/tensorflow_addons/losses/lifted.py b/tensorflow_addons/losses/lifted.py index c3aa8fb4a3..b587cee99c 100644 --- a/tensorflow_addons/losses/lifted.py +++ b/tensorflow_addons/losses/lifted.py @@ -22,6 +22,7 @@ from typing import Optional +@tf.keras.utils.register_keras_serializable(package="Addons") @tf.function def lifted_struct_loss( labels: TensorLike, embeddings: TensorLike, margin: FloatTensorLike = 1.0 @@ -110,6 +111,7 @@ def lifted_struct_loss( return lifted_loss +@tf.keras.utils.register_keras_serializable(package="Addons") class LiftedStructLoss(tf.keras.losses.Loss): """Computes the lifted structured loss. diff --git a/tensorflow_addons/losses/npairs.py b/tensorflow_addons/losses/npairs.py index a1163d082e..1693123505 100644 --- a/tensorflow_addons/losses/npairs.py +++ b/tensorflow_addons/losses/npairs.py @@ -20,6 +20,7 @@ from typeguard import typechecked +@tf.keras.utils.register_keras_serializable(package="Addons") @tf.function def npairs_loss(y_true: TensorLike, y_pred: TensorLike) -> tf.Tensor: """Computes the npairs loss between `y_true` and `y_pred`. @@ -61,6 +62,7 @@ def npairs_loss(y_true: TensorLike, y_pred: TensorLike) -> tf.Tensor: return tf.math.reduce_mean(loss) +@tf.keras.utils.register_keras_serializable(package="Addons") @tf.function def npairs_multilabel_loss(y_true: TensorLike, y_pred: TensorLike) -> tf.Tensor: r"""Computes the npairs loss between multilabel data `y_true` and `y_pred`. @@ -125,6 +127,7 @@ def npairs_multilabel_loss(y_true: TensorLike, y_pred: TensorLike) -> tf.Tensor: return tf.math.reduce_mean(loss) +@tf.keras.utils.register_keras_serializable(package="Addons") class NpairsLoss(tf.keras.losses.Loss): """Computes the npairs loss between `y_true` and `y_pred`. @@ -155,6 +158,7 @@ def call(self, y_true, y_pred): return npairs_loss(y_true, y_pred) +@tf.keras.utils.register_keras_serializable(package="Addons") class NpairsMultilabelLoss(tf.keras.losses.Loss): r"""Computes the npairs loss between multilabel data `y_true` and `y_pred`. 
diff --git a/tensorflow_addons/losses/quantiles.py b/tensorflow_addons/losses/quantiles.py index f61fc9cc77..44e02304f9 100644 --- a/tensorflow_addons/losses/quantiles.py +++ b/tensorflow_addons/losses/quantiles.py @@ -19,6 +19,7 @@ from tensorflow_addons.utils.types import TensorLike, FloatTensorLike +@tf.keras.utils.register_keras_serializable(package="Addons") @tf.function def pinball_loss( y_true: TensorLike, y_pred: TensorLike, tau: FloatTensorLike = 0.5 @@ -71,6 +72,7 @@ def pinball_loss( return tf.reduce_mean(tf.keras.backend.batch_flatten(pinball), axis=-1) +@tf.keras.utils.register_keras_serializable(package="Addons") class PinballLoss(tf.keras.losses.Loss): """Computes the pinball loss between `y_true` and `y_pred`. diff --git a/tensorflow_addons/losses/sparsemax_loss.py b/tensorflow_addons/losses/sparsemax_loss.py index b506e352bd..9cd6d4433a 100644 --- a/tensorflow_addons/losses/sparsemax_loss.py +++ b/tensorflow_addons/losses/sparsemax_loss.py @@ -21,6 +21,7 @@ from typing import Optional +@tf.keras.utils.register_keras_serializable(package="Addons") def sparsemax_loss( logits: TensorLike, sparsemax: TensorLike, @@ -84,6 +85,7 @@ def sparsemax_loss( @tf.function +@tf.keras.utils.register_keras_serializable(package="Addons") def sparsemax_loss_from_logits( y_true: TensorLike, logits_pred: TensorLike ) -> tf.Tensor: @@ -92,6 +94,7 @@ def sparsemax_loss_from_logits( return loss +@tf.keras.utils.register_keras_serializable(package="Addons") class SparsemaxLoss(tf.keras.losses.Loss): """Sparsemax loss function. diff --git a/tensorflow_addons/losses/triplet.py b/tensorflow_addons/losses/triplet.py index 41267535cf..c78d697298 100644 --- a/tensorflow_addons/losses/triplet.py +++ b/tensorflow_addons/losses/triplet.py @@ -65,6 +65,7 @@ def _masked_minimum(data, mask, dim=1): return masked_minimums +@tf.keras.utils.register_keras_serializable(package="Addons") @tf.function def triplet_semihard_loss( y_true: TensorLike, y_pred: TensorLike, margin: FloatTensorLike = 1.0 @@ -146,6 +147,7 @@ def triplet_semihard_loss( return triplet_loss +@tf.keras.utils.register_keras_serializable(package="Addons") @tf.function def triplet_hard_loss( y_true: TensorLike, @@ -201,6 +203,7 @@ def triplet_hard_loss( return triplet_loss +@tf.keras.utils.register_keras_serializable(package="Addons") class TripletSemiHardLoss(tf.keras.losses.Loss): """Computes the triplet loss with semi-hard negative mining. @@ -238,6 +241,7 @@ def get_config(self): return {**base_config, **config} +@tf.keras.utils.register_keras_serializable(package="Addons") class TripletHardLoss(tf.keras.losses.Loss): """Computes the triplet loss with hard negative and hard positive mining. diff --git a/tensorflow_addons/metrics/cohens_kappa.py b/tensorflow_addons/metrics/cohens_kappa.py index f250beece1..48631e5f07 100644 --- a/tensorflow_addons/metrics/cohens_kappa.py +++ b/tensorflow_addons/metrics/cohens_kappa.py @@ -24,6 +24,7 @@ from typing import Optional +@tf.keras.utils.register_keras_serializable(package="Addons") class CohenKappa(Metric): """Computes Kappa score between two raters. diff --git a/tensorflow_addons/metrics/f_scores.py b/tensorflow_addons/metrics/f_scores.py index 853de0b7e3..189b2fecc9 100755 --- a/tensorflow_addons/metrics/f_scores.py +++ b/tensorflow_addons/metrics/f_scores.py @@ -21,6 +21,7 @@ from typing import Optional +@tf.keras.utils.register_keras_serializable(package="Addons") class FBetaScore(tf.keras.metrics.Metric): """Computes F-Beta score. 
@@ -188,6 +189,7 @@ def reset_states(self): self.weights_intermediate.assign(tf.zeros(self.init_shape, self.dtype)) +@tf.keras.utils.register_keras_serializable(package="Addons") class F1Score(FBetaScore): """Computes F-1 Score. diff --git a/tensorflow_addons/metrics/matthews_correlation_coefficient.py b/tensorflow_addons/metrics/matthews_correlation_coefficient.py index 621afe7aaa..b49aff4066 100644 --- a/tensorflow_addons/metrics/matthews_correlation_coefficient.py +++ b/tensorflow_addons/metrics/matthews_correlation_coefficient.py @@ -20,6 +20,7 @@ from typeguard import typechecked +@tf.keras.utils.register_keras_serializable(package="Addons") class MatthewsCorrelationCoefficient(tf.keras.metrics.Metric): """Computes the Matthews Correlation Coefficient. diff --git a/tensorflow_addons/optimizers/conditional_gradient.py b/tensorflow_addons/optimizers/conditional_gradient.py index 4a2bc85cb3..1fdbc26a71 100644 --- a/tensorflow_addons/optimizers/conditional_gradient.py +++ b/tensorflow_addons/optimizers/conditional_gradient.py @@ -21,6 +21,7 @@ from typing import Union, Callable +@tf.keras.utils.register_keras_serializable(package="Addons") class ConditionalGradient(tf.keras.optimizers.Optimizer): """Optimizer that implements the Conditional Gradient optimization. diff --git a/tensorflow_addons/optimizers/cyclical_learning_rate.py b/tensorflow_addons/optimizers/cyclical_learning_rate.py index 481ea4348c..9d9bdf1285 100644 --- a/tensorflow_addons/optimizers/cyclical_learning_rate.py +++ b/tensorflow_addons/optimizers/cyclical_learning_rate.py @@ -21,6 +21,7 @@ from typing import Union, Callable +@tf.keras.utils.register_keras_serializable(package="Addons") class CyclicalLearningRate(tf.keras.optimizers.schedules.LearningRateSchedule): """A LearningRateSchedule that uses cyclical schedule.""" @@ -108,6 +109,7 @@ def get_config(self): } +@tf.keras.utils.register_keras_serializable(package="Addons") class TriangularCyclicalLearningRate(CyclicalLearningRate): @typechecked def __init__( @@ -168,6 +170,7 @@ def __init__( ) +@tf.keras.utils.register_keras_serializable(package="Addons") class Triangular2CyclicalLearningRate(CyclicalLearningRate): @typechecked def __init__( @@ -228,6 +231,7 @@ def __init__( ) +@tf.keras.utils.register_keras_serializable(package="Addons") class ExponentialCyclicalLearningRate(CyclicalLearningRate): @typechecked def __init__( diff --git a/tensorflow_addons/optimizers/lamb.py b/tensorflow_addons/optimizers/lamb.py index 597045de73..d5a5807048 100644 --- a/tensorflow_addons/optimizers/lamb.py +++ b/tensorflow_addons/optimizers/lamb.py @@ -26,6 +26,7 @@ from tensorflow_addons.utils.types import FloatTensorLike +@tf.keras.utils.register_keras_serializable(package="Addons") class LAMB(tf.keras.optimizers.Optimizer): """Optimizer that implements the Layer-wise Adaptive Moments (LAMB). diff --git a/tensorflow_addons/optimizers/lazy_adam.py b/tensorflow_addons/optimizers/lazy_adam.py index 0221ae7cba..869cec5b08 100644 --- a/tensorflow_addons/optimizers/lazy_adam.py +++ b/tensorflow_addons/optimizers/lazy_adam.py @@ -27,6 +27,7 @@ from typing import Union, Callable +@tf.keras.utils.register_keras_serializable(package="Addons") class LazyAdam(tf.keras.optimizers.Adam): """Variant of the Adam optimizer that handles sparse updates more efficiently. 
diff --git a/tensorflow_addons/optimizers/lookahead.py b/tensorflow_addons/optimizers/lookahead.py index 721e3295ce..7d63e757e0 100644 --- a/tensorflow_addons/optimizers/lookahead.py +++ b/tensorflow_addons/optimizers/lookahead.py @@ -20,6 +20,7 @@ from typing import Union +@tf.keras.utils.register_keras_serializable(package="Addons") class Lookahead(tf.keras.optimizers.Optimizer): """This class allows to extend optimizers with the lookahead mechanism. diff --git a/tensorflow_addons/optimizers/moving_average.py b/tensorflow_addons/optimizers/moving_average.py index e7096b8b9a..5d3ccaa0e9 100644 --- a/tensorflow_addons/optimizers/moving_average.py +++ b/tensorflow_addons/optimizers/moving_average.py @@ -22,6 +22,7 @@ from typeguard import typechecked +@tf.keras.utils.register_keras_serializable(package="Addons") class MovingAverage(AveragedOptimizerWrapper): """Optimizer that computes a moving average of the variables. diff --git a/tensorflow_addons/optimizers/novograd.py b/tensorflow_addons/optimizers/novograd.py index 04453ac5cb..f9edb7f6e5 100644 --- a/tensorflow_addons/optimizers/novograd.py +++ b/tensorflow_addons/optimizers/novograd.py @@ -24,6 +24,7 @@ from tensorflow.python.training import training_ops +@tf.keras.utils.register_keras_serializable(package="Addons") class NovoGrad(tf.keras.optimizers.Optimizer): """The NovoGrad Optimizer was first proposed in [Stochastic Gradient Methods with Layerwise Adaptvie Moments for training of Deep diff --git a/tensorflow_addons/optimizers/rectified_adam.py b/tensorflow_addons/optimizers/rectified_adam.py index 8a3c63c39d..ee2c29efb7 100644 --- a/tensorflow_addons/optimizers/rectified_adam.py +++ b/tensorflow_addons/optimizers/rectified_adam.py @@ -21,6 +21,7 @@ from typeguard import typechecked +@tf.keras.utils.register_keras_serializable(package="Addons") class RectifiedAdam(tf.keras.optimizers.Optimizer): """Variant of the Adam optimizer whose adaptive learning rate is rectified so as to have a consistent variance. diff --git a/tensorflow_addons/optimizers/stochastic_weight_averaging.py b/tensorflow_addons/optimizers/stochastic_weight_averaging.py index c0d76e1588..baad30027d 100644 --- a/tensorflow_addons/optimizers/stochastic_weight_averaging.py +++ b/tensorflow_addons/optimizers/stochastic_weight_averaging.py @@ -30,6 +30,7 @@ from typing import Union +@tf.keras.utils.register_keras_serializable(package="Addons") class SWA(AveragedOptimizerWrapper): """This class extends optimizers with Stochastic Weight Averaging (SWA). diff --git a/tensorflow_addons/optimizers/weight_decay_optimizers.py b/tensorflow_addons/optimizers/weight_decay_optimizers.py index ec81df38d8..1f5dbee9c4 100644 --- a/tensorflow_addons/optimizers/weight_decay_optimizers.py +++ b/tensorflow_addons/optimizers/weight_decay_optimizers.py @@ -257,6 +257,7 @@ def __init__( return OptimizerWithDecoupledWeightDecay +@tf.keras.utils.register_keras_serializable(package="Addons") class SGDW(DecoupledWeightDecayExtension, tf.keras.optimizers.SGD): """Optimizer that implements the Momentum algorithm with weight_decay. @@ -334,6 +335,7 @@ def __init__( ) +@tf.keras.utils.register_keras_serializable(package="Addons") class AdamW(DecoupledWeightDecayExtension, tf.keras.optimizers.Adam): """Optimizer that implements the Adam algorithm with weight decay. 
diff --git a/tensorflow_addons/optimizers/yogi.py b/tensorflow_addons/optimizers/yogi.py index 69d20c0b7a..09db073d1d 100644 --- a/tensorflow_addons/optimizers/yogi.py +++ b/tensorflow_addons/optimizers/yogi.py @@ -49,6 +49,7 @@ def _solve(a, b, c): return w +@tf.keras.utils.register_keras_serializable(package="Addons") class Yogi(tf.keras.optimizers.Optimizer): """Optimizer that implements the Yogi algorithm in Keras. diff --git a/tensorflow_addons/rnn/cell.py b/tensorflow_addons/rnn/cell.py index 4076e365a5..a8b4c23840 100644 --- a/tensorflow_addons/rnn/cell.py +++ b/tensorflow_addons/rnn/cell.py @@ -29,6 +29,7 @@ from typing import Optional +@tf.keras.utils.register_keras_serializable(package="Addons") class NASCell(keras.layers.AbstractRNNCell): """Neural Architecture Search (NAS) recurrent network cell. @@ -225,6 +226,7 @@ def get_config(self): return {**base_config, **config} +@tf.keras.utils.register_keras_serializable(package="Addons") class LayerNormLSTMCell(keras.layers.LSTMCell): """LSTM cell with layer normalization and recurrent dropout. @@ -385,6 +387,7 @@ def _create_norm_layer(self, name): ) +@tf.keras.utils.register_keras_serializable(package="Addons") class LayerNormSimpleRNNCell(keras.layers.SimpleRNNCell): """Cell class for LayerNormSimpleRNN. From c16f9ef794d44ef95e26165fc4b4e9b50104ab12 Mon Sep 17 00:00:00 2001 From: gabrieldemarmiesse Date: Sun, 1 Mar 2020 12:36:19 +0000 Subject: [PATCH 04/13] Added some tests. --- tensorflow_addons/register.py | 56 ++++++++++++++++-------------- tensorflow_addons/register_test.py | 10 +++++- 2 files changed, 39 insertions(+), 27 deletions(-) diff --git a/tensorflow_addons/register.py b/tensorflow_addons/register.py index 5f7202ca40..2e4b41c14b 100644 --- a/tensorflow_addons/register.py +++ b/tensorflow_addons/register.py @@ -1,8 +1,5 @@ -import inspect import glob import tensorflow as tf -from tensorflow.keras.utils import register_keras_serializable -import warnings from tensorflow_addons import ( activations, callbacks, @@ -40,31 +37,38 @@ def register_all(keras_objects: bool = True, custom_kernels: bool = True) -> Non def register_keras_objects() -> None: - global already_registered - if already_registered: - warnings.warn( - "Tensorflow Addons' functions and classes are already " - "registered in the Keras custom objects dictionary.", - UserWarning, - ) - for module in SUBMODULES: - for attribute in _get_attributes(module): - if inspect.isclass(attribute) or inspect.isfunction(attribute): - register_keras_serializable(package="Addons")(attribute) - - already_registered = True + # TODO: once layer_test is replaced by a public API + # and we can used unregistered objects with it + # we can remove all decorators. + # And register Keras objects here. + pass def register_custom_kernels() -> None: - custom_ops_dir = os.path.join(get_project_root(), "custom_ops") - all_shared_objects = glob.glob(custom_ops_dir + "/**/*.so", recursive=True) - for shared_object in all_shared_objects: - tf.load_op_library(shared_object) + all_shared_objects = get_all_shared_objects() + if not all_shared_objects: + raise FileNotFoundError( + "No shared objects files were found in the custom ops " + "directory in Tensorflow Addons, check your installation again," + "or, if you don't need custom ops, call `tfa.register_all(custom_kernels=False)`" + " instead." 
+        )
+    try:
+        for shared_object in all_shared_objects:
+            tf.load_op_library(shared_object)
+    except tf.errors.NotFoundError as e:
+        raise RuntimeError(
+            "One of the shared objects ({}) could not be loaded. This may be "
+            "due to a number of reasons (incompatible TensorFlow version, building from "
+            "source with different flags, broken install of TensorFlow Addons...). If you "
+            "wanted to register the shared objects because you needed them when loading your "
+            "model, you should fix your install of TensorFlow Addons. If you don't "
+            "use custom ops in your model, you can skip registering custom ops with "
+            "`tfa.register_all(custom_kernels=False)`".format(shared_object)
+        ) from e
 
 
-def _get_attributes(module):
-    for attr_name in dir(module):
-        if attr_name.startswith("_"):
-            continue
-        attr = getattr(module, attr_name)
-        yield attr
+def get_all_shared_objects():
+    custom_ops_dir = os.path.join(get_project_root(), "custom_ops")
+    all_shared_objects = glob.glob(custom_ops_dir + "/**/*.so", recursive=True)
+    return list(all_shared_objects)
diff --git a/tensorflow_addons/register_test.py b/tensorflow_addons/register_test.py
index 899f86b1c4..c8d6ff4dbb 100644
--- a/tensorflow_addons/register_test.py
+++ b/tensorflow_addons/register_test.py
@@ -1,5 +1,6 @@
 import unittest
-from tensorflow_addons.register import register_all
+import tensorflow as tf
+from tensorflow_addons.register import register_all, get_all_shared_objects
 
 
 class AssertRNNCellTest(unittest.TestCase):
@@ -10,6 +11,13 @@ def test_multiple_register(self):
         register_all()
         register_all()
 
+    def test_get_all_shared_objects(self):
+        all_shared_objects = get_all_shared_objects()
+        self.assertTrue(len(all_shared_objects) >= 15)
+
+        for file in all_shared_objects:
+            tf.load_op_library(file)
+
 
 if __name__ == "__main__":
     unittest.main()
From 055737e9dfec9f7109c2c5b188e4a440757bb548 Mon Sep 17 00:00:00 2001
From: gabrieldemarmiesse
Date: Sun, 1 Mar 2020 12:50:15 +0000
Subject: [PATCH 05/13] Added the two register.

---
 tensorflow_addons/register.py      | 2 ++
 tensorflow_addons/register_test.py | 2 +-
 2 files changed, 3 insertions(+), 1 deletion(-)

diff --git a/tensorflow_addons/register.py b/tensorflow_addons/register.py
index 2e4b41c14b..8cf0a3869d 100644
--- a/tensorflow_addons/register.py
+++ b/tensorflow_addons/register.py
@@ -1,4 +1,5 @@
 import glob
+from pathlib import Path
 import tensorflow as tf
 from tensorflow_addons import (
     activations,
@@ -71,4 +72,5 @@ def register_custom_kernels() -> None:
 def get_all_shared_objects():
     custom_ops_dir = os.path.join(get_project_root(), "custom_ops")
     all_shared_objects = glob.glob(custom_ops_dir + "/**/*.so", recursive=True)
+    all_shared_objects = [x for x in all_shared_objects if Path(x).is_file()]
     return list(all_shared_objects)
diff --git a/tensorflow_addons/register_test.py b/tensorflow_addons/register_test.py
index c8d6ff4dbb..f83a3ed2cd 100644
--- a/tensorflow_addons/register_test.py
+++ b/tensorflow_addons/register_test.py
@@ -13,7 +13,7 @@ def test_multiple_register(self):
 
     def test_get_all_shared_objects(self):
         all_shared_objects = get_all_shared_objects()
-        self.assertTrue(len(all_shared_objects) >= 15)
+        self.assertTrue(len(all_shared_objects) >= 4)
 
         for file in all_shared_objects:
             tf.load_op_library(file)
From 61b115e3a4afa98988b792de876a75524942c553 Mon Sep 17 00:00:00 2001
From: gabrieldemarmiesse
Date: Sun, 1 Mar 2020 12:55:51 +0000
Subject: [PATCH 06/13] Removed unused variables.
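The imports and `SUBMODULES` list removed below became dead code once `register_keras_objects()` was reduced to a stub. As a side note on the error handling added in the previous patches: both failure modes are recoverable by skipping the compiled kernels. A usage sketch (illustrative only, not part of this patch series; it relies only on the documented `tfa.register_all` API):

```python
import tensorflow_addons as tfa

# Keep the Keras-object registration even when the compiled custom ops
# are missing or were built against an incompatible TensorFlow.
try:
    tfa.register_all()
except (FileNotFoundError, RuntimeError):
    tfa.register_all(custom_kernels=False)
```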
--- tensorflow_addons/register.py | 29 +++-------------------------- 1 file changed, 3 insertions(+), 26 deletions(-) diff --git a/tensorflow_addons/register.py b/tensorflow_addons/register.py index 8cf0a3869d..96216a9a05 100644 --- a/tensorflow_addons/register.py +++ b/tensorflow_addons/register.py @@ -1,33 +1,10 @@ import glob -from pathlib import Path -import tensorflow as tf -from tensorflow_addons import ( - activations, - callbacks, - image, - layers, - losses, - metrics, - optimizers, - rnn, - seq2seq, -) -from tensorflow_addons.utils.resource_loader import get_project_root import os +from pathlib import Path -SUBMODULES = [ - activations, - callbacks, - image, - layers, - losses, - metrics, - optimizers, - rnn, - seq2seq, -] +import tensorflow as tf -already_registered = False +from tensorflow_addons.utils.resource_loader import get_project_root def register_all(keras_objects: bool = True, custom_kernels: bool = True) -> None: From 0d2fe522e9245857dd291de2067d15b31764a79c Mon Sep 17 00:00:00 2001 From: gabrieldemarmiesse Date: Sun, 1 Mar 2020 13:05:19 +0000 Subject: [PATCH 07/13] Private func. --- tensorflow_addons/register.py | 4 ++-- tensorflow_addons/register_test.py | 4 ++-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/tensorflow_addons/register.py b/tensorflow_addons/register.py index 96216a9a05..c384f4d7e7 100644 --- a/tensorflow_addons/register.py +++ b/tensorflow_addons/register.py @@ -23,7 +23,7 @@ def register_keras_objects() -> None: def register_custom_kernels() -> None: - all_shared_objects = get_all_shared_objects() + all_shared_objects = _get_all_shared_objects() if not all_shared_objects: raise FileNotFoundError( "No shared objects files were found in the custom ops " @@ -46,7 +46,7 @@ def register_custom_kernels() -> None: ) from e -def get_all_shared_objects(): +def _get_all_shared_objects(): custom_ops_dir = os.path.join(get_project_root(), "custom_ops") all_shared_objects = glob.glob(custom_ops_dir + "/**/*.so", recursive=True) all_shared_objects = [x for x in all_shared_objects if Path(x).is_file()] diff --git a/tensorflow_addons/register_test.py b/tensorflow_addons/register_test.py index f83a3ed2cd..f264be0080 100644 --- a/tensorflow_addons/register_test.py +++ b/tensorflow_addons/register_test.py @@ -1,6 +1,6 @@ import unittest import tensorflow as tf -from tensorflow_addons.register import register_all, get_all_shared_objects +from tensorflow_addons.register import register_all, _get_all_shared_objects class AssertRNNCellTest(unittest.TestCase): @@ -12,7 +12,7 @@ def test_multiple_register(self): register_all() def test_get_all_shared_objects(self): - all_shared_objects = get_all_shared_objects() + all_shared_objects = _get_all_shared_objects() self.assertTrue(len(all_shared_objects) >= 4) for file in all_shared_objects: From 894d3d4caf1b1be694f3453ee6c4aed718e026d0 Mon Sep 17 00:00:00 2001 From: gabrieldemarmiesse Date: Sun, 1 Mar 2020 13:14:09 +0000 Subject: [PATCH 08/13] Explicit modules. 
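The diff below replaces dynamic submodule discovery with an explicit list, so the typing check covers a fixed, reviewable API surface instead of whatever `dir()` happens to return (which would now also include helpers such as `tensorflow_addons.register`). For reference, a sketch of the dynamic pattern being dropped, restated from the removed lines:

```python
from types import ModuleType

import tensorflow_addons

# Dynamic discovery, as removed below: collect every attribute of the
# top-level package that happens to be a module at import time.
modules_list = [
    getattr(tensorflow_addons, attr_name)
    for attr_name in dir(tensorflow_addons)
    if isinstance(getattr(tensorflow_addons, attr_name), ModuleType)
]
```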
---
 tools/ci_build/verify/check_typing_info.py | 19 +++++++++++++------
 1 file changed, 13 insertions(+), 6 deletions(-)

diff --git a/tools/ci_build/verify/check_typing_info.py b/tools/ci_build/verify/check_typing_info.py
index c82f12a25e..3d7246dcf2 100644
--- a/tools/ci_build/verify/check_typing_info.py
+++ b/tools/ci_build/verify/check_typing_info.py
@@ -18,7 +18,7 @@
 
 from typedapi import ensure_api_is_typed
 
-import tensorflow_addons
+import tensorflow_addons as tfa
 
 TUTORIAL_URL = "https://docs.python.org/3/library/typing.html"
 HELP_MESSAGE = (
@@ -30,11 +30,18 @@
 
 EXCEPTION_LIST = []
 
-modules_list = []
-for attr_name in dir(tensorflow_addons):
-    attr = getattr(tensorflow_addons, attr_name)
-    if isinstance(attr, ModuleType):
-        modules_list.append(attr)
+modules_list = [
+    tfa,
+    tfa.activations,
+    tfa.callbacks,
+    tfa.image,
+    tfa.losses,
+    tfa.metrics,
+    tfa.optimizers,
+    tfa.rnn,
+    tfa.seq2seq,
+    tfa.text,
+]
 
 
 if __name__ == "__main__":
From a0443878fc0c7d8ae990cac2908caa2987528124 Mon Sep 17 00:00:00 2001
From: gabrieldemarmiesse
Date: Sun, 1 Mar 2020 13:17:04 +0000
Subject: [PATCH 09/13] FLake8

---
 tools/ci_build/verify/check_typing_info.py | 3 ---
 1 file changed, 3 deletions(-)

diff --git a/tools/ci_build/verify/check_typing_info.py b/tools/ci_build/verify/check_typing_info.py
index 3d7246dcf2..7aea9b2c13 100644
--- a/tools/ci_build/verify/check_typing_info.py
+++ b/tools/ci_build/verify/check_typing_info.py
@@ -13,9 +13,6 @@
 # limitations under the License.
 # ==============================================================================
 #
-
-from types import ModuleType
-
 from typedapi import ensure_api_is_typed
 
 import tensorflow_addons as tfa
From 69972d17cdf230c096f81c6860c69866dc152604 Mon Sep 17 00:00:00 2001
From: gabrieldemarmiesse
Date: Thu, 5 Mar 2020 11:26:25 +0100
Subject: [PATCH 10/13] Added documentation.

---
 tensorflow_addons/register.py | 58 ++++++++++++++++++++++++++++++++++-
 1 file changed, 57 insertions(+), 1 deletion(-)

diff --git a/tensorflow_addons/register.py b/tensorflow_addons/register.py
index c384f4d7e7..9c76d53994 100644
--- a/tensorflow_addons/register.py
+++ b/tensorflow_addons/register.py
@@ -8,6 +8,62 @@
 
 
 def register_all(keras_objects: bool = True, custom_kernels: bool = True) -> None:
+    """Register TensorFlow Addons' objects in TensorFlow global dictionaries.
+
+    When loading a Keras model that uses a TF Addons function or class, the
+    Keras deserialization process needs to know about that object.
+
+    There are two ways to do this: either do
+
+    ```python
+    tf.keras.models.load_model(
+        "my_model.tf",
+        custom_objects={"LAMB": tfa.optimizers.LAMB}
+    )
+    ```
+
+    or you can do:
+    ```python
+    tfa.register_all()
+    tf.keras.models.load_model("my_model.tf")
+    ```
+
+    If the model contains custom ops (compiled ops) of TensorFlow Addons,
+    and the graph is loaded with `tf.saved_model.load`, then the custom ops need
+    to be registered beforehand to avoid an error of the type:
+
+    ```
+    tensorflow.python.framework.errors_impl.NotFoundError: Op type not registered
+    '...' in binary running on ... Make sure the Op and Kernel are
+    registered in the binary running in this process.
+    ```
+
+    In this case, the only way to make sure that the ops are registered is to call
+    this function:
+
+    ```python
+    tfa.register_all()
+    tf.saved_model.load("my_model.tf")
+    ```
+
+    Note that you can call this function multiple times in the same process;
+    it only has an effect the first time. Afterward, it's just a no-op.
+
+    Args:
+        keras_objects: boolean, `True` by default.
If `True`, registers all
+            Keras objects of Addons
+            with `tf.keras.utils.register_keras_serializable(package="Addons")`.
+            If set to `False`, doesn't register any Keras objects
+            of Addons in TensorFlow.
+        custom_kernels: boolean, `True` by default. If `True`, loads all
+            custom kernels of TensorFlow Addons with
+            `tf.load_op_library("path/to/so/file.so")`. Loading the SO files
+            registers them automatically. If `False`, doesn't load or register
+            the shared object files. Note that it might be useful to turn it off
+            if your installation of Addons doesn't work well with custom ops.
+    Returns:
+        None
+    """
     if keras_objects:
         register_keras_objects()
     if custom_kernels:
         register_custom_kernels()
@@ -50,4 +106,4 @@ def _get_all_shared_objects():
     custom_ops_dir = os.path.join(get_project_root(), "custom_ops")
     all_shared_objects = glob.glob(custom_ops_dir + "/**/*.so", recursive=True)
     all_shared_objects = [x for x in all_shared_objects if Path(x).is_file()]
-    return list(all_shared_objects)
+    return all_shared_objects
From 44793af808dfeca4cadb9ab75fd8e77414bbfed2 Mon Sep 17 00:00:00 2001
From: gabrieldemarmiesse
Date: Fri, 6 Mar 2020 11:36:36 +0100
Subject: [PATCH 11/13] Remove useless setup method.

---
 tensorflow_addons/register_test.py | 2 --
 1 file changed, 2 deletions(-)

diff --git a/tensorflow_addons/register_test.py b/tensorflow_addons/register_test.py
index f264be0080..990e15ee1c 100644
--- a/tensorflow_addons/register_test.py
+++ b/tensorflow_addons/register_test.py
@@ -4,8 +4,6 @@
 
 
 class AssertRNNCellTest(unittest.TestCase):
-    def setUp(self):
-        pass
 
     def test_multiple_register(self):
         register_all()
From 12d4ff7af4a0fee7ce562e8a696a1232518177b2 Mon Sep 17 00:00:00 2001
From: gabrieldemarmiesse
Date: Fri, 6 Mar 2020 11:38:40 +0100
Subject: [PATCH 12/13] Black/

---
 tensorflow_addons/register_test.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/tensorflow_addons/register_test.py b/tensorflow_addons/register_test.py
index 990e15ee1c..248e6a24c2 100644
--- a/tensorflow_addons/register_test.py
+++ b/tensorflow_addons/register_test.py
@@ -4,7 +4,6 @@
 
 
 class AssertRNNCellTest(unittest.TestCase):
-
     def test_multiple_register(self):
         register_all()
         register_all()
From c88bb28ce60c883b97e87492c97028bc88981e5c Mon Sep 17 00:00:00 2001
From: gabrieldemarmiesse
Date: Sat, 7 Mar 2020 12:22:30 +0000
Subject: [PATCH 13/13] Format BUILD.

---
 tensorflow_addons/BUILD | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tensorflow_addons/BUILD b/tensorflow_addons/BUILD
index ea780e316c..24033291e0 100644
--- a/tensorflow_addons/BUILD
+++ b/tensorflow_addons/BUILD
@@ -11,8 +11,8 @@ py_library(
     name = "tensorflow_addons",
     data = [
         "__init__.py",
-        "register.py",
         "options.py",
+        "register.py",
         "version.py",
     ],
     deps = [