updated generator classes and tests for keras3
mbarbetti committed Jun 19, 2024
1 parent 95d8375 commit 8bbcfe9
Showing 7 changed files with 355 additions and 61 deletions.
13 changes: 11 additions & 2 deletions src/pidgan/players/generators/__init__.py
@@ -1,2 +1,11 @@
from .Generator import Generator
from .ResGenerator import ResGenerator
import keras as k

k_vrs = k.__version__.split(".")[:2]
k_vrs = float(".".join([n for n in k_vrs]))

if k_vrs >= 3.0:
from .k3.Generator import Generator
from .k3.ResGenerator import ResGenerator
else:
from .k2.Generator import Generator
from .k2.ResGenerator import ResGenerator
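
The new __init__.py gates the generator imports on the installed Keras major.minor version. Below is a standalone sketch of just that parsing-and-dispatch logic, run on example version strings rather than the installed package (the version strings are illustrative, not taken from the commit):

def parse_major_minor(version: str) -> float:
    # mirrors the snippet above: keep only "major.minor" and compare as a float
    major_minor = version.split(".")[:2]
    return float(".".join(major_minor))

for v in ("2.15.0", "3.0.0", "3.3.3"):
    subpackage = "k3" if parse_major_minor(v) >= 3.0 else "k2"
    print(f"keras {v} -> pidgan.players.generators.{subpackage}.Generator")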
@@ -1,10 +1,10 @@
import keras as k
import tensorflow as tf
from tensorflow import keras

LEAKY_ALPHA = 0.1


class Generator(keras.Model):
class Generator(k.Model):
def __init__(
self,
output_dim,
@@ -18,7 +18,6 @@ def __init__(
) -> None:
super().__init__(name=name, dtype=dtype)
self._hidden_activation_func = None
self._model = None

# Output dimension
assert output_dim >= 1
@@ -64,13 +63,15 @@ def __init__(
# Output activation
self._output_activation = output_activation

def _define_arch(self) -> keras.Sequential:
model = keras.Sequential(name=f"{self.name}_seq" if self.name else None)
def build(self, input_shape) -> None:
input_dim = input_shape[-1] + self._latent_dim
seq = k.Sequential(name=f"{self.name}_seq" if self.name else None)
seq.add(k.layers.InputLayer(input_shape=(input_dim,)))
for i, (units, rate) in enumerate(
zip(self._mlp_hidden_units, self._mlp_dropout_rates)
):
model.add(
keras.layers.Dense(
seq.add(
k.layers.Dense(
units=units,
activation=self._hidden_activation_func,
kernel_initializer="glorot_uniform",
@@ -80,18 +81,19 @@ def _define_arch(self) -> keras.Sequential:
)
)
if self._hidden_activation_func is None:
model.add(
keras.layers.LeakyReLU(
seq.add(
k.layers.LeakyReLU(
alpha=LEAKY_ALPHA, name=f"leaky_relu_{i}" if self.name else None
)
)
model.add(
keras.layers.Dropout(
# TODO: implement alternative hidden activation func
seq.add(
k.layers.Dropout(
rate=rate, name=f"dropout_{i}" if self.name else None
)
)
model.add(
keras.layers.Dense(
seq.add(
k.layers.Dense(
units=self._output_dim,
activation=self._output_activation,
kernel_initializer="glorot_uniform",
@@ -100,13 +102,7 @@ def _define_arch(self) -> keras.Sequential:
dtype=self.dtype,
)
)
return model

def _build_model(self, x) -> None:
if self._model is None:
self._model = self._define_arch()
else:
pass
self._model = seq

def _prepare_input(self, x, seed=None) -> tuple:
latent_sample = tf.random.normal(
@@ -120,18 +116,18 @@ def _prepare_input(self, x, seed=None) -> tuple:
return x, latent_sample

def call(self, x) -> tf.Tensor:
# TODO: add warning for model.build()
x, _ = self._prepare_input(x, seed=None)
self._build_model(x)
out = self._model(x)
return out

def summary(self, **kwargs) -> None:
self._model.summary(**kwargs)

def generate(self, x, seed=None, return_latent_sample=False) -> tf.Tensor:
# TODO: add warning for model.build()
tf.random.set_seed(seed=seed)
x, latent_sample = self._prepare_input(x, seed=seed)
self._build_model(x)
out = self._model(x)
if return_latent_sample:
return out, latent_sample
@@ -163,5 +159,5 @@ def output_activation(self):
return self._output_activation

@property
def export_model(self) -> keras.Sequential:
def export_model(self) -> k.Sequential:
return self._model
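
With this change the Generator no longer assembles its network inside a private _build_model() helper on the forward pass: the internal Sequential is created in the standard Keras build(input_shape) hook, which runs once on the first call. A hedged usage sketch follows; the constructor argument names latent_dim, mlp_hidden_units, and mlp_dropout_rates are inferred from the attributes visible in the diff and are assumptions, not the confirmed signature, and the values are toy choices:

import tensorflow as tf
from pidgan.players.generators import Generator

# Assumed constructor signature (inferred from attributes in the diff above).
gen = Generator(
    output_dim=4,
    latent_dim=64,
    mlp_hidden_units=[128, 128],
    mlp_dropout_rates=[0.0, 0.0],
    output_activation=None,
)

x = tf.random.normal((256, 6))  # toy conditioning batch
y = gen(x)                      # first call triggers build(input_shape), which builds the Sequential
out, z = gen.generate(x, seed=42, return_latent_sample=True)
print(y.shape, out.shape, z.shape)

The sketch assumes pidgan is installed with a working Keras backend; apart from where the network is constructed, the call() and generate() entry points are used as shown in the diff.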
@@ -1,5 +1,6 @@
from tensorflow import keras
from pidgan.players.generators.Generator import Generator
import keras as k

from pidgan.players.generators.k2.Generator import Generator

LEAKY_ALPHA = 0.1

@@ -50,9 +51,9 @@ def __init__(
def _define_arch(self) -> None:
self._hidden_layers = list()
for i in range(self._num_hidden_layers):
seq = list()
seq.append(
keras.layers.Dense(
res_block = list()
res_block.append(
k.layers.Dense(
units=self._mlp_hidden_units,
activation=self._hidden_activation_func,
kernel_initializer="glorot_uniform",
@@ -62,26 +63,27 @@ def _define_arch(self) -> None:
)
)
if self._hidden_activation_func is None:
seq.append(
keras.layers.LeakyReLU(
res_block.append(
k.layers.LeakyReLU(
alpha=LEAKY_ALPHA, name=f"leaky_relu_{i}" if self.name else None
)
)
seq.append(
keras.layers.Dropout(
# TODO: implement alternative hidden activation func
res_block.append(
k.layers.Dropout(
rate=self._mlp_dropout_rates,
name=f"dropout_{i}" if self.name else None,
)
)
self._hidden_layers.append(seq)
self._hidden_layers.append(res_block)

self._add_layers = list()
for i in range(self._num_hidden_layers - 1):
self._add_layers.append(
keras.layers.Add(name=f"add_{i}-{i+1}" if self.name else None)
k.layers.Add(name=f"add_{i}-{i+1}" if self.name else None)
)

self._out = keras.layers.Dense(
self._out = k.layers.Dense(
units=self._output_dim,
activation=self._output_activation,
kernel_initializer="glorot_uniform",
@@ -90,29 +92,27 @@ def _define_arch(self) -> None:
dtype=self.dtype,
)

def _build_model(self, x) -> None:
if self._model is None:
self._define_arch()
inputs = keras.layers.Input(shape=x.shape[1:])
x_ = inputs
for layer in self._hidden_layers[0]:
x_ = layer(x_)
for i in range(1, self._num_hidden_layers):
h_ = x_
for layer in self._hidden_layers[i]:
h_ = layer(h_)
if self._enable_res_blocks:
x_ = self._add_layers[i - 1]([x_, h_])
else:
x_ = h_
outputs = self._out(x_)
self._model = keras.Model(
inputs=inputs,
outputs=outputs,
name=f"{self.name}_func" if self.name else None,
)
else:
pass
def build(self, input_shape) -> None:
input_dim = input_shape[-1] + self._latent_dim
self._define_arch()
inputs = k.layers.Input(shape=(input_dim,))
x_ = inputs
for layer in self._hidden_layers[0]:
x_ = layer(x_)
for i in range(1, self._num_hidden_layers):
h_ = x_
for layer in self._hidden_layers[i]:
h_ = layer(h_)
if self._enable_res_blocks:
x_ = self._add_layers[i - 1]([x_, h_])
else:
x_ = h_
outputs = self._out(x_)
self._model = k.Model(
inputs=inputs,
outputs=outputs,
name=f"{self.name}_func" if self.name else None,
)

@property
def mlp_hidden_units(self) -> int:
@@ -123,5 +123,5 @@ def mlp_dropout_rates(self) -> float:
return self._mlp_dropout_rates

@property
def export_model(self) -> keras.Model:
def export_model(self) -> k.Model:
return self._model
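
The ResGenerator build() wires its hidden blocks together with keras.layers.Add skip connections when residual blocks are enabled. Below is a simplified, standalone sketch of that wiring pattern (plain Keras, toy sizes, no pidgan dependencies), just to show what the loop constructs:

import keras as k

num_blocks, units, in_dim = 3, 32, 8
inputs = k.layers.Input(shape=(in_dim,))

x = inputs
for layer in (k.layers.Dense(units, activation="relu"), k.layers.Dropout(0.1)):
    x = layer(x)                       # first block: no skip connection
for i in range(1, num_blocks):
    h = x
    for layer in (k.layers.Dense(units, activation="relu"), k.layers.Dropout(0.1)):
        h = layer(h)
    x = k.layers.Add()([x, h])         # residual connection, as with _add_layers[i - 1]

outputs = k.layers.Dense(1)(x)
model = k.Model(inputs=inputs, outputs=outputs)
model.summary()

When residual blocks are disabled, the Add layers are skipped and the blocks are simply stacked, matching the else branch in the diff.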
