diff --git a/cornac/models/cdl/recom_cdl.py b/cornac/models/cdl/recom_cdl.py
index 164030435..c63b4b573 100644
--- a/cornac/models/cdl/recom_cdl.py
+++ b/cornac/models/cdl/recom_cdl.py
@@ -134,7 +134,7 @@ def __init__(
         self.vocab_size = vocab_size
         self.name = name
         self.max_iter = max_iter
-        self.ae_structure = autoencoder_structure
+        self.autoencoder_structure = autoencoder_structure
         self.act_fn = act_fn
         self.batch_size = batch_size
         self.verbose = verbose
@@ -195,9 +195,9 @@ def _fit_cdl(self):
         # Build model
         layer_sizes = (
             [self.vocab_size]
-            + self.ae_structure
+            + self.autoencoder_structure
             + [self.k]
-            + self.ae_structure
+            + self.autoencoder_structure
             + [self.vocab_size]
         )
         tf.set_random_seed(self.seed)
diff --git a/cornac/models/cdr/recom_cdr.py b/cornac/models/cdr/recom_cdr.py
index 98ee2f954..dac1fec46 100644
--- a/cornac/models/cdr/recom_cdr.py
+++ b/cornac/models/cdr/recom_cdr.py
@@ -122,7 +122,7 @@ def __init__(
         self.learning_rate = learning_rate
         self.name = name
         self.max_iter = max_iter
-        self.ae_structure = autoencoder_structure
+        self.autoencoder_structure = autoencoder_structure
         self.act_fn = act_fn
         self.batch_size = batch_size
         self.verbose = verbose
@@ -160,8 +160,9 @@ def fit(self, train_set, val_set=None):
         """
         Recommender.fit(self, train_set, val_set)
 
+        self._init()
+
         if self.trainable:
-            self._init()
             self._fit_cdr()
 
         return self
@@ -183,12 +184,12 @@ def _fit_cdr(self):
         # Build model
         layer_sizes = (
             [self.vocab_size]
-            + self.ae_structure
+            + self.autoencoder_structure
             + [self.k]
-            + self.ae_structure
+            + self.autoencoder_structure
             + [self.vocab_size]
         )
-
+        tf.set_random_seed(self.seed)
         model = Model(
             n_users=n_users,
             n_items=n_items,
@@ -215,7 +216,7 @@ def _fit_cdr(self):
 
         loop = trange(self.max_iter, disable=not self.verbose)
         for _ in loop:
-            corruption_mask = np.random.binomial(
+            corruption_mask = self.rng.binomial(
                 1, 1 - self.corruption_rate, (n_items, self.vocab_size)
             )
             sum_loss = 0
diff --git a/cornac/models/conv_mf/recom_convmf.py b/cornac/models/conv_mf/recom_convmf.py
index 7cc2940a9..4b96ea7b9 100644
--- a/cornac/models/conv_mf/recom_convmf.py
+++ b/cornac/models/conv_mf/recom_convmf.py
@@ -89,10 +89,10 @@ def __init__(
     ):
         super().__init__(name=name, trainable=trainable, verbose=verbose)
         self.give_item_weight = give_item_weight
-        self.max_iter = n_epochs
+        self.n_epochs = n_epochs
         self.lambda_u = lambda_u
         self.lambda_v = lambda_v
-        self.dimension = k
+        self.k = k
         self.dropout_rate = dropout_rate
         self.emb_dim = emb_dim
         self.max_len = max_len
@@ -114,9 +114,9 @@ def _init(self):
         vocab_size = self.train_set.item_text.vocab.size
 
         if self.U is None:
-            self.U = xavier_uniform((n_users, self.dimension), rng)
+            self.U = xavier_uniform((n_users, self.k), rng)
         if self.V is None:
-            self.V = xavier_uniform((n_items, self.dimension), rng)
+            self.V = xavier_uniform((n_items, self.k), rng)
         if self.W is None:
             self.W = xavier_uniform((vocab_size, self.emb_dim), rng)
 
@@ -178,8 +178,9 @@ def _fit_convmf(self):
         from .convmf import CNN_module
         import tensorflow as tf
 
+        tf.set_random_seed(self.seed)
         cnn_module = CNN_module(
-            output_dimension=self.dimension,
+            output_dimension=self.k,
             dropout_rate=self.dropout_rate,
             emb_dim=self.emb_dim,
             max_len=self.max_len,
@@ -206,7 +207,7 @@ def _fit_convmf(self):
         history = 1e-50
         loss = 0
 
-        for iter in range(self.max_iter):
+        for iter in range(self.n_epochs):
             print("Iteration {}".format(iter + 1))
             tic = time.time()
 
@@ -216,8 +217,8 @@ def _fit_convmf(self):
                 V_i = self.V[idx_item]
                 R_i = R_user[i]
 
-                A = self.lambda_u * np.eye(self.dimension) + V_i.T.dot(V_i)
-                B = (V_i * (np.tile(R_i, (self.dimension, 1)).T)).sum(0)
+                A = self.lambda_u * np.eye(self.k) + V_i.T.dot(V_i)
+                B = (V_i * (np.tile(R_i, (self.k, 1)).T)).sum(0)
                 self.U[i] = np.linalg.solve(A, B)
 
                 user_loss[i] = -0.5 * self.lambda_u * np.dot(self.U[i], self.U[i])
@@ -228,10 +229,10 @@ def _fit_convmf(self):
                 U_j = self.U[idx_user]
                 R_j = R_item[j]
 
-                A = self.lambda_v * item_weight[j] * np.eye(self.dimension) + U_j.T.dot(
+                A = self.lambda_v * item_weight[j] * np.eye(self.k) + U_j.T.dot(
                     U_j
                 )
-                B = (U_j * (np.tile(R_j, (self.dimension, 1)).T)).sum(
+                B = (U_j * (np.tile(R_j, (self.k, 1)).T)).sum(
                     0
                 ) + self.lambda_v * item_weight[j] * theta[j]
                 self.V[j] = np.linalg.solve(A, B)
diff --git a/cornac/models/cvae/recom_cvae.py b/cornac/models/cvae/recom_cvae.py
index 6ddacadf7..0ad748c55 100644
--- a/cornac/models/cvae/recom_cvae.py
+++ b/cornac/models/cvae/recom_cvae.py
@@ -120,8 +120,8 @@ def __init__(
         self.b = b
         self.n_epochs = n_epochs
         self.input_dim = input_dim
-        self.dimensions = vae_layers
-        self.n_z = z_dim
+        self.vae_layers = vae_layers
+        self.z_dim = z_dim
         self.loss_type = loss_type
         self.act_fn = act_fn
         self.lr = lr
@@ -138,9 +138,9 @@ def _init(self):
         n_users, n_items = self.train_set.num_users, self.train_set.num_items
 
         if self.U is None:
-            self.U = xavier_uniform((n_users, self.n_z), rng)
+            self.U = xavier_uniform((n_users, self.z_dim), rng)
         if self.V is None:
-            self.V = xavier_uniform((n_items, self.n_z), rng)
+            self.V = xavier_uniform((n_items, self.z_dim), rng)
 
     def fit(self, train_set, val_set=None):
         """Fit the model to observations.
@@ -179,18 +179,19 @@ def _fit_cvae(self):
         from .cvae import Model
         import tensorflow as tf
 
+        tf.set_random_seed(self.seed)
         model = Model(
             n_users=self.train_set.num_users,
             n_items=self.train_set.num_items,
             input_dim=self.input_dim,
             U=self.U,
             V=self.V,
-            n_z=self.n_z,
+            n_z=self.z_dim,
             lambda_u=self.lambda_u,
             lambda_v=self.lambda_v,
             lambda_r=self.lambda_r,
             lambda_w=self.lambda_w,
-            layers=self.dimensions,
+            layers=self.vae_layers,
             loss_type=self.loss_type,
             act_fn=self.act_fn,
             seed=self.seed,
diff --git a/examples/cdl_example.py b/examples/cdl_example.py
index f756c4ebd..810647b61 100644
--- a/examples/cdl_example.py
+++ b/examples/cdl_example.py
@@ -57,6 +57,7 @@
     lambda_v=1,
     lambda_w=0.1,
     lambda_n=1000,
+    seed=123
 )
 
 # Use Recall@300 for evaluation
diff --git a/examples/cdr_example.py b/examples/cdr_example.py
index d417826c0..64f86c25e 100644
--- a/examples/cdr_example.py
+++ b/examples/cdr_example.py
@@ -60,6 +60,7 @@
     lambda_n=5,
     learning_rate=0.001,
     vocab_size=8000,
+    seed=123
 )
 
 # Use Recall@300 for evaluation