generated from ihmeuw-msca/pypkg
Commit
1 parent 179e907 · commit 76f881c
Showing 2 changed files with 77 additions and 0 deletions.
loss_functions.py
@@ -0,0 +1,27 @@
"""This module contains loss functions with their gradient and hessian""" | ||
|
||
import numpy as np | ||
|
||
def compute_loss(beta, C, c, loss): | ||
""" | ||
""" | ||
x = c - np.matmul(C, beta) | ||
if loss == 'hinge': | ||
loss_val = np.sum(np.power(np.maximum(0.0, -x), 3.0)) | ||
loss_grad = - 3.0 * np.square(np.maximum(0.0, -x)) | ||
loss_hess = np.diag(6.0 * np.maximum(0, -x)) | ||
if loss == 'logit': | ||
loss_val = np.sum(np.log(1.0 + np.exp(-x))) | ||
loss_grad = - np.exp(-x) / (1.0 + np.exp(-x)) | ||
loss_hess = np.diag(np.exp(-x) / np.square(1.0 + np.exp(-x))) | ||
return (loss_val, loss_grad, loss_hess) | ||
|
||
def compute_dist(beta, y, q, method): | ||
""" | ||
""" | ||
if method == 'chi2': | ||
dist_val = np.sum(np.square(beta - y) / (2.0 * q * y)) | ||
dist_grad = (beta / y - 1.0) / q | ||
dist_hess = np.diag(1.0 / (q * y)) | ||
return (dist_val, dist_grad, dist_hess) | ||
|
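Since the derivatives returned by compute_loss are taken with respect to x = c - C beta rather than beta, a quick sanity check can confirm the convention. The sketch below uses toy arrays made up purely for illustration, and verifies the analytic 'logit' gradient against a forward finite difference, applying the chain rule grad_beta = -C^T loss_grad:

import numpy as np

from loss_functions import compute_loss

# Toy inputs, invented for this check only.
rng = np.random.default_rng(0)
C = rng.normal(size=(4, 3))
c = rng.normal(size=4)
beta = rng.normal(size=3)

# compute_loss differentiates with respect to x = c - C beta, so the
# gradient with respect to beta follows from the chain rule.
loss_val, loss_grad, _ = compute_loss(beta, C, c, 'logit')
grad_beta = -np.matmul(np.transpose(C), loss_grad)

# Compare each component against a forward finite difference.
eps = 1.0e-6
for i in range(beta.size):
    beta_eps = np.copy(beta)
    beta_eps[i] += eps
    fd = (compute_loss(beta_eps, C, c, 'logit')[0] - loss_val) / eps
    assert abs(fd - grad_beta[i]) < 1.0e-4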
@@ -0,0 +1,50 @@
"""Module with methods to solve the raking problem with a penalty loss""" | ||
|
||
import numpy as np | ||
|
||
from scipy.sparse.linalg import cg | ||
|
||
from loss_functions import compute_loss, compute_dist | ||
|
||
def raking_chi2_loss( | ||
y: np.ndarray, | ||
A: np.ndarray, | ||
s: np.ndarray, | ||
C: np.ndarray, | ||
c: np.ndarray, | ||
q: np.ndarray, | ||
method: str = 'chi2', | ||
loss: str = 'logit', | ||
penalty: float = 1.0, | ||
gamma0: float = 1.0, | ||
max_iter: int = 500, | ||
): | ||
""" | ||
""" | ||
beta = np.copy(y) | ||
lambda_k = np.zeros(A.shape[0]) | ||
sol_k = np.concatenate((beta, lambda_k)) | ||
epsilon = 1.0 | ||
iter_eps = 0 | ||
while (epsilon > 1.0e-10) & (iter_eps < max_iter): | ||
(loss_val, loss_grad, loss_hess) = compute_loss(beta, C, c, loss) | ||
(dist_val, dist_grad, dist_hess) = compute_dist(beta, y, q, method) | ||
F1 = dist_grad + np.matmul(np.transpose(A), lambda_k) \ | ||
- penalty * np.matmul(np.transpose(C), loss_grad) | ||
F2 = np.matmul(A, beta) - s | ||
F = np.concatenate((F1, F2)) | ||
J = dist_hess + penalty * np.matmul(np.transpose(C), np.matmul(loss_hess, C)) | ||
J = np.concatenate( | ||
(np.concatenate((J, np.transpose(A)), axis=1), | ||
np.concatenate((A, np.zeros((A.shape[0], A.shape[0]))), axis=1), | ||
), axis=0, | ||
) | ||
delta_sol = cg(J, F)[0] | ||
sol_k = sol_k - delta_sol | ||
beta = sol_k[0:A.shape[1]] | ||
lambda_k = sol_k[A.shape[1]:(A.shape[0] + A.shape[1])] | ||
epsilon = np.mean(np.abs(s - np.matmul(A, beta))) | ||
iter_eps = iter_eps + 1 | ||
print(iter_eps, epsilon, dist_val, loss_val) | ||
return (beta, lambda_k, iter_eps) | ||
|
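A minimal usage sketch, assuming this module is saved next to loss_functions.py. The file name raking_penalty.py is a placeholder (the diff view does not show the second file's name) and every number below is invented: four observations are raked so that they sum to 10, while the default 'logit' penalty softly discourages C beta from exceeding c, i.e. keeps each raked value below 5.

import numpy as np

# 'raking_penalty' is a placeholder module name for this sketch only.
from raking_penalty import raking_chi2_loss

# Toy problem, numbers made up for illustration.
y = np.array([2.0, 3.0, 2.0, 4.0])   # observations (sum to 11)
A = np.ones((1, 4))                   # one linear constraint: sum(beta) = s
s = np.array([10.0])
C = np.eye(4)                         # penalty softly enforces C beta <= c
c = np.full(4, 5.0)
q = np.ones(4)                        # equal raking weights

beta, lambda_k, n_iter = raking_chi2_loss(y, A, s, C, c, q)
# If the iteration converged, beta sums to 10 up to the 1e-10 tolerance.
print(beta, beta.sum(), n_iter)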