Commit
new file: dsm.py
	new file:   dsm_loss.py
	new file:   dsm_utilites.py
chiragnagpal committed Apr 2, 2020
1 parent b75329b commit f566977
Showing 3 changed files with 702 additions and 0 deletions.
dsm.py (128 additions, 0 deletions)
@@ -0,0 +1,128 @@
import torch.nn as nn
import torch

class DeepSurvivalMachines(nn.Module):
    """Mixture of k parametric survival distributions (Weibull or LogNormal)
    whose parameters are modulated by a neural network."""

    def __init__(self, inputdim, k, mlptyp=1, HIDDEN=None, init=None,
                 dist='Weibull'):

        super(DeepSurvivalMachines, self).__init__()

        self.k = k  # number of mixture components
        self.mlptyp = mlptyp
        self.HIDDEN = HIDDEN

        # Learnable base shape and scale parameters, one per mixture
        # component, initialized to -1.
        self.scale = nn.Parameter(-torch.ones(k))
        self.shape = nn.Parameter(-torch.ones(k))


        if mlptyp == 1:
            # No embedding network: the heads act directly on the raw input.
            self.embedding = None
            lastdim = inputdim
        else:
            # mlptyp = m builds an MLP embedding with (m - 1) hidden layers
            # of widths HIDDEN[0], ..., HIDDEN[m - 2], each followed by ReLU6.
            layers = []
            prevdim = inputdim
            for hdim in HIDDEN[:mlptyp - 1]:
                layers.append(nn.Linear(prevdim, hdim, bias=False))
                layers.append(nn.ReLU6())
                prevdim = hdim
            self.embedding = nn.Sequential(*layers)
            lastdim = prevdim

        # Heads mapping the embedding to per-component gate logits and to
        # input-dependent adjustments of the shape and scale parameters.
        # Their input width must match the embedding's output width.
        self.gate = nn.Sequential(nn.Linear(lastdim, k, bias=False))
        self.scaleg = nn.Sequential(nn.Linear(lastdim, k, bias=True))
        self.shapeg = nn.Sequential(nn.Linear(lastdim, k, bias=True))

        if init is not None:
            # Optionally seed the base shape and scale parameters.
            self.shape.data.fill_(init[0])
            self.scale.data.fill_(init[1])

        self.dist = dist

        # Activation bounding the input-dependent shape/scale adjustments;
        # the choice depends on the distribution family.
        if self.dist == 'Weibull':
            self.act = nn.SELU()
        elif self.dist == 'LogNormal':
            self.act = nn.Tanh()

    def forward(self, x, adj=True):

        if self.mlptyp == 1:
            embed = x
        else:
            embed = self.embedding(x)

        if adj:
            # Per-sample parameters: the learnable base values plus a
            # bounded, input-dependent adjustment from the heads; the gate
            # logits are scaled down by a factor of 1000.
            return (self.act(self.shapeg(embed)) + self.shape.expand(x.shape[0], -1),
                    self.act(self.scaleg(embed)) + self.scale.expand(x.shape[0], -1),
                    self.gate(embed) / 1000)
        else:
            # Input-independent base parameters only.
            return self.shape, self.scale, self.gate(embed) / 1000
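
A minimal usage sketch (not part of this commit; the input width, hidden sizes, and batch size below are illustrative): a forward pass returns per-sample shape and scale parameters and gate logits, each of shape (batch, k).

import torch

# Illustrative smoke test (not from the commit): mlptyp=3 builds a
# two-hidden-layer embedding, so HIDDEN needs two widths.
model = DeepSurvivalMachines(inputdim=10, k=4, mlptyp=3,
                             HIDDEN=[32, 16], dist='Weibull')

x = torch.randn(8, 10)                       # batch of 8 samples
shape, scale, gate = model(x, adj=True)      # per-sample parameters
print(shape.shape, scale.shape, gate.shape)  # each torch.Size([8, 4])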


