Commit: init
alterzero committed Mar 29, 2020
1 parent 340e491 commit cf7a0bc
Showing 37 changed files with 59,163 additions and 0 deletions.
5 changes: 5 additions & 0 deletions .gitignore
@@ -0,0 +1,5 @@

FBPN_NEW/.DS_Store
weights/.DS_Store
ucf101_interp_ours/.DS_Store
.DS_Store
116 changes: 116 additions & 0 deletions autoencoder_v4.py
@@ -0,0 +1,116 @@
import torch
import torch.nn as nn
import torch.nn.functional as F

class UNet(nn.Module):
    def __init__(self, in_channels, out_classes):
        super(UNet, self).__init__()
        self.inc = inconv(in_channels, 64)
        self.down1 = down(64, 128)
        self.down2 = down(128, 256)
        self.down3 = down(256, 512)
        self.down4 = down(512, 1024)
        self.down5 = down(1024, 1024)
        self.up1 = up(2048, 512)
        self.up2 = up(1024, 256)
        self.up3 = up(512, 128)
        self.up4 = up(256, 64)
        self.up5 = up(128, 64)
        self.outc = outconv(64, out_classes)

    def forward(self, x):
        x1 = self.inc(x)
        x2 = self.down1(x1)
        x3 = self.down2(x2)
        x4 = self.down3(x3)
        x5 = self.down4(x4)
        x6 = self.down5(x5)
        x = self.up1(x6, x5)
        x = self.up2(x, x4)
        x = self.up3(x, x3)
        x = self.up4(x, x2)
        x = self.up5(x, x1)
        x = self.outc(x)
        return torch.sigmoid(x)
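
# Note on channel bookkeeping (added comment, not part of the original commit):
# each `up` stage concatenates the upsampled decoder features with the matching
# encoder skip connection, so its in_ch is the sum of the two inputs, e.g.
# up1 receives 1024 (x6, upsampled) + 1024 (x5) = 2048 channels.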

class double_conv(nn.Module):
    '''(conv => BN => ReLU) * 2'''
    def __init__(self, in_ch, out_ch):
        super(double_conv, self).__init__()
        self.conv = nn.Sequential(
            nn.Conv2d(in_ch, out_ch, 3, padding=1),
            nn.BatchNorm2d(out_ch),
            nn.ReLU(inplace=True),
            nn.Conv2d(out_ch, out_ch, 3, padding=1),
            nn.BatchNorm2d(out_ch),
            nn.ReLU(inplace=True)
        )

    def forward(self, x):
        x = self.conv(x)
        return x


class inconv(nn.Module):
    def __init__(self, in_ch, out_ch):
        super(inconv, self).__init__()
        self.conv = double_conv(in_ch, out_ch)

    def forward(self, x):
        x = self.conv(x)
        return x


class down(nn.Module):
    def __init__(self, in_ch, out_ch):
        super(down, self).__init__()
        self.mpconv = nn.Sequential(
            nn.MaxPool2d(2),
            double_conv(in_ch, out_ch)
        )

    def forward(self, x):
        x = self.mpconv(x)
        return x


class up(nn.Module):
    def __init__(self, in_ch, out_ch, bilinear=True):
        super(up, self).__init__()

        # It would be nice if the upsampling could be learned too, but my
        # machine does not have enough memory to handle all those weights.
        if bilinear:
            self.up = nn.Upsample(scale_factor=2.0, mode='bilinear', align_corners=True)
        else:
            self.up = nn.ConvTranspose2d(in_ch//2, in_ch//2, 2, stride=2)

        self.conv = double_conv(in_ch, out_ch)

    def forward(self, x1, x2):
        x1 = self.up(x1)

        # inputs are NCHW; dims 2 and 3 are H and W
        diffY = x2.size()[2] - x1.size()[2]
        diffX = x2.size()[3] - x1.size()[3]

        x1 = F.pad(x1, (diffX // 2, diffX - diffX // 2,
                        diffY // 2, diffY - diffY // 2))

        # for padding issues, see
        # https://github.com/HaiyongJiang/U-Net-Pytorch-Unstructured-Buggy/commit/0e854509c2cea854e247a9c615f175f76fbb2e3a
        # https://github.com/xiaopeng-liao/Pytorch-UNet/commit/8ebac70e633bac59fc22bb5195e513d5832fb3bd
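
        # Worked example (added note, with hypothetical shapes not taken from
        # the original commit): if x2 is 1x64x53x53 and x1 upsamples to
        # 1x64x52x52, then diffY = diffX = 1, so F.pad adds (left=0, right=1,
        # top=0, bottom=1) and x1 becomes 53x53, matching x2 for the
        # concatenation below.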

        x = torch.cat([x2, x1], dim=1)
        x = self.conv(x)
        return x


class outconv(nn.Module):
    def __init__(self, in_ch, out_ch):
        super(outconv, self).__init__()
        self.conv = nn.Conv2d(in_ch, out_ch, 1)

    def forward(self, x):
        x = self.conv(x)
        return x
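

# Minimal smoke test (an added sketch, not part of the original commit): the
# five pooling stages each halve the spatial size, so H and W should be
# divisible by 2**5 = 32. The channel counts (3 in, 3 out) are illustrative
# assumptions only.
if __name__ == '__main__':
    net = UNet(in_channels=3, out_classes=3)
    dummy = torch.randn(1, 3, 64, 64)  # N x C x H x W; 64 is divisible by 32
    out = net(dummy)
    print(out.shape)  # expected: torch.Size([1, 3, 64, 64])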