Experiments1 #32

Open · wants to merge 7 commits into master
model.py (52 changes: 31 additions & 21 deletions)
@@ -1,11 +1,14 @@
+ import logging
+ from math import log, pi
+
+ import numpy as np
import torch
+ from scipy import linalg as la
from torch import nn
from torch.nn import functional as F
- from math import log, pi, exp
- import numpy as np
- from scipy import linalg as la

logabs = lambda x: torch.log(torch.abs(x))
+ LOGGING_LEVEL = logging.DEBUG


class ActNorm(nn.Module):
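Side note on the helper kept just below the imports: `logabs` computes an elementwise log|x|, which is what ActNorm's log-determinant needs. A minimal, self-contained illustration (not part of the diff):

```python
import torch

logabs = lambda x: torch.log(torch.abs(x))

# log|x| elementwise; ActNorm's per-image log-determinant is
# height * width * sum(logabs(scale)) over the channel-wise scales.
x = torch.tensor([-2.0, 0.5, 3.0])
print(logabs(x))  # tensor([ 0.6931, -0.6931,  1.0986])
```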
@@ -23,17 +26,17 @@ def initialize(self, input):
flatten = input.permute(1, 0, 2, 3).contiguous().view(input.shape[1], -1)
mean = (
flatten.mean(1)
- .unsqueeze(1)
- .unsqueeze(2)
- .unsqueeze(3)
- .permute(1, 0, 2, 3)
+ .unsqueeze(1)
+ .unsqueeze(2)
+ .unsqueeze(3)
+ .permute(1, 0, 2, 3)
)
std = (
flatten.std(1)
- .unsqueeze(1)
- .unsqueeze(2)
- .unsqueeze(3)
- .permute(1, 0, 2, 3)
+ .unsqueeze(1)
+ .unsqueeze(2)
+ .unsqueeze(3)
+ .permute(1, 0, 2, 3)
)

self.loc.data.copy_(-mean)
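For readers skimming the hunk above: the permute/view/unsqueeze chain just computes per-channel mean and std over the batch and spatial dimensions, shaped (1, C, 1, 1) so they broadcast against NCHW input. A standalone sketch of the same statistics (illustrative, not the PR's code):

```python
import torch

def actnorm_init_stats(x):
    # Per-channel mean/std over batch and spatial dims, shaped (1, C, 1, 1);
    # equivalent to the permute/view/unsqueeze chain in ActNorm.initialize.
    n, c, h, w = x.shape
    flat = x.permute(1, 0, 2, 3).reshape(c, -1)   # (C, N*H*W)
    mean = flat.mean(dim=1).view(1, c, 1, 1)
    std = flat.std(dim=1).view(1, c, 1, 1)
    return mean, std

x = torch.randn(8, 3, 16, 16)
mean, std = actnorm_init_stats(x)
# ActNorm then initialises loc = -mean and scale = 1 / (std + 1e-6),
# so the first batch comes out roughly zero-mean, unit-variance per channel.
```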
@@ -74,7 +77,7 @@ def forward(self, input):

out = F.conv2d(input, self.weight)
logdet = (
- height * width * torch.slogdet(self.weight.squeeze().double())[1].float()
+ height * width * torch.slogdet(self.weight.squeeze().double())[1].float()
)

return out, logdet
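For context on the hunk above: an invertible 1x1 convolution applies the same C x C matrix W at every spatial position, so its Jacobian log-determinant is height * width * log|det W|, and slogdet on a double-precision copy of W is the numerically stable way to get it. A small self-contained check of both facts (illustrative only, sizes are arbitrary):

```python
import torch
from torch.nn import functional as F

c, h, w = 4, 8, 8
W = torch.randn(c, c)
x = torch.randn(1, c, h, w)

# a 1x1 convolution is a per-pixel matrix multiply over channels
out = F.conv2d(x, W.view(c, c, 1, 1))
out_ref = torch.einsum('ij,njhw->nihw', W, x)
print(torch.allclose(out, out_ref, atol=1e-5))  # True

# hence the per-image log-determinant is height * width * log|det W|
logdet = h * w * torch.slogdet(W.double())[1].float()
print(logdet.item())
```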
@@ -88,7 +91,7 @@ def reverse(self, output):
class InvConv2dLU(nn.Module):
def __init__(self, in_channel):
super().__init__()
-
+ # print(f'InvConv2dLU constructor with in_channel = {in_channel}')
weight = np.random.randn(in_channel, in_channel)
q, _ = la.qr(weight)
w_p, w_l, w_u = la.lu(q.astype(np.float32))
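Context for the constructor above: the weight is initialised as a random orthogonal matrix via QR, so the layer starts with log|det W| = 0, and scipy's LU factorisation splits it into the fixed permutation plus the learned triangular factors. A quick standalone check (not part of the diff):

```python
import numpy as np
from scipy import linalg as la

weight = np.random.randn(4, 4)
q, _ = la.qr(weight)                       # random orthogonal init, |det q| == 1
print(np.log(np.abs(np.linalg.det(q))))    # ~0.0

w_p, w_l, w_u = la.lu(q.astype(np.float32))
print(np.allclose(w_p @ w_l @ w_u, q, atol=1e-5))  # True: q = P @ L @ U
```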
@@ -123,9 +126,9 @@ def forward(self, input):

def calc_weight(self):
weight = (
- self.w_p
- @ (self.w_l * self.l_mask + self.l_eye)
- @ ((self.w_u * self.u_mask) + torch.diag(self.s_sign * torch.exp(self.w_s)))
+ self.w_p
+ @ (self.w_l * self.l_mask + self.l_eye)
+ @ ((self.w_u * self.u_mask) + torch.diag(self.s_sign * torch.exp(self.w_s)))
)

return weight.unsqueeze(2).unsqueeze(3)
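calc_weight above rebuilds W = P (L + I) (U + diag(sign * exp(w_s))) from the masked triangular parameters; since the permutation and the unit-triangular factor both have |det| = 1, the layer's log-determinant reduces to sum(w_s) (times height * width in forward). A sketch with assumed shapes, mirroring but not copying the module's buffers:

```python
import torch

c = 4
w_p = torch.eye(c)                          # fixed permutation (identity here)
w_l = torch.randn(c, c)                     # learned; only strictly lower part used
w_u = torch.randn(c, c)                     # learned; only strictly upper part used
w_s = torch.randn(c)                        # learned log-magnitudes of the diagonal
s_sign = torch.sign(torch.randn(c))         # fixed diagonal signs

l_mask = torch.tril(torch.ones(c, c), -1)   # strictly lower-triangular mask
u_mask = l_mask.T                           # strictly upper-triangular mask
l_eye = torch.eye(c)

weight = (
    w_p
    @ (w_l * l_mask + l_eye)
    @ ((w_u * u_mask) + torch.diag(s_sign * torch.exp(w_s)))
)

# |det P| = 1 and the unit-triangular factor has det 1, so log|det W| = sum(w_s)
print(torch.allclose(torch.slogdet(weight)[1], w_s.sum(), atol=1e-4))  # True
```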
@@ -220,7 +223,7 @@ def __init__(self, in_channel, affine=True, conv_lu=True):

else:
self.invconv = InvConv2d(in_channel)
-
+ # TODO add InvConv2dSVD
self.coupling = AffineCoupling(in_channel, affine=affine)

def forward(self, input):
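On the TODO above: nothing named InvConv2dSVD exists in this diff, but as a purely hypothetical sketch, an SVD-style variant could parameterise W = U diag(exp(log_s)) V^T, giving log|det W| = sum(log_s) for as long as U and V stay orthogonal (which this sketch does not enforce):

```python
import torch
from torch import nn
from torch.nn import functional as F

class InvConv2dSVD(nn.Module):
    # Hypothetical sketch only -- not part of this PR.
    def __init__(self, in_channel):
        super().__init__()
        q = torch.linalg.qr(torch.randn(in_channel, in_channel))[0]
        u, s, vh = torch.linalg.svd(q)
        self.u = nn.Parameter(u)           # would need to be kept orthogonal in training
        self.log_s = nn.Parameter(torch.log(s.clamp_min(1e-6)))
        self.vh = nn.Parameter(vh)         # would need to be kept orthogonal in training

    def calc_weight(self):
        w = self.u @ torch.diag(torch.exp(self.log_s)) @ self.vh
        return w.unsqueeze(2).unsqueeze(3)

    def forward(self, input):
        _, _, height, width = input.shape
        out = F.conv2d(input, self.calc_weight())
        # valid only while u and vh remain orthogonal: log|det W| = sum(log_s)
        logdet = height * width * self.log_s.sum()
        return out, logdet
```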
@@ -257,6 +260,7 @@ def __init__(self, in_channel, n_flow, split=True, affine=True, conv_lu=True):
squeeze_dim = in_channel * 4

self.flows = nn.ModuleList()
+ self.conv_lu = conv_lu
for i in range(n_flow):
self.flows.append(Flow(squeeze_dim, affine=affine, conv_lu=conv_lu))
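The squeeze_dim = in_channel * 4 above comes from the Block's space-to-depth squeeze: each 2x2 spatial patch is folded into channels before the flows run. An illustrative standalone version of that reshaping (assuming the usual Glow squeeze; shapes only, not the PR's code):

```python
import torch

n, c, h, w = 1, 3, 8, 8
x = torch.randn(n, c, h, w)

# fold every 2x2 spatial patch into the channel dimension
squeezed = (
    x.view(n, c, h // 2, 2, w // 2, 2)
    .permute(0, 1, 3, 5, 2, 4)
    .reshape(n, c * 4, h // 2, w // 2)
)
print(squeezed.shape)  # torch.Size([1, 12, 4, 4]) -- channels x4, spatial /2
```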

@@ -334,16 +338,22 @@ def reverse(self, output, eps=None, reconstruct=False):

class Glow(nn.Module):
def __init__(
- self, in_channel, n_flow, n_block, affine=True, conv_lu=True
+ self, in_channel, n_flow, n_block, affine=True, conv_lu=True
):
super().__init__()

+ logging.basicConfig(level=LOGGING_LEVEL)
+ logger = logging.getLogger(self.__class__.__name__)
self.blocks = nn.ModuleList()
n_channel = in_channel
for i in range(n_block - 1):
- self.blocks.append(Block(n_channel, n_flow, affine=affine, conv_lu=conv_lu))
+ b = Block(n_channel, n_flow, affine=affine, conv_lu=conv_lu)
+ self.blocks.append(b)
+ logger.debug(f'In {self.__class__.__name__} init, for block {i} , conv_lu = {b.conv_lu}')
n_channel *= 2
- self.blocks.append(Block(n_channel, n_flow, split=False, affine=affine))
+ # Make the last block with the biggest W use full matrix, not LU
+ b = Block(n_channel, n_flow, split=False, affine=affine,conv_lu=False)
+ self.blocks.append(b)
+ logger.debug(f'In {self.__class__.__name__} init, for block {i + 1} , conv_lu = {b.conv_lu}')

def forward(self, input):
log_p_sum = 0
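Net effect of the Glow changes above: every block now logs which 1x1-convolution variant it was built with, and the last (widest) block deliberately falls back to the full-matrix InvConv2d instead of the LU form. An illustrative usage sketch, with assumed hyperparameters and assuming the usual (log_p, logdet, z_list) return of Glow.forward:

```python
import torch
from model import Glow  # model.py as changed in this PR

# the first n_block - 1 blocks use InvConv2dLU; the final block uses InvConv2d,
# and each block's conv_lu choice is printed at DEBUG level during construction
glow = Glow(in_channel=3, n_flow=4, n_block=3, affine=True, conv_lu=True)
x = torch.randn(2, 3, 32, 32)
log_p, logdet, z_outs = glow(x)
```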