-
Notifications
You must be signed in to change notification settings - Fork 2
/
gaussian.py
80 lines (51 loc) · 2.22 KB
/
gaussian.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
import torch
from torch.autograd import Variable
import numpy as np
from scipy.stats import norm
from scipy.stats import multivariate_normal
import matplotlib.pyplot as plt
"""
Gaussian Distribution
"""
class gaussian:
    """Multivariate Gaussian distribution N(mu, sigma).

    params: dict with keys
        'mu'    -- mean vector (length-d sequence or ndarray)
        'sigma' -- d x d covariance matrix (nested sequence or ndarray)
    """

    def __init__(self, params):
        self.mu = params['mu']
        self.sigma = params['sigma']

    def density(self, x, mu=None, sigma=None):
        """Return the Gaussian pdf evaluated at point x (length-d array).

        mu/sigma override the stored parameters when given.
        """
        # `is None` (not `== None`): comparing a numpy array to None with ==
        # produces an elementwise boolean array and the `if` raises ValueError.
        if mu is None:
            mu = self.mu
        if sigma is None:
            sigma = self.sigma
        mu = np.asarray(mu)
        sigma = np.asarray(sigma)
        x = np.asarray(x)
        # Normalizing constant: (2*pi)^(d/2) * |sigma|^(1/2)
        norm_const = ((2 * np.pi) ** (len(mu) / 2)) * (np.linalg.det(sigma)) ** 0.5
        # Quadratic form: -(1/2) (x-mu)^T sigma^{-1} (x-mu)
        quad = (-1 / 2) * (x - mu).T.dot(np.linalg.inv(sigma)).dot(x - mu)
        return np.exp(quad) / norm_const

    def log_density(self, x):
        """Return log p(x) under the stored parameters."""
        return np.log(self.density(x))

    def grad_log_density(self, x, z_optim=False):
        """Gradient of log p(x) w.r.t. x, computed via torch autograd.

        Returns a float32 numpy array; if z_optim is True, also tracks the
        gradient w.r.t. mu and returns dict(x_grad=..., mu_grad=...) of
        torch tensors.
        """
        dtype = torch.FloatTensor
        mu = Variable(torch.Tensor(self.mu).type(dtype), requires_grad=z_optim)
        sigma = Variable(torch.Tensor(self.sigma).type(dtype), requires_grad=False)
        x = Variable(torch.Tensor(x).type(dtype), requires_grad=True)
        # Only the quadratic form depends on x; the normalizer drops out of
        # the gradient, so it is omitted here.
        y = (-1 / 2) * torch.dot(x - mu, torch.inverse(sigma).mv(x - mu))
        y.backward()
        if z_optim:
            return dict(x_grad=x.grad, mu_grad=mu.grad)
        return x.grad.data.numpy()

    def grad_log_density_1(self, x, z_optim=False):
        """Manual (closed-form) gradient of log p(x).

        d/dx  log N(x | mu, sigma) = -sigma^{-1} (x - mu)
        d/dmu log N(x | mu, sigma) = +sigma^{-1} (x - mu)

        Matches grad_log_density: float32 numpy array by default, or a
        dict of torch tensors when z_optim is True.
        """
        sigma_inv = np.linalg.inv(np.asarray(self.sigma, dtype=np.float32))
        diff = np.asarray(x, dtype=np.float32) - np.asarray(self.mu, dtype=np.float32)
        x_grad = -sigma_inv.dot(diff)
        if z_optim:
            return dict(x_grad=torch.Tensor(x_grad), mu_grad=torch.Tensor(-x_grad))
        return x_grad

    def sampler(self, N, mu=None, sigma=None):
        """Draw N samples; returns an (N, d) array (or (d,) when N == 1)."""
        if mu is None:
            mu = self.mu
        if sigma is None:
            sigma = self.sigma
        return multivariate_normal.rvs(mean=mu, cov=sigma, size=N)