forked from jonathanventura/self-supervised-poisson-gaussian
-
Notifications
You must be signed in to change notification settings - Fork 0
/
gmm_posterior_expected_value.py
82 lines (69 loc) · 2.27 KB
/
gmm_posterior_expected_value.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
import numpy as np
import keras.backend as K
import tensorflow as tf
def p(i, v, i2=""):
    """Debug helper: print the given label(s) followed by the mean of tensor `v`."""
    mean_value = K.eval(K.mean(v))
    print(i, i2, mean_value)
def gmm_posterior_expected_value(components, mus, sigs, weights, z, noisesig):
    """Posterior expected value of a Gaussian-mixture signal observed under
    additive Gaussian noise.

    For each mixture component the numerator/denominator terms of the
    closed-form posterior mean are accumulated across components, then
    combined with a constant factor exp(-z^2 / (2*noisesig^2)) shared by
    all components.

    Args:
        components: int, number of mixture components (size of the last
            axis of `mus` / `sigs` / `weights`).
        mus: tensor of component means; last axis indexes components.
            NOTE(review): the slicing `[:, :, :, i]` below assumes a rank-4
            layout (e.g. batch, H, W, components) — confirm against callers.
        sigs: tensor of component standard deviations, same shape as `mus`.
        weights: tensor of mixture weights / coefficients, same shape as `mus`.
        z: observed (noisy) value; cast to float32 and squeezed on its last
            axis, so it is expected to carry a trailing singleton axis.
        noisesig: float, standard deviation of the additive observation noise.

    Returns:
        Tensor of posterior expected values, with NaNs (e.g. from a zero
        denominator) replaced by the large positive sentinel 1e25.
    """
    sqr = K.square
    # Normalize everything to float32 so integer-typed inputs do not cause
    # mixed-dtype errors inside the TF ops below.
    mus = K.cast(mus, "float32")
    sigs = K.cast(sigs, "float32")
    weights = K.cast(weights, "float32")
    noisesig = K.cast(noisesig, "float32")
    z = K.squeeze(K.cast(z, "float32"), axis=-1)
    # constant factor shared by every component
    const = K.exp( -sqr(z) / (2 * sqr(noisesig)) )
    # numerator and denominator summations, for each distribution in components
    numerator = 0
    denominator = 0
    for i in range(components):
        # select each component layer
        mu = mus[:,:,:,i]
        sig = sigs[:,:,:,i]
        wt = weights[:,:,:,i]
        num_term = wt * ( sqr(noisesig) * mu + sqr(sig) * z )
        num_term *= K.exp( -sqr(mu) / (2 * sqr(sig)) )
        num_term /= K.pow( (sqr(noisesig) + sqr(sig)), 3/2 )
        # clip the exponent so exp() cannot overflow to inf
        exponent = K.clip(
            ( sqr( sqr(noisesig) * mu + sqr(sig) * z ) ) /
            ( 2 * sqr(noisesig) * sqr(sig) * (sqr(noisesig) + sqr(sig)) ), -70, 70)
        num_term *= K.exp(exponent)
        numerator += num_term
        den_term = wt / (K.sqrt( sqr(noisesig) + sqr(sig) ))
        den_term *= K.exp(
            -(sqr(mu - z)) /
            (2 * (sqr(noisesig) + sqr(sig)))
        )
        denominator += den_term
    result = const * numerator / denominator
    # Replace NaNs with a large positive sentinel. Use tf.shape() (dynamic
    # shape) instead of result.shape: the static shape may contain None for
    # a dynamic batch dimension, which makes tf.fill fail.
    result = tf.where(tf.math.is_nan(result),
                      tf.fill(tf.shape(result), 1e25),
                      result)
    return result
def test_gm_expected():
    # Smoke test: 3-component mixture over a 2x2 grid, large negative
    # observation z, checking one output cell against a precomputed value.
    # NOTE(review): gmm_posterior_expected_value slices mus/sigs/weights as
    # [:, :, :, i] (rank 4), but the constants below are rank 3 — and it
    # squeezes z on its last axis although z here is a Python scalar.
    # Confirm this test actually runs on the intended TF/Keras versions.
    result = gmm_posterior_expected_value(
        components=3,
        mus=tf.constant([
            [[-30, 70, 20],[400, 20, 20]],
            [[50, -20, 20],[200, 0, 20]]
        ]),
        sigs=tf.constant([
            [[100, 30, 30],[200, 12, 30]],
            [[20, 300, 30],[99, 2, 30]]
        ]),
        weights=tf.constant([
            [[0.6, 0.4, 0],[0.2, 0.8, 0]],
            [[0.5, 0.5, 0],[0.65, 0.35, 0]]
        ]),
        z=-710,
        noisesig=50
    )
    # materialize the tensor, then compare one cell to the expected
    # posterior mean within a tight absolute tolerance
    result = K.eval(result)
    expected = -574
    assert (abs(result[0][0] - expected) < 0.001)
    print("success")
if __name__ == "__main__":
    # Run the smoke test when executed as a script.
    test_gm_expected()