# python - 在 TensorFlow 中计算 KL 散度

`NotImplementedError: No KL(dist_a || dist_b) registered for dist_a type Tensor and dist_b type Tensor`

KL散度定义为：

`KL(prob_a, prob_b) = Sum(prob_a * log(prob_a/prob_b))`

`H(prob_a, prob_b) = -Sum(prob_a * log(prob_b))`

`KL = tf.reduce_mean(-tf.nn.softmax_cross_entropy_with_logits(labels=prob_a, logits=y))`

（注意：上式并不是 KL 散度本身——它只是交叉熵取负后再求平均；此外 `softmax_cross_entropy_with_logits` 必须用关键字参数 `labels=`、`logits=` 调用。）

``````
KL(prob_a, prob_b)

= Sum(prob_a * log(prob_a/prob_b))

= Sum(prob_a * log(prob_a) - prob_a * log(prob_b))

= - Sum(prob_a * log(prob_b)) + Sum(prob_a * log(prob_a))

= - Sum(prob_a * log(prob_b)) + const

= H(prob_a, prob_b) + const

``````

``````
def kl(x, y):
    """Exact KL(x || y) for batches of categorical distributions.

    Both arguments are probability tensors whose last axis sums to 1;
    the result has one KL value per row (TF 1.x distributions API).
    """
    dist_x = tf.distributions.Categorical(probs=x)
    dist_y = tf.distributions.Categorical(probs=y)
    return tf.distributions.kl_divergence(dist_x, dist_y)

result = kl(prob_a, prob_b)

``````

``````
import numpy as np

import tensorflow as tf

# Two batches of categorical distributions; each row sums to 1.
a = np.array([[0.25, 0.1, 0.65], [0.8, 0.15, 0.05]])
b = np.array([[0.7, 0.2, 0.1], [0.15, 0.8, 0.05]])

sess = tf.Session()
result = kl(a, b)
print(result.eval(session=sess))  # [0.88995184 1.08808468]

``````

``````
# NumPy sanity check of the same values: elementwise a * log(a/b),
# summed over the distribution axis.
(a * np.log(a / b)).sum(axis=1)

``````

``````
# tf.contrib.distributions (TF 1.x) also registers closed-form KL for its
# own distribution objects, e.g. between two Gaussians.
ds = tf.contrib.distributions
p = ds.Normal(loc=0., scale=1.)  # standard normal
q = ds.Normal(loc=1., scale=2.)
kl = ds.kl_divergence(p, q)  # ==> 0.44314718

``````

``````
# KL(a || b) = H(a, b) - H(a, a), written via softmax cross entropy.
# BUG FIX: in TF 1.x softmax_cross_entropy_with_logits rejects positional
# arguments (a _sentinel parameter forces keyword-only use), so the calls
# must spell out labels= and logits=. Note the logits arguments are
# unnormalized scores — softmax is applied to them internally.
prob_a = tf.nn.softmax(a)
cr_aa = tf.nn.softmax_cross_entropy_with_logits(labels=prob_a, logits=a)  # H(a, a)
cr_ab = tf.nn.softmax_cross_entropy_with_logits(labels=prob_a, logits=b)  # H(a, b)
kl_ab = tf.reduce_sum(cr_ab - cr_aa)

``````