diff --git a/slides-pdf/lecture_sl.pdf b/slides-pdf/lecture_sl.pdf
index 39ab016d..a8ed3cd2 100644
Binary files a/slides-pdf/lecture_sl.pdf and b/slides-pdf/lecture_sl.pdf differ
diff --git a/slides-pdf/slides-info-cross-entropy-kld.pdf b/slides-pdf/slides-info-cross-entropy-kld.pdf
index 036f9267..989479d2 100644
Binary files a/slides-pdf/slides-info-cross-entropy-kld.pdf and b/slides-pdf/slides-info-cross-entropy-kld.pdf differ
diff --git a/slides/information-theory/slides-info-cross-entropy-kld.tex b/slides/information-theory/slides-info-cross-entropy-kld.tex
index 3c7c00bc..994a231f 100644
--- a/slides/information-theory/slides-info-cross-entropy-kld.tex
+++ b/slides/information-theory/slides-info-cross-entropy-kld.tex
@@ -99,7 +99,7 @@
 The KL divergence (which is non-negative) between $f(x)$ and $g(x)$ is:
 \begin{equation}
 \begin{aligned}
- 0 \leq D_{KL}(f \| g) & = -h(f) + H(p \| q) \\
+ 0 \leq D_{KL}(f \| g) & = -h(f) + H(f \| g) \\
 & =-h(f)-\int_{-\infty}^{\infty} f(x) \log (g(x)) dx
 \end{aligned}
 \end{equation}
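
The patch corrects the notation on the slide so the cross-entropy term uses the same densities $f$ and $g$ as the rest of the equation. As a sanity check of the corrected identity $D_{KL}(f \| g) = -h(f) + H(f \| g) \geq 0$, here is a minimal numerical sketch (not part of the patch), assuming NumPy and SciPy are available; the Gaussian choices for f and g are purely illustrative.

# Numerical check of the corrected identity from the slide:
#   D_KL(f || g) = -h(f) + H(f || g) >= 0
# The Gaussian densities below are illustrative assumptions, not from the slides.
import numpy as np
from scipy import integrate, stats

f = stats.norm(loc=0.0, scale=1.0)   # f(x): N(0, 1)
g = stats.norm(loc=1.0, scale=2.0)   # g(x): N(1, 4)

# Differential entropy h(f) = -\int f(x) log f(x) dx
h_f, _ = integrate.quad(lambda x: -f.pdf(x) * f.logpdf(x), -np.inf, np.inf)

# Cross-entropy H(f || g) = -\int f(x) log g(x) dx
H_fg, _ = integrate.quad(lambda x: -f.pdf(x) * g.logpdf(x), -np.inf, np.inf)

# KL divergence D_KL(f || g) = \int f(x) log(f(x) / g(x)) dx
kl, _ = integrate.quad(lambda x: f.pdf(x) * (f.logpdf(x) - g.logpdf(x)),
                       -np.inf, np.inf)

print(f"h(f)            = {h_f:.6f}")
print(f"H(f || g)       = {H_fg:.6f}")
print(f"D_KL(f || g)    = {kl:.6f}")
print(f"-h(f) + H(f||g) = {-h_f + H_fg:.6f}")  # should match D_KL and be >= 0

The last two printed values should agree up to integration error and be non-negative, which is exactly the relation the corrected slide states.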