update differential entropy plots and add new kl divergence plot
Tobias-Brock committed Nov 16, 2023
1 parent 88a8a7b commit 99ec330
Showing 6 changed files with 82 additions and 6 deletions.
Binary file added slides/information-theory/figure/kl_calculation_plot.png
Binary file modified slides/information-theory/figure/normal_entropy.png
Binary file modified slides/information-theory/figure/uni_entropy.png
16 changes: 10 additions & 6 deletions slides/information-theory/rsrc/make_diff_entropy_plots.R
@@ -69,15 +69,17 @@ uni_plot = function(a, b){
    geom_segment(aes(x = b, y = 0,
                     xend = b, yend = uniform_density), color = "blue", size = 1) +
    geom_segment(aes(x = a, y = 0,
-                    xend = a, yend = uniform_density), color = "blue", size = 1)
+                    xend = a, yend = uniform_density), color = "blue", size = 1) +
+   xlim(0, 1.5) +
+   ylim(0, 1)
  return(p)
}

-plot1 = uni_plot(0, 1)
-plot2 = uni_plot(2,8)
+plot1 = uni_plot(0, 1.5)
+plot2 = uni_plot(0, 1)

p = grid.arrange(plot1, plot2, ncol = 2)
-ggsave("../figure/uni_entropy.png", plot = p, width = 8, height = 3)
+ggsave("../figure/uni_entropy.png", plot = p, width = 7, height = 3)


########## CREATE NORMAL DISTRIBUTIONS
@@ -97,12 +99,14 @@ normal_plot = function(mu,sigma){
  p = ggplot(data, aes(x = x)) +
    geom_line(aes(y = NormalDensity), color = "blue", size = 1) +
    labs(title = paste("Differential entropy:", entropy_normal), x = "x",
-        y = sprintf("N(%d,%s)", mu, sigma))
+        y = sprintf("N(%d,%s)", mu, sigma)) +
+   xlim(-4, 4) +
+   ylim(0, 0.4)
  return(p)
}

plot1 = normal_plot(0, 1)
-plot2 = normal_plot(2,8)
+plot2 = normal_plot(0, 1.5)

p = grid.arrange(plot1, plot2, ncol = 2)
ggsave("../figure/normal_entropy.png", plot = p, width = 8, height = 3)
60 changes: 60 additions & 0 deletions slides/information-theory/rsrc/make_kl_calculation_plot.R
@@ -0,0 +1,60 @@
library(ggplot2)
library(gridExtra)
library(extraDistr)

####### PLOT KL DIVERGENCE FOR NORMAL AND LAPLACE DISTRIBUTION

set.seed(123)

x <- seq(-4, 4, length.out = 1000)

norm_density <- dnorm(x, 0, 1)
lp_density <- dlaplace(x, 0, 1.5)
ratio_norm_lp <- norm_density/lp_density
log_ratio <- log(ratio_norm_lp)
dens_ratio <- norm_density*log_ratio
data <- data.frame(x = x, NormalDensity = norm_density,
LaPlaceDensity = lp_density,
Ratio_Density = ratio_norm_lp,
LogRatio = log_ratio,
DensityRatio = dens_ratio)

integrand <- function(x) {

n_density <- dnorm(x, 0, 1)
l_density <- dlaplace(x, 0, 1.5)
log_ratio <- log(n_density/l_density)
n_density*log_ratio
}

# integrate the pointwise KL contribution over the full support of p
result <- integrate(integrand, lower = -Inf, upper = Inf)
kl <- round(result$value, 2)

plot1 = ggplot(data, aes(x = x)) +
geom_line(aes(y = NormalDensity), color = "blue", size = 1, linetype = "solid") +
geom_line(aes(y = LaPlaceDensity), color = "red", size = 1, linetype = "solid") +
labs(title = "N(0,1) and LP(0,1.5) Densities", x = "x", y = "Density") +
scale_color_manual(values = c("blue"))

plot2 = ggplot(data, aes(x = x)) +
geom_line(aes(y = Ratio_Density), color = "darkgreen", size = 1, linetype = "solid") +
labs(title = "Ratio of Densities", x = "x", y = "p(x)/q(x)") +
scale_color_manual(values = c("red"))

plot3 = ggplot(data, aes(x = x)) +
geom_line(aes(y = LogRatio), color = "purple", size = 1, linetype = "solid") +
labs(title = "Log-Ratio of Densities", x = "x", y = "log(p(x)/q(x))") +
scale_color_manual(values = c("red"))

plot4 = ggplot(data, aes(x = x)) +
geom_line(aes(y = DensityRatio), color = "orange", size = 1, linetype = "solid") +
labs(title = "Integrand", x = "x", y = "p(x)*log(p(x)/q(x))") +
geom_ribbon(aes(ymax = DensityRatio, ymin = 0), fill = "grey", alpha = 0.5) +
geom_text(aes(x = 2.5, y = 0.1, label = paste("D_KL =",kl)), color = "black", size = 3) +
scale_color_manual(values = c("orange"))

p = grid.arrange(plot1, plot2, plot3, plot4, ncol = 2)
ggsave("..figure/kl_calculation_plot.png", plot = p, width =8, height = 5)



12 changes: 12 additions & 0 deletions slides/information-theory/slides-info-kl.tex
@@ -48,6 +48,18 @@

\end{vbframe}

+\begin{vbframe} {KL-Divergence Example}
+
+Consider the KL divergence between the two continuous distributions $p(x) = N(0, 1)$ and $q(x) = LP(0, 1.5)$, given by
+
+$$ D_{KL}(p \| q) = \int_{x \in \Xspace} p(x) \cdot \log \frac{p(x)}{q(x)} \, dx. $$
+
+\begin{figure}
+\includegraphics[width = 8cm]{figure/kl_calculation_plot.png}
+\end{figure}
+
+\end{vbframe}
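For this particular pair the divergence also has a closed form that cross-checks the value shown in the figure (a worked sketch, not part of the commit, writing the Laplace density as $q(x) = \frac{1}{2b} e^{-|x|/b}$ with $b = 1.5$ and using $\mathbb{E}_p|x| = \sqrt{2/\pi}$ for the standard normal):

$$ D_{KL}(p \| q) = \mathbb{E}_p[\log p(x)] - \mathbb{E}_p[\log q(x)] = -\tfrac{1}{2}\log(2\pi e) + \log(2b) + \tfrac{1}{b}\sqrt{\tfrac{2}{\pi}} \approx 0.21. $$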

\begin{vbframe} {Information Inequality}

$D_{KL}(p \| q) \geq 0$ always holds for any pair of distributions, with equality if and only if $p = q$.
