From 0b6e7affc07c6d56f22f29f96659f1d7eae9c2aa Mon Sep 17 00:00:00 2001 From: ludwigbothmann <46222472+ludwigbothmann@users.noreply.github.com> Date: Thu, 9 Nov 2023 15:13:00 +0100 Subject: [PATCH] Updates from Overleaf --- slides/information-theory/slides-info-entropy.tex | 2 ++ 1 file changed, 2 insertions(+) diff --git a/slides/information-theory/slides-info-entropy.tex b/slides/information-theory/slides-info-entropy.tex index a3f239df..ecf56999 100644 --- a/slides/information-theory/slides-info-entropy.tex +++ b/slides/information-theory/slides-info-entropy.tex @@ -178,6 +178,8 @@ \implies p_i = 2^{(-1 - \lambda)} \implies p_i = \frac{1}{g}, \end{gather*} where the last step follows from the fact that all $p_i$ are equal and the constraint. + \vspace{0.2cm}\\ + \textbf{NB}: We could also have solved the constraint for $p_1$ and substituted $p_1=1-\sum_{i=2}^{g} p_i$ in the objective to avoid constrained optimization. \end{vbframe}