Commit

Merge overleaf-2023-12-20-1153 into main

ludwigbothmann authored Dec 20, 2023
2 parents 710338c + aa83168 commit c52d781
Showing 3 changed files with 4 additions and 7 deletions.
3 changes: 0 additions & 3 deletions slides/information-theory/chapter-order.tex
@@ -40,6 +40,3 @@ \subsection{Entropy and Optimal Code Length II}
 
 \subsection{Mutual Information under Reparametrization (Deep-Dive)}
 \includepdf[pages=-]{../slides-pdf/slides-info-mi-deepdive.pdf}
-
-
-
2 changes: 1 addition & 1 deletion slides/information-theory/slides-info-kl-ml.tex
@@ -33,7 +33,7 @@
 \framebreak
 \begin{itemize}
 \item \textbf{Probabilistic model fitting}\\
-Assume our learner is probabilistic, i.e., we model $p(y| \mathbf{x})$ for example (for example, ridge regression, logistic regression, ...).
+Assume our learner is probabilistic, i.e., we model $p(y| \mathbf{x})$ (for example, logistic regression, Gaussian process, ...).
 
 \begin{center}
 \includegraphics[width=0.5\linewidth]{figure/ftrue.pdf}
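As an aside on the line being fixed (not part of the commit): the reason "probabilistic model fitting" appears in a KL chapter is that maximizing the likelihood of a model $q(y \mid \mathbf{x})$ is a KL minimization in disguise. A minimal worked derivation in plain LaTeX (standard amsmath/amssymb notation is assumed here, not the repo's latex-math macros):

% Aside, not part of the commit: maximum likelihood as KL minimization.
% The expected negative log-likelihood is the cross-entropy
% H(p, q) = H(p) + KL(p || q), and H(p) does not depend on q, so:
\begin{align*}
\mathbb{E}_{y \sim p}\bigl[-\log q(y \mid \mathbf{x})\bigr]
  &= H(p) + D_{KL}(p \,\|\, q), \\
\arg\min_{q} \, \mathbb{E}_{y \sim p}\bigl[-\log q(y \mid \mathbf{x})\bigr]
  &= \arg\min_{q} \, D_{KL}(p \,\|\, q).
\end{align*}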
6 changes: 3 additions & 3 deletions slides/information-theory/slides-info-sourcecoding2.tex
@@ -3,7 +3,7 @@
 \input{../../latex-math/basic-math}
 \input{../../latex-math/basic-ml}
 
-\newcommand{\titlefigure}{figure_man/equal_decode.png}
+\newcommand{\titlefigure}{figure_man/xent_pq.png}
 \newcommand{\learninggoals}{
 \item Know connection between source coding and (cross-)entropy
 \item Know that the entropy of the source distribution is the lower bound for the average code length
@@ -14,13 +14,13 @@
 
 \begin{document}
 
-\lecturechapter{Entropy and Optimal Code Length II}
+\lecturechapter{Source Coding and Cross-Entropy}
 \lecture{Introduction to Machine Learning}
 
 
 %%%%%%% CUT HERE SECOND SOURCE CODING CHUNK
 
-\begin{vbframe} {Source coding and (cross-)entropy}
+\begin{vbframe} {Source coding and cross-entropy}
 
 \begin{itemize}
 \item For a random source / distribution $p$, the minimal number of bits to optimally encode messages from it is the entropy $H(p)$.
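As an aside on this renamed frame (not part of the commit): the lower bound named in the learning goals can be checked on a small worked example, again in plain LaTeX with standard amsmath notation:

% Aside: for p = (1/2, 1/4, 1/4), the optimal code lengths -log2 p(x)
% are 1, 2, 2 bits, and the average code length attains H(p); coding
% with a wrong distribution q costs an extra KL(p || q) bits on average.
\begin{align*}
H(p) &= -\sum_x p(x) \log_2 p(x)
      = \tfrac{1}{2}\cdot 1 + \tfrac{1}{4}\cdot 2 + \tfrac{1}{4}\cdot 2
      = 1.5 \text{ bits}, \\
H(p, q) &= -\sum_x p(x) \log_2 q(x) = H(p) + D_{KL}(p \,\|\, q) \;\ge\; H(p).
\end{align*}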
