From f008138b61c4b00d41fdb159b40a8344b7b41cf6 Mon Sep 17 00:00:00 2001
From: Tobias-Brock
Date: Fri, 10 Nov 2023 21:02:15 +0100
Subject: [PATCH] update entropy chapter

---
 .DS_Store                                          | Bin 6148 -> 6148 bytes
 content/.DS_Store                                  | Bin 10244 -> 10244 bytes
 content/chapters/.DS_Store                         | Bin 8196 -> 8196 bytes
 .../13_information_theory/13-02-entropy.md         | 15 +++++++++++++++
 .../{13-02-diffent.md => 13-03-diffent.md}         |  4 ++--
 .../{13-03-kl.md => 13-04-kl.md}                   |  4 ++--
 ...-sourcecoding.md => 13-05-sourcecoding.md}      |  4 ++--
 ...ropy-kld.md => 13-06-cross-entropy-kld.md}      |  4 ++--
 .../{13-06-ml.md => 13-07-ml.md}                   |  4 ++--
 ...07-mutual-info.md => 13-08-mutual-info.md}      |  4 ++--
 10 files changed, 27 insertions(+), 12 deletions(-)
 create mode 100644 content/chapters/13_information_theory/13-02-entropy.md
 rename content/chapters/13_information_theory/{13-02-diffent.md => 13-03-diffent.md} (82%)
 rename content/chapters/13_information_theory/{13-03-kl.md => 13-04-kl.md} (86%)
 rename content/chapters/13_information_theory/{13-04-sourcecoding.md => 13-05-sourcecoding.md} (81%)
 rename content/chapters/13_information_theory/{13-05-cross-entropy-kld.md => 13-06-cross-entropy-kld.md} (83%)
 rename content/chapters/13_information_theory/{13-06-ml.md => 13-07-ml.md} (84%)
 rename content/chapters/13_information_theory/{13-07-mutual-info.md => 13-08-mutual-info.md} (85%)

diff --git a/.DS_Store b/.DS_Store
index 2fb1c616e8d0a006110d52899fd7e15050bab8fb..549743d35a1b005ee60121787f0bcfba7eac8d77 100644
GIT binary patch
(binary deltas for macOS Finder metadata omitted)

diff --git a/content/.DS_Store b/content/.DS_Store
GIT binary patch
(binary deltas omitted)

diff --git a/content/chapters/.DS_Store b/content/chapters/.DS_Store
index 62505c6cf6cea137c8259fcef6c86b97862e9bb9..6cb5f2a1cc65c97a62e9bcbf3493cae7b2dbdaa5 100644
GIT binary patch
(binary deltas omitted)

diff --git a/content/chapters/13_information_theory/13-02-entropy.md b/content/chapters/13_information_theory/13-02-entropy.md
new file mode 100644
index 00000000..942c4fd4
--- /dev/null
+++ b/content/chapters/13_information_theory/13-02-entropy.md
@@ -0,0 +1,15 @@
+---
+title: "Chapter 13.02: Entropy"
+weight: 13002
+---
+We continue our discussion of entropy and introduce joint entropy, the uniqueness theorem, and the maximum entropy principle.
+
+
+
+### Lecture video
+
+{{< video id="" >}}
+
+### Lecture slides
+
+{{< pdfjs file="https://github.com/slds-lmu/lecture_sl/raw/main/slides-pdf/slides-info-entropy2.pdf" >}}
\ No newline at end of file

diff --git a/content/chapters/13_information_theory/13-02-diffent.md b/content/chapters/13_information_theory/13-03-diffent.md
similarity index 82%
rename from content/chapters/13_information_theory/13-02-diffent.md
rename to content/chapters/13_information_theory/13-03-diffent.md
index 147d45de..333806df 100644
--- a/content/chapters/13_information_theory/13-02-diffent.md
+++ b/content/chapters/13_information_theory/13-03-diffent.md
@@ -1,6 +1,6 @@
 ---
-title: "Chapter 13.02: Differential Entropy"
-weight: 13002
+title: "Chapter 13.03: Differential Entropy"
+weight: 13003
 ---
 In this section, we extend the definition of entropy to the continuous case.

diff --git a/content/chapters/13_information_theory/13-03-kl.md b/content/chapters/13_information_theory/13-04-kl.md
similarity index 86%
rename from content/chapters/13_information_theory/13-03-kl.md
rename to content/chapters/13_information_theory/13-04-kl.md
index 9e772be8..d894f021 100644
--- a/content/chapters/13_information_theory/13-03-kl.md
+++ b/content/chapters/13_information_theory/13-04-kl.md
@@ -1,6 +1,6 @@
 ---
-title: "Chapter 13.03: Kullback-Leibler Divergence"
-weight: 13003
+title: "Chapter 13.04: Kullback-Leibler Divergence"
+weight: 13004
 ---
 The Kullback-Leibler divergence (KL) is an important quantity for measuring the difference between two probability distributions. We discuss different intuitions for KL and relate it to risk minimization and likelihood ratios.

diff --git a/content/chapters/13_information_theory/13-04-sourcecoding.md b/content/chapters/13_information_theory/13-05-sourcecoding.md
similarity index 81%
rename from content/chapters/13_information_theory/13-04-sourcecoding.md
rename to content/chapters/13_information_theory/13-05-sourcecoding.md
index d977103b..17b1ffb1 100644
--- a/content/chapters/13_information_theory/13-04-sourcecoding.md
+++ b/content/chapters/13_information_theory/13-05-sourcecoding.md
@@ -1,6 +1,6 @@
 ---
-title: "Chapter 13.04: Entropy and Optimal Code Length"
-weight: 13004
+title: "Chapter 13.05: Entropy and Optimal Code Length"
+weight: 13005
 ---
 In this section, we introduce source coding and discuss how entropy can be understood as optimal code length.

diff --git a/content/chapters/13_information_theory/13-05-cross-entropy-kld.md b/content/chapters/13_information_theory/13-06-cross-entropy-kld.md
similarity index 83%
rename from content/chapters/13_information_theory/13-05-cross-entropy-kld.md
rename to content/chapters/13_information_theory/13-06-cross-entropy-kld.md
index 1a740699..0d5daed6 100644
--- a/content/chapters/13_information_theory/13-05-cross-entropy-kld.md
+++ b/content/chapters/13_information_theory/13-06-cross-entropy-kld.md
@@ -1,6 +1,6 @@
 ---
-title: "Chapter 13.05: Cross-Entropy, KL and Source Coding"
-weight: 13005
+title: "Chapter 13.06: Cross-Entropy, KL and Source Coding"
+weight: 13006
 ---
 We introduce cross-entropy as a further information-theoretic concept and discuss the connection between entropy, cross-entropy, and Kullback-Leibler divergence.
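Taken together, the chapters above build up the identity H(p, q) = H(p) + D_KL(p || q) linking entropy, cross-entropy, and KL divergence. As an illustration of what they cover (not part of the patch itself), here is a minimal Python sketch that checks the identity numerically; the discrete distributions `p` and `q` are made up for this example:

```python
import numpy as np

# Two made-up discrete distributions over four outcomes (illustrative only).
p = np.array([0.5, 0.25, 0.125, 0.125])
q = np.array([0.25, 0.25, 0.25, 0.25])

def entropy(p):
    """Shannon entropy H(p) = -sum_x p(x) log2 p(x), in bits."""
    p = p[p > 0]  # convention: 0 * log 0 = 0
    return -np.sum(p * np.log2(p))

def cross_entropy(p, q):
    """Cross-entropy H(p, q) = -sum_x p(x) log2 q(x), in bits."""
    mask = p > 0
    return -np.sum(p[mask] * np.log2(q[mask]))

def kl_divergence(p, q):
    """KL divergence D_KL(p || q) = sum_x p(x) log2(p(x) / q(x))."""
    mask = p > 0
    return np.sum(p[mask] * np.log2(p[mask] / q[mask]))

print(entropy(p))           # 1.75 bits
print(cross_entropy(p, q))  # 2.0 bits
print(kl_divergence(p, q))  # 0.25 bits

# The decomposition discussed in chapter 13.06: H(p, q) = H(p) + D_KL(p || q)
assert np.isclose(cross_entropy(p, q), entropy(p) + kl_divergence(p, q))
```

Using base-2 logarithms ties the numbers to the source-coding view of chapter 13.05: H(p) is the optimal expected code length in bits, and D_KL(p || q) is the extra cost incurred by coding with the wrong distribution q.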
diff --git a/content/chapters/13_information_theory/13-06-ml.md b/content/chapters/13_information_theory/13-07-ml.md
similarity index 84%
rename from content/chapters/13_information_theory/13-06-ml.md
rename to content/chapters/13_information_theory/13-07-ml.md
index 511e4b2c..6feb14ea 100644
--- a/content/chapters/13_information_theory/13-06-ml.md
+++ b/content/chapters/13_information_theory/13-07-ml.md
@@ -1,6 +1,6 @@
 ---
-title: "Chapter 13.06: Information Theory for Machine Learning"
-weight: 13006
+title: "Chapter 13.07: Information Theory for Machine Learning"
+weight: 13007
 ---
 In this section, we discuss how information-theoretic concepts are used in machine learning and demonstrate the equivalence of KL minimization and maximum likelihood estimation, as well as how (cross-)entropy can be used as a loss function.

diff --git a/content/chapters/13_information_theory/13-07-mutual-info.md b/content/chapters/13_information_theory/13-08-mutual-info.md
similarity index 85%
rename from content/chapters/13_information_theory/13-07-mutual-info.md
rename to content/chapters/13_information_theory/13-08-mutual-info.md
index e563fa1d..2d5d6ea2 100644
--- a/content/chapters/13_information_theory/13-07-mutual-info.md
+++ b/content/chapters/13_information_theory/13-08-mutual-info.md
@@ -1,6 +1,6 @@
 ---
-title: "Chapter 13.07: Joint Entropy and Mutual Information"
-weight: 13007
+title: "Chapter 13.08: Joint Entropy and Mutual Information"
+weight: 13008
 ---
 Information theory also provides means of quantifying relations between two random variables that extend the concept of (linear) correlation. We discuss joint entropy, conditional entropy, and mutual information in this context.
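Similarly, the quantities of chapter 13.08 can be made concrete with a small sketch (again illustrative only; the joint distribution below is invented) that verifies I(X; Y) = H(X) + H(Y) - H(X, Y) together with its equivalent formulation as a KL divergence between the joint distribution and the product of its marginals:

```python
import numpy as np

# Invented joint distribution of two binary random variables X and Y
# (rows index x, columns index y).
joint = np.array([[0.4, 0.1],
                  [0.1, 0.4]])

def entropy(p):
    """Shannon entropy in bits; accepts any array of probabilities."""
    p = p[p > 0]
    return -np.sum(p * np.log2(p))

p_x = joint.sum(axis=1)  # marginal distribution of X
p_y = joint.sum(axis=0)  # marginal distribution of Y

# I(X; Y) = H(X) + H(Y) - H(X, Y)
mi = entropy(p_x) + entropy(p_y) - entropy(joint.flatten())

# Equivalently, I(X; Y) = D_KL( p(x, y) || p(x) p(y) )
mi_kl = np.sum(joint * np.log2(joint / np.outer(p_x, p_y)))

assert np.isclose(mi, mi_kl)
print(mi)  # ~0.28 bits; it would be 0 if X and Y were independent
```

Unlike (linear) correlation, this measure also picks up nonlinear dependence, which is the point of the comparison drawn in the chapter description above.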