rework Renyi div def
RemyDegenne committed Mar 18, 2024
1 parent 22269cd commit 447f060
Showing 2 changed files with 8 additions and 25 deletions.
4 changes: 2 additions & 2 deletions blueprint/src/sections/hellinger.tex
@@ -20,8 +20,8 @@ \chapter{Hellinger distance}
\end{lemma}

\begin{proof}
-\uses{lem:renyi_eq_log_fDiv, lem:fDiv_mul, lem:fDiv_add_linear}
-By Lemma~\ref{lem:renyi_eq_log_fDiv}, $R_{1/2}(\mu, \nu) = -2 \log (1 - \frac{1}{2} D_f(\nu, \mu))$ for $f : x \mapsto -2 (\sqrt{x} - 1)$. Using Lemma~\ref{lem:fDiv_mul}, $R_{1/2}(\mu, \nu) = -2 \log (1 - D_g(\nu, \mu))$ for $g(x) = 1 - \sqrt{x}$.
+\uses{lem:fDiv_mul, lem:fDiv_add_linear}
+$R_{1/2}(\mu, \nu) = -2 \log (1 - \frac{1}{2} D_f(\nu, \mu))$ for $f : x \mapsto -2 (\sqrt{x} - 1)$. Using Lemma~\ref{lem:fDiv_mul}, $R_{1/2}(\mu, \nu) = -2 \log (1 - D_g(\nu, \mu))$ for $g(x) = 1 - \sqrt{x}$.

It suffices then to show that $H^2(\mu, \nu) = D_g(\mu, \nu)$, which is true by an application of Lemma~\ref{lem:fDiv_add_linear}.
\end{proof}
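
For reference, the rescaling step behind the appeal to Lemma~\ref{lem:fDiv_mul} is the pointwise identity below (a sketch only; it uses nothing beyond the homogeneity of $D_f$ in $f$ provided by that lemma):
\begin{align*}
% sketch: f = 2 g pointwise, so (1/2) D_f(\nu, \mu) = D_g(\nu, \mu) once lem:fDiv_mul is applied
f(x) = -2 (\sqrt{x} - 1) = 2 (1 - \sqrt{x}) = 2\, g(x),
\qquad \text{hence} \qquad
1 - \tfrac{1}{2} D_f(\nu, \mu) = 1 - D_g(\nu, \mu).
\end{align*}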
29 changes: 6 additions & 23 deletions blueprint/src/sections/renyi_divergence.tex
@@ -9,31 +9,16 @@ \chapter{Rényi divergences}
\begin{align*}
R_\alpha(\mu, \nu) = \left\{
\begin{array}{ll}
-- \log(\nu\{x \mid \frac{d \mu}{d \nu}(x) > 0\}) & \text{ for } \alpha = 0
+- \log(\nu\{x \mid \frac{d \mu}{d \nu}(x) > 0\}) & \text{for } \alpha = 0
\\
-\KL(\mu, \nu) & \text{ for } \alpha = 1
+\KL(\mu, \nu) & \text{for } \alpha = 1
\\
-\frac{1}{\alpha - 1}\log \nu\left[\left(\frac{d \mu}{d \nu}\right)^\alpha\right] & \text {for } \alpha \in (0,1) \text{ , or } \alpha > 1 \text{ and }\mu \ll \nu
-\\
-+\infty & \text {otherwise}
+\frac{1}{\alpha - 1} \log (1 + (\alpha - 1) D_f(\nu, \mu)) & \text{for } \alpha \in (0,+\infty) \backslash \{1\}
\end{array}\right.
\end{align*}
+with $f : x \mapsto \frac{1}{\alpha - 1}(x^{\alpha} - 1)$.
\end{definition}
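
For orientation, a short computation (a sketch only: it assumes $\nu$ is a probability measure, $\mu \ll \nu$ when $\alpha > 1$, and reads $D_f(\nu, \mu)$ as $\nu\left[f\left(\frac{d \mu}{d \nu}\right)\right]$, the convention under which the two displays agree) recovers the usual moment expression from the f-divergence form:
\begin{align*}
% sketch: assumes \nu is a probability measure and D_f(\nu, \mu) = \nu[f(d\mu/d\nu)]
1 + (\alpha - 1) D_f(\nu, \mu)
&= 1 + (\alpha - 1)\, \nu\left[\frac{1}{\alpha - 1}\left(\left(\frac{d \mu}{d \nu}\right)^{\alpha} - 1\right)\right] \\
&= 1 + \nu\left[\left(\frac{d \mu}{d \nu}\right)^{\alpha}\right] - 1
= \nu\left[\left(\frac{d \mu}{d \nu}\right)^{\alpha}\right],
\end{align*}
and taking $\frac{1}{\alpha - 1}\log$ of both sides gives back $\frac{1}{\alpha - 1}\log \nu\left[\left(\frac{d \mu}{d \nu}\right)^\alpha\right]$ for $\alpha \in (0,1)$, or $\alpha > 1$ and $\mu \ll \nu$.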

-\begin{lemma}
-\label{lem:renyi_eq_log_fDiv}
-%\lean{}
-%\leanok
-\uses{def:fDiv, def:Renyi}
-For $\nu$ a probability measure and either $\alpha \in (0,1)$ or $\mu \ll \nu$, $R_\alpha(\mu, \nu) = \frac{1}{\alpha - 1} \log (1 + (\alpha - 1) D_f(\nu, \mu))$ for $f : x \mapsto \frac{1}{\alpha - 1}(x^{\alpha} - 1)$, which is convex with $f(1)=0$. It is thus a monotone transformation of a f-divergence.
-
-TODO: use this for the definition?
-\end{lemma}
-
-\begin{proof}
-Unfold the definitions.
-\end{proof}

\begin{lemma}
\label{lem:renyi_symm}
%\lean{}
@@ -96,8 +81,7 @@ \section{Properties inherited from f-divergences}
\end{theorem}

\begin{proof}
-\uses{thm:fDiv_data_proc, lem:renyi_eq_log_fDiv}
-By Lemma~\ref{lem:renyi_eq_log_fDiv}, $R_\alpha(\mu, \nu) = \frac{1}{\alpha - 1} \log (1 + (\alpha - 1) D_f(\nu, \mu))$ for $f : x \mapsto \frac{1}{\alpha - 1}(x^{\alpha} - 1)$.
+\uses{thm:fDiv_data_proc}
The function $x \mapsto \frac{1}{\alpha - 1}\log (1 + (\alpha - 1)x)$ is non-decreasing and $D_f$ satisfies the DPI (Theorem~\ref{thm:fDiv_data_proc}), hence we get the DPI for $R_\alpha$.
\end{proof}
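
The monotonicity claim is a one-line derivative computation (a sketch; it is stated only on the range where $1 + (\alpha - 1) x > 0$):
\begin{align*}
% sketch: valid wherever 1 + (\alpha - 1) x > 0
\frac{d}{d x}\left(\frac{1}{\alpha - 1}\log (1 + (\alpha - 1) x)\right)
= \frac{1}{\alpha - 1} \cdot \frac{\alpha - 1}{1 + (\alpha - 1) x}
= \frac{1}{1 + (\alpha - 1) x} > 0 .
\end{align*}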

@@ -111,8 +95,7 @@ \section{Properties inherited from f-divergences}
\end{lemma}

\begin{proof}
-\uses{cor:data_proc_event, lem:renyi_eq_log_fDiv}
-By Lemma~\ref{lem:renyi_eq_log_fDiv}, $R_\alpha(\mu, \nu) = \frac{1}{\alpha - 1} \log (1 + (\alpha - 1) D_f(\nu, \mu))$ for $f : x \mapsto \frac{1}{\alpha - 1}(x^{\alpha} - 1)$.
+\uses{cor:data_proc_event}
By Corollary~\ref{cor:data_proc_event}, $D_f(\mu, \nu) \ge D_f(\mu_E, \nu_E)$, hence $R_\alpha(\mu, \nu) \ge R_\alpha(\mu_E, \nu_E)$.
\end{proof}
