\documentclass{article}
\usepackage{amsmath}
\begin{document}
\section*{KL Divergence and \(\alpha\)-Connections}
The \(\alpha\)-divergence between two probability distributions \( P \) and \( Q \) is defined, for \(\alpha \neq \pm 1\), as:
\[
D_\alpha(P \| Q) = \frac{4}{1 - \alpha^2} \left(1 - \sum_i P_i^{\frac{1+\alpha}{2}} Q_i^{\frac{1-\alpha}{2}}\right)
\]
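As a quick numerical sanity check (the distributions and the value of \(\alpha\) here are arbitrary illustrative choices), take \(P = (\tfrac{1}{2}, \tfrac{1}{2})\), \(Q = (\tfrac{1}{4}, \tfrac{3}{4})\), and \(\alpha = \tfrac{1}{2}\), so the exponents are \(\tfrac{3}{4}\) and \(\tfrac{1}{4}\) and the prefactor is \(4/(1 - \tfrac{1}{4}) = \tfrac{16}{3}\):
\[
D_{1/2}(P \| Q) = \frac{16}{3} \left(1 - \left(\tfrac{1}{2}\right)^{3/4} \left[\left(\tfrac{1}{4}\right)^{1/4} + \left(\tfrac{3}{4}\right)^{1/4}\right]\right) \approx 0.14
\]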
\subsection*{Special Cases}
\subsubsection*{KL Divergence (\(\alpha = 1\))}
In the limit \(\alpha \to 1\), the \(\alpha\)-divergence becomes the KL divergence:
\[
D_1(P \| Q) = \sum_i P_i \log \frac{P_i}{Q_i}
\]
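The prefactor \(4/(1 - \alpha^2)\) is singular at \(\alpha = 1\), so this identity is a limit statement; here is a short sketch, writing \(\varepsilon = \frac{1 - \alpha}{2} \to 0\) so that the exponents become \(1 - \varepsilon\) and \(\varepsilon\):
\[
\sum_i P_i^{1-\varepsilon} Q_i^{\varepsilon} = \sum_i P_i \, e^{\varepsilon \log (Q_i / P_i)} = 1 + \varepsilon \sum_i P_i \log \frac{Q_i}{P_i} + O(\varepsilon^2),
\]
while \(\frac{4}{1 - \alpha^2} = \frac{1}{\varepsilon (1 - \varepsilon)}\), so
\[
D_\alpha(P \| Q) = \frac{1}{\varepsilon (1 - \varepsilon)} \left(\varepsilon \sum_i P_i \log \frac{P_i}{Q_i} + O(\varepsilon^2)\right) \xrightarrow[\varepsilon \to 0]{} \sum_i P_i \log \frac{P_i}{Q_i}.
\]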
\subsubsection*{Reverse KL Divergence (\(\alpha = -1\))}
In the limit \(\alpha \to -1\), the \(\alpha\)-divergence becomes the reverse KL divergence:
\[
D_{-1}(P \| Q) = \sum_i Q_i \log \frac{Q_i}{P_i}
\]
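This is immediate from the \(\alpha \leftrightarrow -\alpha\) symmetry of the definition: replacing \(\alpha\) by \(-\alpha\) leaves the prefactor unchanged and swaps the exponents on \(P\) and \(Q\), so
\[
D_{-\alpha}(P \| Q) = \frac{4}{1 - \alpha^2} \left(1 - \sum_i P_i^{\frac{1-\alpha}{2}} Q_i^{\frac{1+\alpha}{2}}\right) = D_\alpha(Q \| P),
\]
and in particular \(D_{-1}(P \| Q) = D_1(Q \| P)\), the KL divergence with its arguments exchanged.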
\subsubsection*{Hellinger Distance (\(\alpha = 0\))}
When \(\alpha = 0\), the \(\alpha\)-divergence is related to the Hellinger distance:
\[
D_0(P \| Q) = 4 \left(1 - \sum_i \sqrt{P_i Q_i}\right)
\]
The squared Hellinger distance is:
\[
H^2(P, Q) = 1 - \sum_i \sqrt{P_i Q_i}
\]
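Expanding the square shows this agrees with the perhaps more familiar form of the squared Hellinger distance, using \(\sum_i P_i = \sum_i Q_i = 1\):
\[
\frac{1}{2} \sum_i \left(\sqrt{P_i} - \sqrt{Q_i}\right)^2 = \frac{1}{2} \left(\sum_i P_i - 2 \sum_i \sqrt{P_i Q_i} + \sum_i Q_i\right) = 1 - \sum_i \sqrt{P_i Q_i},
\]
so that \(D_0(P \| Q) = 4 H^2(P, Q)\).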
\end{document}