\documentclass[11pt]{article}
\input{headers01}
\lhead{CS 592--ATK, SPRING 2022}
\rhead{Name: Hemanta K. Maji} %%% <-- REPLACE Hemanta K. Maji WITH YOUR NAME HERE
\usepackage[strict]{changepage}
\newcommand{\nextoddpage}{\checkoddpage\ifoddpage{\ \newpage\ \newpage}\else{\ \newpage}\fi}
\begin{document}
\title{Homework 1}
\date{}
\maketitle
\thispagestyle{fancy}
\pagestyle{fancy}
\begin{enumerate}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%% PROBLEM 1 %%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\item {\bfseries Upper-bound on Entropy.} (20 points)
Let $\Omega=\{1,2,\dotsc,N\}$.
Suppose $\X$ is a random variable over the sample space $\Omega$.
For shorthand, let us use $p_i = \probX{\X=i}$, for each $i\in\Omega$.
The entropy of the random variable $\X$ is defined to be the following function.
\[ \HH{\X} \defeq \sum_{i\in\Omega} -p_i\ln p_i. \]
Use Jensen's inequality on the function $f(x)=\ln x$ to prove the following inequality.
\[ \HH{\X} \leq \ln N. \]
Furthermore, equality holds if and only if we have $p_1=p_2=\dotsb=p_N$.
\noindent{\bfseries Solution.}\newline
\ %%% <-- ERASE THIS LINE AND WRITE YOUR SOLUTION HERE
\newpage
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%% PROBLEM 2 %%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\item {\bfseries Log-sum Inequality.} (20 points)
Let $\{a_1,\dotsc,a_N\}$ and $\{b_1,\dotsc,b_N\}$ be two sets of positive real numbers.
Use Jensen's inequality to prove the following inequality
\[ \sum_{i=1}^N a_i \ln \frac{a_i}{b_i} \geq A\ln\frac{A}{B}, \]
where $A = \sum_{i=1}^Na_i$ and $B=\sum_{i=1}^Nb_i$.
Furthermore, equality holds if and only if $a_i/b_i$ is identical for all $i\in\{1,\dotsc,N\}$.
\noindent{\bfseries Solution.}\newline
\ %%% <-- ERASE THIS LINE AND WRITE YOUR SOLUTION HERE
\newpage
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%% PROBLEM 3 %%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\item {\bfseries Approximating Square-root.} (20 points)
Our objective is to find a (meaningful and tight) upper-bound for $f(x) = (1-x)^{1/2}$ using a quadratic function of the form
\[ g(x) = 1 - \alpha x - \beta x^2. \]
Use the Lagrange form of Taylor's remainder theorem on $f(x)$ around $x=0$ to obtain the function $g(x)$.
\noindent{\bfseries Solution.}\newline
\ %%% <-- ERASE THIS LINE AND WRITE YOUR SOLUTION HERE
\newpage
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%% PROBLEM 4 %%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\item {\bfseries Lower-bounding Logarithm Function.} (20 points)
By Taylor's Theorem we have seen that the following upper-bound is true.
\begin{boxedalgo}
For all $\eps\in[0,1]$ and integer $k\geq 1$, we have
\[ \ln(1-\eps) \leq -\eps - \frac{\eps^2}{2} - \dotsb - \frac{\eps^k}{k}. \]
\end{boxedalgo}
We are interested in obtaining a tight lower-bound for $\ln(1-\eps)$.
Prove the following lower-bound.
\begin{boxedalgo}
For all $\eps\in[0,1/2]$ and integer $k\geq 1$, we have
\[ \ln(1-\eps) \geq \left(-\eps - \frac{\eps^2}{2} - \dotsb - \frac{\eps^k}{k}\right) - \frac{\eps^k}{k}. \]
\end{boxedalgo}
(For visualization of this bound, follow this \href{https://www.desmos.com/calculator/o3iwil80fp}{link})
\noindent{\bfseries Solution.}\newline
\ %%% <-- ERASE THIS LINE AND WRITE YOUR SOLUTION HERE
\newpage
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%% PROBLEM 5 %%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\item {\bfseries Using Stirling Approximation.} (20 points)
Suppose we have a coin that outputs heads with probability $p$ and outputs tails with probability $q=1-p$.
We toss this coin (independently) $n$ times and record each outcome.
Let $\H$ be the random variable representing the number of heads in this experiment.
Note that the probability that we get $k$ heads is given by the following expression.
\[ \probX{\H=k} = \binom{n}{k} p^k q^{n-k}. \]
Assume that $k > pn$, and let us denote $p'\defeq k/n=(p+\eps)$.
Using the Stirling approximation in the lecture notes, prove the following bound.
\[ \frac{1}{\sqrt{8n p'(1-p')}}\exp\left(-n\kl{p'}{p}\right) \leq \probX{\H=k} \leq \frac{1}{\sqrt{2\pi n p'(1-p')}}\exp\left(-n\kl{p'}{p}\right), \]
where $\kl{a}{b}$ (referred to as the Kullback--Leibler divergence) is defined as
\[ \kl{a}{b} \defeq a\ln\frac{a}{b}+(1-a)\ln\frac{1-a}{1-b}. \]
\noindent{\bfseries Solution.}\newline
\ %%% <-- ERASE THIS LINE AND WRITE YOUR SOLUTION HERE
\newpage
\end{enumerate}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%% PLEASE LIST COLLABORATORS BELOW %%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
{\bfseries Collaborators :} \newline
% ENTER THEIR NAMES HERE
\end{document}