\documentclass[11pt]{article}
\input{headers02}
\lhead{CS 585-Theoretical CS Toolkit, SPRING 2023}
\rhead{Name: Type your name} %%% <-- REPLACE Hemanta K. Maji WITH YOUR NAME HERE
\usepackage[strict]{changepage}
\newcommand{\nextoddpage}{\checkoddpage\ifoddpage{\ \newpage\ \newpage}\else{\ \newpage}\fi}
\begin{document}
\title{Homework 2}
\date{}
\maketitle
\thispagestyle{fancy}
\pagestyle{fancy}
{\bfseries Collaborators:} \newline
\begin{enumerate}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%% PROBLEM 1 %%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\item {\bfseries Sum of an Interesting Random Variable.} (20 points)
Let $\X$ be the random variable over the set of all natural numbers $\{ 1, 2, 3, \dotsc \}$ such that, for any natural number $i$, we have
%%% NOTE(review): the pmf $3^{-i}$ sums to $1/2$ over $\{1,2,3,\dotsc\}$, not $1$; normalized by $2$ so the probabilities sum to one. Confirm against the intended problem statement.
$$\probX{\X=i}=2\cdot 3^{-i}.$$
Let $\S_n = \X\p1+\X\p2+\dotsb+\X\p n$, where $\X\p1,\X\p2,\dotsc,\X\p n$ are independent and identical to $\X$.
\begin{itemize}
\item (5 points) What is $\EX{\S_n}$?
\item (15 points) Upper-bound the following probability
$$\probX{\S_n-\EX{\S_n}\geq E} $$
\end{itemize}
\noindent{\bfseries Solution.}\newline
\ %%% <-- ERASE THIS LINE AND WRITE YOUR SOLUTION HERE
\newpage
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%% PROBLEM 2 %%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\item {\bfseries Coin-tossing: Word Problem.} (20 points)
Suppose you have access to a coin that outputs heads with probability $1/2$ and outputs tails with probability $1/2$.
Let $\S_n$ represent the {\em number of coin tosses needed} to see exactly $n$ heads.
\begin{itemize}
\item (5 points) What is $\EX{\S_n}$?
\item (15 points) Upper-bound the following probability
$$\probX{\S_n-\EX{\S_n} \geq E} $$
\end{itemize}
\noindent{\bfseries Solution.}\newline
\ %%% <-- ERASE THIS LINE AND WRITE YOUR SOLUTION HERE
\newpage
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%% PROBLEM 3 %%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\item {\bfseries Sum of Poisson.}
(25 points) Let $\Y$ be the random variable over sample space $\{0,1,2,\dotsc\}$ such that $\Pr[\Y=k]=\frac{e^{-\mu}\mu^k}{k!}$, for all $k\in\{0,1,2,\dotsc\}$.
This distribution is the {\em Poisson distribution} with parameter $\mu$.
\begin{itemize}
\item (3 points) Prove that the mean of the ``Poisson distribution with parameter $\mu$'' is equal to $\mu$.
\item (7 points) Prove that if $\Y_1$ and $\Y_2$ are independent random variables with Poisson distributions of parameters $\mu_1$ and $\mu_2$ respectively, then the random variable $\Y_1+\Y_2$ also has a Poisson distribution with parameter $(\mu_1+\mu_2)$.
\item (15 points) Let $\X$ be the Poisson distribution with mean $m/n$.
Let $\S_n \defeq \X\p1+\X\p2+\dotsb+\X\p n$, where $\X\p1,\X\p2,\dotsc,\X\p n$ are all independent and identical to $\X$.
Upper-bound the following probability
$$\probX{\S_n - \EX{\S_n} \geq E} $$
\end{itemize}
\noindent{\bfseries Solution.}\newline
\ %%% <-- ERASE THIS LINE AND WRITE YOUR SOLUTION HERE
\newpage
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%% PROBLEM 4 %%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\item {\bfseries Another proof for Chernoff bound.} (15 points)
Consider the following simple type of Chernoff Bound:\\
Suppose $\S_n=\sum_{i=1}^n \X\p i$ where $\X\p1,\X\p2,\dotsc,\X\p n$ are i.i.d.\ Bernoulli random variables such that $\X=\bern{p}$. Then, for any $\eps>0$, the following Chernoff bound states:
$$\Pr[\S_n\geq n(p+\eps)]\leq \exp\left({-n\kl{p+\eps}{p}}\right).$$
To prove the inequality above, we define i.i.d.\ Bernoulli random variables ${\X'}\p1,{\X'}\p2,\dotsc,{\X'}\p n$ such that ${\X'}=\bern{p+\eps}$.
Define $\S'_n\defeq \sum_{i=1}^{n}{\X'}\p i$.
\begin{itemize}
\item (3 points) Define $h_k\defeq\frac{\Pr[\S'_n=k]}{\Pr[\S_n=k]}$ and obtain a simplified expression for $h_k$.
\item (7 points) For any $k\geq n(p+\eps)$, prove that $h_k\geq \exp\left({n\kl{p+\eps}{p}} \right)$.
\item (5 points) Use the inequality above to prove the Chernoff bound $$\Pr[\S_n\geq n(p+\eps)]\leq \exp\left({-n\kl{p+\eps}{p}}\right).$$
\end{itemize}
\noindent{\bfseries Solution.}\newline
\newpage
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%% PROBLEM 5 %%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\item {\bfseries Random Walk in 2-D.} (20 points)
Suppose an insect starts at $(0,0)$ at time $t=0$.
At time $t$, its position is described by $(\X(t),\Y(t))$.
At the next time step $t+1$, the insect uniformly at random moves to (a) $(\X(t)+3,\Y(t))$,
(b) $(\X(t)-3,\Y(t))$,
(c) $(\X(t),\Y(t)+3)$, or
(d) $(\X(t),\Y(t)-3)$.
State (5 points) and prove (15 points) a theorem that bounds how far from the origin the insect is at time $t=n$.

\noindent{\bfseries Solution.}\newline
\newpage
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%% PROBLEM 6 %%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\item{\bfseries Negatively Correlated Random Variables.} (20 points) Suppose $\X\colon \Omega \to \bbZ$ is a discrete random variable.
Let $f\colon \bbZ \to \bbZ$ and $g\colon
\bbZ \to \bbZ$ be two increasing and decreasing functions, respectively.
Define random variables $\bbR\defeq f(\X)$ and $\bbS\defeq g(\X)$ and assume that $\bbE[\bbR^2]<\infty$ and $\bbE[\bbS^2]<\infty$.
Prove that $\bbR$ and $\bbS$ are negatively correlated, \ie, $\bbE[\bbR\cdot\bbS] \leq \bbE[\bbR] \cdot \bbE[\bbS]$.

\noindent{\bfseries Solution.}\newline
\newpage
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%% PROBLEM 7 %%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\item{\bfseries Chernoff bound for negatively correlated Bernoulli random variables.} \newline(Extra credit: 15 points)\newline
Consider {\em negatively correlated} random variables $(X_1,X_2,\dotsc,X_n)$, such that $X_i\in\zo$, for all $i\in\{1,2,\dotsc,n\}$.
Define $p_i = \bbE[X_i]$, for all $i\in\{1,2,\dotsc,n\}$, and $p=(p_1+p_2+\dotsb+p_n)/n$.
Prove that $$ \Pr\left[\sum_{i=1}^n X_i \geq (p+\eps)n\right] \leq \exp\left(-n\cdot\kl{p+\eps}{p}\right).$$
{\bfseries Useful facts.}
\begin{itemize}
\item Binary random variables:
Consider an arbitrary random variable $X\in\{0,1\}$.
Note that the random variable $X^k$ is identical to the random variable $X$, for all $k\in\{1,2,\dotsc\}$.
\item Negative correlation:
For any $I \subseteq \{1,2,\dotsc,n\}$, the negative correlation of $(X_1,X_2,\dotsc,X_n)$ implies that
$$ \bbE\left[\prod_{i\in I} X_i\right] \leq \prod_{i\in I} \bbE\left[X_i\right].$$
\item Moment generating function:
Note that
$$\exp\left(h\sum_{i=1}^n X_i \right) = \sum_{k\geq 0} \frac{h^k}{k!} \cdot \left(\sum_{i=1}^n X_i\right)^k$$
\end{itemize}
\noindent{\bfseries Solution.}\newline
\end{enumerate}
\end{document}