\documentclass[11pt]{article}
\input{math_macros}
\usepackage{amssymb}
\usepackage[margin=1in]{geometry}
\begin{document}
\begin{flushleft}
\fbox{
\begin{minipage}{\textwidth}
{\bf CS 174: Combinatorics and Discrete Probability} \hfill Fall 2012 \bigskip \\
\centering{\Large Homework 3} \medskip \\
\centering{Due: Thursday, September 20, 2012 by {\bf 9:30am}}
\end{minipage}
} \bigskip \\
\end{flushleft}
\noindent{\it \textbf{Instructions}}: {\it You should upload your homework
solutions on bspace. You are strongly encouraged to type out your solutions
using \LaTeX . You may also want to consider using the equation editor in an
office suite if you are not familiar with \LaTeX . If you must handwrite
your homeworks, please write clearly and legibly. We will not grade homeworks
that are unreadable. You are encouraged to work in groups of 2--4, but you {\bf
must} write solutions on your own. Please review the homework policy carefully
on the class homepage.} \medskip \\
\noindent {\bf Note}: You \emph{must} justify all your answers. In particular, you will get
no credit if you simply write the final answer without any explanation. \medskip
\\
\noindent {\bf Problem 1}. {\it (Exercise 3.5 from MU)} Given any two random
variables $X$ and $Y$, by the linearity of expectation we have $\E[X-Y] = \E[X]
- \E[Y]$. Prove that, when $X$ and $Y$ are independent, $\var[X - Y] = \var[X] +
\var[Y]$. \medskip \\
\noindent {\bf Problem 2}. {\it (Exercise 3.15 from MU)} Let the random variable
$X$ be representable as a sum of random variables $X = \sum_{i=1}^n X_i$. Show
that, if $\E[X_i X_j] = \E[X_i]\E[X_j]$ for every pair of $i$ and $j$ with $1
\leq i < j \leq n$, then $\var[X] = \sum_{i=1}^n \var[X_i]$. \medskip \\
\noindent {\bf Problem 3}. {\it (Exercise 3.19 from MU)} Let $Y$ be a non-negative
integer-valued random variable with positive expectation. Prove
\[ \frac{\E[Y]^2}{\E[Y^2]} \leq \Pr[Y \neq 0] \leq \E[Y]. \] \medskip \\
\noindent {\bf Problem 4}. {\it (Exercise 3.20 from MU)}
\begin{enumerate}
%
\item[(a)] Chebyshev's inequality uses the variance of a random variable to
bound its deviation from its expectation. We can also use higher moments.
Suppose that we have a random variable $X$ and an even integer $k$ for which
$\E[(X - \E[X])^k]$ is finite. Show that
%
\[ \Pr\left( |X - \E[X]| \geq t \sqrt[k]{\E[(X - \E[X])^k]} \right) \leq
\frac{1}{t^k} \]
%
\item[(b)] Why is it difficult to derive a similar inequality when $k$ is odd?
%
\end{enumerate} \medskip
\noindent {\bf Problem 5}. {\it (Exercise 3.21 from MU)} A fixed point of a
permutation $\pi \colon [1, n] \rightarrow [1, n]$ is a value $x$ for which $\pi(x) = x$.
Find the variance in the number of fixed points of a permutation chosen
uniformly at random from all permutations. ({\it Hint}: Let $X_i$ be $1$ if
$\pi(i) = i$, so that $\sum_{i=1}^n X_i$ is the number of fixed points. You
cannot use linearity to find $\var[\sum_{i=1}^n X_i]$, but you can calculate it
directly.) \medskip \\
\noindent {\bf Problem 6}. {\it (Exercise 3.25 from MU)} The weak law of large
numbers states that, if $X_1, X_2, X_3, \ldots$ are independent and identically
distributed random variables with mean $\mu$ and standard deviation $\sigma$,
then for any constant $\epsilon > 0$ we have
%
\[ \lim_{n \rightarrow \infty} \Pr \left( \left| \frac{X_1 + X_2 + \cdots +
X_n}{n} - \mu \right| > \epsilon \right) = 0. \]
Use Chebyshev's inequality to prove the weak law of large numbers.
\end{document}