% 256f19Assignment10.tex Limits
\documentclass[10pt]{article}
%\usepackage{amsbsy} % for \boldsymbol and \pmb
%\usepackage{graphicx} % To include pdf files!
\usepackage{amsmath}
\usepackage{amsbsy}
\usepackage{amsfonts}
\usepackage[colorlinks=true, pdfstartview=FitV, linkcolor=blue,
citecolor=blue, urlcolor=blue]{hyperref} % For links
\usepackage{fullpage}
% \pagestyle{empty} % No page numbers

\begin{document}
%\enlargethispage*{1000 pt}

\begin{center}
{\Large \textbf{\href{http://www.utstat.toronto.edu/~brunner/oldclass/256f19}{STA 256f19} Assignment Ten}}\footnote{Copyright information is at the end of the last page.}
\vspace{1 mm}
\end{center}

\noindent
Please read Sections 4.2 and 4.4 in the text. Also, look over your lecture notes. The following homework problems are not to be handed in. They are preparation for the final exam. Use the formula sheet.

%\vspace{5mm}

\begin{enumerate}
%%%%%%%%%%%%%%%%%%%%%%%%% Convergence in probability

\item \label{j1} Let the continuous random variable $X_n$ have density
\begin{displaymath}
f_{_{X_n}}(x) = \left\{ \begin{array}{cl} % cl means centered, left-justified
    \frac{n}{x^{n+1}} & \mbox{for } x \geq 1 \\
    0 & \mbox{otherwise}
  \end{array} \right. % Need that crazy invisible right period!
\end{displaymath}
Show $X_n \stackrel{p}{\rightarrow} 1$ from the definition.

\item Let $X_1, \ldots, X_n$ be independent Uniform(0,1) random variables, with $T_n = \min(X_i)$.
\begin{enumerate}
\item Find $F_{_{T_n}}(x)$.
\item Show $T_n \stackrel{p}{\rightarrow} 0$ from the definition.
\end{enumerate}

\item Prove Markov's inequality for a discrete random variable.

\item Chebyshev's inequality is on the formula sheet, though it may not have been mentioned in lecture.
\begin{enumerate}
\item Use Markov's inequality to prove Chebyshev's inequality.
\item Let $X \sim$ Normal(0,1). Chebyshev's inequality says that $P\{|X| \geq 2 \}$ can be no more than \underline{\hspace{10mm}}. The actual probability is \underline{\hspace{10mm}}.
\end{enumerate}

\item Use Markov's inequality to prove the variance rule.

\item Use the variance rule to prove the (weak) Law of Large Numbers.

\item Let $X$ be a random variable with expected value $\mu$ and variance $\sigma^2$, and let $Y_n = \frac{X}{n}$. Show $Y_n \stackrel{p}{\rightarrow} 0$.

\item Let $X_1, \ldots, X_n$ be a collection of independent gamma random variables with unknown parameter $\alpha$ and known $\lambda=6$. Find a random variable $T_n=g(\overline{X}_n)$ that converges in probability to $\alpha$. The statistic $T_n$ is a good way to estimate $\alpha$ from sample data.

\item Let $X_n$ have a Poisson distribution with parameter $n\lambda$, where $\lambda>0$. This means $E(X_n)=Var(X_n) = n\lambda$. Let $Y_n = \frac{X_n}{n}$. Does $Y_n$ converge in probability to a constant? Answer Yes or No and ``prove'' your answer.

%%%%%%%%%%%%%%%%%%%%%%%%% Convergence in distribution

\item Let the discrete random variable $X_n$ have probability mass function
\begin{displaymath}
p_{_{X_n}}(x) = \left\{ \begin{array}{cl}
    1/2 & \mbox{for } x=\frac{1}{n} \\
    1/2 & \mbox{for } x=1+\frac{1}{n} \\
    0 & \mbox{otherwise}
  \end{array} \right.
\end{displaymath}
\begin{enumerate}
\item Show that $X_n \stackrel{d}{\rightarrow} X \sim$ Bernoulli($\frac{1}{2}$). Hint: Consider $x<0$, $0<x<1$ and $x>1$ separately.
\item Show that $\lim_{n \rightarrow \infty} M_{_{X_n}}(t) = M_{_X}(t)$, which is another way to prove convergence in distribution.
\end{enumerate}

\item Let
$\renewcommand{\arraystretch}{1.3}
p_{_{X_n}}(x) = \left\{ \begin{array}{ll} % ll means left left
    \frac{n+3}{2(n+1)} & \mbox{for $x=0$} \\
    \frac{n-1}{2(n+1)} & \mbox{for $x=1$} \\
  \end{array} \right.
\renewcommand{\arraystretch}{1.0}$
Show that $X_n \stackrel{d}{\rightarrow} X \sim$ Bernoulli($\theta = \frac{1}{2}$).

\item Proposition: If the sequence of discrete random variables $\{X_n\}$ converges in distribution to the discrete random variable $X$, then $\displaystyle \lim_{n \rightarrow \infty} p_{_{X_n}}(x) = p_{_{X}}(x)$ for all real $x$. Either prove that this statement is true, or show by a simple counter-example that it is not true in general.

\item Sometimes, a sequence of random variables does not converge in distribution to anything. Let $X_n$ have a continuous uniform distribution on $(0,n)$. Clearly, $\lim_{n \rightarrow \infty} F_{_{X_n}}(x)=0$ for $x \leq 0$. Find $\lim_{n \rightarrow \infty} F_{_{X_n}}(x)$ for $x>0$. Is $\lim_{n \rightarrow \infty} F_{_{X_n}}(x)$ continuous? Is it a cumulative distribution function?

\item Consider a \emph{degenerate} random variable $X$, with $P(X=c)=1$.
\begin{enumerate}
\item What is $F_{_{X}}(x)$, the cumulative distribution function of $X$? Your answer must apply to all real $x$.
\item Give a formula for $M_{_{X}}(t)$, the moment-generating function of $X$.
\end{enumerate}

\item For $n = 1, 2, \ldots$, let $X_n$ have a beta distribution with $\alpha=n$ and $\beta=1$.
% so that the density of $X_n$ is $nx^{n-1}$ for $0\leq x \leq 1$, and zero otherwise.
\begin{enumerate}
\item What is $\lim_{n \rightarrow \infty} F_{_{X_n}}(x)$ for $x<1$?
\item What is $\lim_{n \rightarrow \infty} F_{_{X_n}}(x)$ for $x>1$?
\item What do you conclude?
\end{enumerate}

\item Show that if $T_n \stackrel{p}{\rightarrow} c$, then $T_n \stackrel{d}{\rightarrow} c$.

\item Show that if $T_n \stackrel{d}{\rightarrow} c$, then $T_n \stackrel{p}{\rightarrow} c$. To avoid unpleasant technicalities, you may assume that $T_n$ is continuous.

\item Let $X_n$ be a Binomial($n,\theta_n$) random variable with $\theta_n=\frac{\lambda}{n}$, so that as $n \rightarrow \infty$, $\theta_n \rightarrow 0$ in such a way that the value of $n \, \theta_n=\lambda$ remains fixed. Using moment-generating functions, find the limiting distribution of $X_n$.

\item Use moment-generating functions to prove the Law of Large Numbers. It starts like this. Let $X_1, X_2, \ldots$ be a sequence of independent random variables with $E(X_i)=\mu$ and common moment-generating function $M(t)$, and let $\overline{X}_n = \frac{1}{n} \sum_{i=1}^n X_i$. Then
\begin{displaymath}
\lim_{n \rightarrow \infty} M_{_{\overline{X}_n}}(t)
  = \lim_{n \rightarrow \infty} \exp \ln M_{_{\overline{X}_n}}(t) = \ldots
\end{displaymath}

%%%%%%%%%%%%%%%%%%%%%%%%% Applied CLT

\item Let $X_1, \ldots, X_{64}$ be independent Poisson random variables with parameter $\lambda = 2$. Using the Central Limit Theorem, find the approximate value of $\displaystyle P\left( \sum_{i=1}^{64} X_i > 100 \right)$. Answer: 0.9925

\item Let $X_1, \ldots, X_n$ be independent random variables from an unknown distribution with expected value 5.1 and standard deviation 4.8. Find the approximate probability that the sample mean will be greater than 6 for $n=25$. Answer: 0.1736
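
To see where these two answers come from, here is a sketch of the standardization (a sketch only; you should justify each step, and I am assuming a continuity correction for the integer-valued Poisson sum but none for the sample mean). The sum in the first problem has mean and variance $64 \times 2 = 128$, so
\begin{displaymath}
P\left( \sum_{i=1}^{64} X_i > 100 \right)
  \approx P\left( Z > \frac{100.5-128}{\sqrt{128}} \right)
  = P(Z > -2.43) \approx 0.9925,
\end{displaymath}
while in the second problem,
\begin{displaymath}
P\left( \overline{X}_{25} > 6 \right)
  \approx P\left( Z > \frac{6-5.1}{4.8/\sqrt{25}} \right)
  = P(Z > 0.94) \approx 0.1736.
\end{displaymath}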
\item \label{jlast} The ``normal approximation to the binomial'' says that if $X \sim$ Binomial($n,\theta$), then for large $n$,
\begin{displaymath}
Z = \frac{X-n\theta}{\sqrt{n\theta(1-\theta)}}
\end{displaymath}
may be treated as standard normal to obtain approximate probabilities. Where does this formula come from? Hint: What is the distribution of a sum of independent Bernoulli random variables?
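
As an illustration of how the approximation is used (the numbers here are invented for illustration and are not part of the question), suppose $X \sim$ Binomial($100, \frac{1}{2}$), so that $n\theta = 50$ and $n\theta(1-\theta) = 25$. Then
\begin{displaymath}
P(X \leq 60) \approx P\left( Z \leq \frac{60-50}{\sqrt{25}} \right)
  = P(Z \leq 2) \approx 0.9772.
\end{displaymath}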
\newpage
\vspace{5mm}
\hrule
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% Luai's questions
\vspace{3mm}

\item \label{L1} Suppose that $X_1,\ldots,X_{200}$ are independent continuous random variables with common density function
$$ f(x)= \left\{ \begin{array}{cc}
    2(1-x) & \ \ 0<x<1 \\
    0 & \ \ \hbox{otherwise}
  \end{array} \right. $$
\begin{enumerate}
\item [(a)] Find $E(X_i)$ and $Var(X_i)$.
\item [(b)] Compute $P\left(\sum_{i=1}^{200} X_i > 70\right)$.
\item [(c)] Now define
$$ Y_i= \left\{ \begin{array}{cc}
    1 & \ \ \hbox{if } X_i > 0.9 \\
    0 & \ \ \hbox{if } X_i \le 0.9
  \end{array} \right. $$
and set $T=Y_1+\ldots+Y_{200}$. Compute $P(T\ge3)$. \textbf{Hint:} $Y_i$ is a Bernoulli random variable and $T$ is a binomial random variable.
\end{enumerate}

\item Suppose that $X_1,\ldots,X_{100}$ are independent continuous random variables from Uniform$(-1,1)$. Find $P\left(\sum_{i=1}^{100} X_i^2 \le 40\right)$.

\item Suppose that $X_1,\ldots,X_{n}$ are independent continuous random variables from Uniform$(0,\theta)$. Let $Y_n=\max (X_1,\ldots,X_{n})$.
\begin{enumerate}
\item Prove that $\sqrt{Y_n} \stackrel{p}{\rightarrow} \sqrt{\theta}$.
\item Prove that $Z_n=n(\theta-Y_n)\stackrel{d}{\rightarrow} Z$, where $Z \sim$ Exponential($\lambda=1/\theta$).
\end{enumerate}

\item \label{Llast} Suppose that $X_1,\ldots,X_{n}$ are independent continuous random variables with cdf $F(x)$ and pdf $f(x)$. Let $Y_n=\max (X_1,\ldots,X_{n})$. Prove that $Z_n=n(1-F(Y_n))\stackrel{d}{\rightarrow} Z$, where $Z \sim$ Exponential($\lambda=1$).

\end{enumerate} % End of all the questions

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\vspace{3mm}
\hrule
\vspace{3mm}

\noindent Questions \ref{j1} through \ref{jlast} were prepared by \href{http://www.utstat.toronto.edu/~brunner}{Jerry Brunner}, Department of Mathematical and Computational Sciences, University of Toronto. They are licensed under a \href{http://creativecommons.org/licenses/by-sa/3.0/deed.en_US}{Creative Commons Attribution - ShareAlike 3.0 Unported License}. Questions \ref{L1} through \ref{Llast} were prepared by Luai Al Labadi, Department of Mathematical and Computational Sciences, University of Toronto. I am not sure what his preferences are, so all rights to Luai's questions are reserved. The \LaTeX~source code is available from the course website:
\begin{center}
\href{http://www.utstat.toronto.edu/~brunner/oldclass/256f19}
{\small\texttt{http://www.utstat.toronto.edu/$^\sim$brunner/oldclass/256f19}}
\end{center}

\end{document}