% Sample Question document for STA256
\documentclass[12pt]{article}
%\usepackage{amsbsy} % for \boldsymbol and \pmb
%\usepackage{graphicx} % To include pdf files!
\usepackage{amsmath}
\usepackage{amsbsy}
\usepackage{amsfonts}
\usepackage[colorlinks=true, pdfstartview=FitV, linkcolor=blue, citecolor=blue, urlcolor=blue]{hyperref} % For links
\usepackage{fullpage}
%\pagestyle{empty} % No page numbers

\begin{document}
%\enlargethispage*{1000 pt}

\begin{center}
{\Large \textbf{Sample Questions: Limits}}

STA256 Fall 2019. Copyright information is at the end of the last page.
%\rule{6in}{.01in} % Width and height
\rule{6in}{.005in} % Horizontal line (Width and height)
% \vspace{3 mm}
\end{center}

\begin{enumerate}

\item Let $X_n$ have an exponential distribution with parameter $\lambda=n$.
Pick one of these and prove your answer.
\begin{itemize}
\item $X_n \stackrel{p}{\rightarrow} 0$
\item $X_n \stackrel{p}{\rightarrow} 1$
\item $X_n$ does not converge in probability to a constant.
\end{itemize}
\pagebreak
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\item {\Large Let $X_n$ have an exponential distribution with parameter
$\lambda=n$, and let $Y_n = \frac{5X_n+2}{X_n+1}$. To what target does $Y_n$
converge in probability?}
\pagebreak
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\item {\Large Let $X_n \sim$ Uniform(0,$n$). Does $X_n$ converge in
probability to a constant? Answer Yes or No and prove your answer.}
\pagebreak
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\item {\Large Let $X_n \sim$ Uniform(0,$n$) and let $Y_n = \frac{X_n}{X_n+1}$.
Prove $Y_n \stackrel{p}{\rightarrow} 1$.}
\pagebreak
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\item {\Large Let $X$ be a random variable with expected value $\mu$ and
variance $\sigma^2$, and let $Y_n = \frac{X}{n}$.
\begin{enumerate}
\item Show $Y_n \stackrel{p}{\rightarrow} 0$.
\vspace{50mm}
\item What if $X$ is Cauchy? Does the result still hold?
\end{enumerate}
}
\pagebreak
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\item {\Large Let $X_n$ have a Poisson distribution with parameter $n\lambda$,
where $\lambda>0$. This means $E(X_n)=Var(X_n) = n\lambda$. Let
$Y_n = \frac{X_n}{n}$.
\begin{enumerate}
\item For what values of $y$ is $P(Y_n=y)>0$?
\vspace{10mm}
\item Does $Y_n$ converge in probability to a constant? Answer Yes or No and
``prove'' your answer.
\end{enumerate}
} % End size
\pagebreak
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\item Let the discrete random variable $X_n$ have probability mass function
{\Large
\begin{displaymath}
\renewcommand{\arraystretch}{1.3}
p_{_{X_n}}(x) = \left\{ \begin{array}{ll} % ll means left left
\frac{1}{n}   & \mbox{for $x = -n$} \\
\frac{n-2}{n} & \mbox{for $x = 0$} \\
\frac{1}{n}   & \mbox{for $x = n$}
\end{array} \right.
\renewcommand{\arraystretch}{1.0}
\end{displaymath}
} % End size
\begin{enumerate}
\item Try using the variance rule.
\vspace{50mm}
\item Prove $X_n \stackrel{p}{\rightarrow} 0$ anyway.
\end{enumerate}
\pagebreak
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\item {\Large Let $X_1, \ldots, X_n$ be independent and identically
distributed random variables from an exponential distribution with parameter
$\lambda$, so that $E(X_i) = 1/\lambda$ and $Var(X_i) = 1/\lambda^2$. Let
$T_n = \frac{n}{\sum_{i=1}^n X_i}$. Show $T_n \stackrel{p}{\rightarrow} \lambda$.
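\vspace{5mm}

\emph{A possible sketch} (one route, not necessarily the intended one; it
assumes the Law of Large Numbers and the fact that continuous functions
preserve convergence in probability are available): write
$T_n = 1/\overline{X}_n$, where $\overline{X}_n = \frac{1}{n}\sum_{i=1}^n X_i$.
By the Law of Large Numbers,
$\overline{X}_n \stackrel{p}{\rightarrow} E(X_i) = 1/\lambda$, and
$g(t) = 1/t$ is continuous at $t = 1/\lambda > 0$, so
\begin{displaymath}
T_n = g(\overline{X}_n) \stackrel{p}{\rightarrow} g(1/\lambda) = \lambda.
\end{displaymath}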
} % End size
\pagebreak
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\item Let the pairs $(X_1, Y_1), \ldots, (X_n,Y_n)$ be selected independently
from a joint distribution with $E(X_i)=\mu_x$, $E(Y_i)=\mu_y$,
$Var(X_i)=\sigma^2_x$, $Var(Y_i)=\sigma^2_y$, and $Cov(X_i,Y_i)=\sigma_{xy}$.
Independence means that $X_i$ and $Y_i$ are independent of $X_j$ and $Y_j$ for
$i \neq j$.
% We have seen in homework that $Cov(\overline{X},\overline{Y}) = \sigma^2_{xy}/n $.
\vspace{2mm}

{\Large
Let $Z_i = aX_i + bY_i$. You would expect
$\overline{Z}_n \stackrel{p}{\rightarrow} a\mu_x + b\mu_y$. Is this true?
Answer Yes or No and say why.
} % End size
\pagebreak
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Convergence in distribution
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\item {\Large Let
$\renewcommand{\arraystretch}{1.3}
f_{_{X_n}}(x) = \left\{ \begin{array}{ll} % ll means left left
\frac{n+1}{n}x^{1/n} & \mbox{for } 0 \leq x \leq 1 \\
0                    & \mbox{otherwise} \\
\end{array} \right.
\renewcommand{\arraystretch}{1.0}$
We have $X_n \stackrel{d}{\rightarrow} X$. Find the distribution of the target
random variable $X$ from the definition.
} % End size
\pagebreak
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\item {\Large Let
$\renewcommand{\arraystretch}{1.3}
p_{_{X_n}}(x) = \left\{ \begin{array}{ll} % ll means left left
\frac{n+3}{2(n+1)} & \mbox{for $x=0$} \\
\frac{n-1}{2(n+1)} & \mbox{for $x=1$} \\
\end{array} \right.
\renewcommand{\arraystretch}{1.0}$
Show that $X_n \stackrel{d}{\rightarrow} X \sim$ Bernoulli($\theta = \frac{1}{2}$).
} % End size
\pagebreak
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\item {\Large Let $X_n \sim$ Normal($\mu,n$). Does $X_n$ converge in
distribution to a random variable? Answer Yes or No and show your work.
} % End size
% Maximum of standard exponentials goes to nothing also: HW.
% Poisson approximation to binomial was lecture, make HW.
\pagebreak
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\item {\Large Let $X$ be a degenerate random variable with $P(X=c) = 1$.
\begin{enumerate}
\item Sketch $F_{_X}(x)$.
\vspace{100mm}
\item Find the moment-generating function of $X$.
\end{enumerate}
} % End size
\pagebreak
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\item {\Large Let $X_n$ be an exponential random variable with parameter
$\lambda=n$. It seems that $X_n \stackrel{d}{\rightarrow} X$. Find the
distribution of the target random variable $X$
\begin{enumerate}
\item From the definition.
\vspace{100mm}
\item Using moment-generating functions.
\end{enumerate}
} % End size
\pagebreak
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\item {\Large Use moment-generating functions to prove the Law of Large Numbers.
} % End size
\pagebreak
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\item {\Large Let $S$ be the sum of 16 independent Uniform(0,1) random
variables. Find the approximate $P(S>12)$. You may use the fact that a
Uniform(0,1) has expected value $\frac{1}{2}$ and variance $\frac{1}{12}$.}
\pagebreak
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\item {\Large A multiple choice test has 50 questions with answers ABCD. If a
student answers completely at random, what are the chances of getting 15 or
more correct? You may use the fact that a Bernoulli($\theta$) has expected
value $\theta$ and variance $\theta(1-\theta)$.
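\vspace{5mm}

\emph{A possible setup} (a sketch only, not necessarily the intended solution;
it assumes the Central Limit Theorem and uses no continuity correction): under
pure guessing each answer is correct with probability $\theta = \frac{1}{4}$,
so the number correct (call it $X$) is a sum of 50 independent
Bernoulli($\frac{1}{4}$) random variables with mean
$50 \times \frac{1}{4} = 12.5$ and variance
$50 \times \frac{1}{4} \times \frac{3}{4} = 9.375$. The Central Limit Theorem
then gives
\begin{displaymath}
P(X \geq 15) \approx P\left(Z \geq \frac{15 - 12.5}{\sqrt{9.375}}\right)
= P(Z \geq 0.82) \approx 0.21,
\end{displaymath}
where $Z$ is standard normal.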
}
\pagebreak
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\item {\large In a walk-in medical clinic, the time a doctor spends per
patient (including paperwork) comes from an unfamiliar skewed distribution
with mean 5.1 minutes and standard deviation 4.8 minutes. Find the maximum
number of patients that should be scheduled so that the probability of
working more than a 7-hour day will be less than 5\%.
}

\end{enumerate} % End of all the questions

\vspace{150mm}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\noindent
\begin{center}\begin{tabular}{l}
\hspace{6in} \\
\hline
\end{tabular}\end{center}

This handout was prepared by
\href{http://www.utstat.toronto.edu/~brunner}{Jerry Brunner}, Department of
Mathematical and Computational Sciences, University of Toronto. It is licensed
under a
\href{http://creativecommons.org/licenses/by-sa/3.0/deed.en_US}
{Creative Commons Attribution - ShareAlike 3.0 Unported License}. Use any part
of it as you like and share the result freely. The \LaTeX~source code is
available from the course website:
\begin{center}
\href{http://www.utstat.toronto.edu/~brunner/oldclass/256f19}
{\small\texttt{http://www.utstat.toronto.edu/$^\sim$brunner/oldclass/256f19}}
\end{center}

\end{document}

% The answer is a number. Circle your answer.

% HW: a_n -> a_1 and a_n^2 -> a_2 < infty. Conditions of the variance rule
% satisfied, show a_n T_n -> a c