% 256f19Assignment9.tex Moment-generating functions \documentclass[11pt]{article} %\usepackage{amsbsy} % for \boldsymbol and \pmb %\usepackage{graphicx} % To include pdf files! \usepackage{amsmath} \usepackage{amsbsy} \usepackage{amsfonts} \usepackage[colorlinks=true, pdfstartview=FitV, linkcolor=blue, citecolor=blue, urlcolor=blue]{hyperref} % For links \usepackage{fullpage} % \pagestyle{empty} % No page numbers \begin{document} %\enlargethispage*{1000 pt} \begin{center} {\Large \textbf{\href{http://www.utstat.toronto.edu/~brunner/oldclass/256f19}{STA 256f19} Assignment Nine}}\footnote{Copyright information is at the end of the last page.} \vspace{1 mm} \end{center} \noindent Please read Section 3.4 in the text. Also, look over your lecture notes. The following homework problems are not to be handed in. They are preparation for Term Test 3 and the final exam. Use the formula sheet. %\vspace{5mm} \begin{enumerate} %%%%%%%% Derive, generate moments \item \label{j1} Let $X$ have a moment-generating function $M_{_X}(t)$ and let $a$ be a constant. Show $M_{_{aX}}(t) = M_{_X}(at)$. \item Let $X$ have a moment-generating function $M_{_X}(t)$ and let $a$ be a constant. Show $M_{_{a+X}}(t) = e^{at}M_{_X}(t)$. \item Let $X_1$ and $X_2$ be independent, discrete random variables, and let $Y = g(X_1)+h(X_2)$. Show $M_{_Y}(t) = M_{_{g(X_1)}}(t) \, M_{_{h(X_2)}}(t)$. Because the random variables are discrete, you will add rather than integrate. \item In the following table, derive the moment-generating functions (given on the formula sheet), and then use them to obtain the expected values and variances. To make the task shorter, notice that the Bernoulli is a special case of the binomial, and that the exponential and chi-squared distributions are special cases of the gamma. Chi-squared is a gamma with $\alpha=\nu/2$ and $\lambda = \frac{1}{2}$; exponential is a gamma with $\alpha=1$. Do the general cases first and then just write the answer for the special cases. 
\renewcommand{\arraystretch}{1.5} \begin{tabular}{|l|c|c|c|} \hline \textbf{Distribution} & \textbf{MGF} $M_X(t)$ & $E(X)$ & $Var(X)$ \\ \hline Bernoulli ($\theta$) & & & \\ \hline Binomial ($n,\theta$) & & & \\ \hline %Geometric ($p$) & & & \\ \hline Poisson ($\lambda$) & & & \\ \hline Exponential ($\lambda$) & & & \\ \hline Gamma ($\alpha,\lambda$) & & & \\ \hline Normal ($\mu,\sigma^2$) & & & \\ \hline Chi-squared ($\nu$) & & & \\ \hline \end{tabular} \renewcommand{\arraystretch}{1.0} \item Let $X$ be a geometric random variable with parameter $\theta$. \begin{enumerate} \item Find the moment-generating function. \item Differentiate to obtain $E(X)$. \end{enumerate} %%%%%%%% Identify to get distribution \item Let $X \sim N(\mu,\sigma^2)$. Show $Z = \frac{X-\mu}{\sigma} \sim N(0,1)$ using moment-generating functions. \item Let $X_1 \sim N(\mu_1,\sigma^2_1)$ and $X_2 \sim N(\mu_2,\sigma^2_2)$ be independent. Find the distribution of $Y = X_1+3X_2$. \item Let $X_1, \ldots, X_n$ be independent Bernoulli($\theta$) random variables. Find the distribution of $Y = \sum_{i=1}^n X_i$. \item Let $X_1, \ldots, X_n$ be independent Normal($\mu,\sigma^2$) random variables. Find the distribution of the sample mean $\overline{X} = \frac{1}{n}\sum_{i=1}^n X_i$. \item Let $X_1, \ldots, X_n$ be independent Gamma($\alpha,\lambda$) random variables. Find the distribution of the sample mean $\overline{X} = \frac{1}{n}\sum_{i=1}^n X_i$. \item Let $Z \sim N(0,1)$ and let $Y = Z^2$. Find the distribution of $Y$ using moment-generating functions. \item \label{jlast} Let $X$ be a \emph{degenerate} random variable with $P(X=\mu) = 1$. \begin{enumerate} \item Find the moment-generating function. \item Differentiate to obtain $E(X)$ and $Var(X)$. Do these answers make sense? \item Comparing this to the moment-generating function of a normal, one can say that in a weird way, a degenerate distribution is normal with variance \underline{\hspace{10mm}}. 
\end{enumerate} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% Luai's questions \hrule \item \label{L1} Suppose that $X$ and $Y$ are discrete independent random variables with the following moment-generating functions: $$M_X(t)=E(e^{tX})=\frac{1}{6}e^{t}+\frac{2}{6}e^{2t}+\frac{3}{6}e^{3t}$$ $$M_Y(t)=E(e^{tY})=\frac{1}{10}e^{-t}+\frac{4}{10}e^{2t}+\frac{1}{2}e^{3t}.$$ Using the moment-generating function, find the distribution of \begin{enumerate} \item $Z=X+Y$. \item $U=X-Y$. \end{enumerate} \item Let $X$ and $Y$ be discrete random variables such that $$p_X(x)=\frac{1}{3}, \ \ x=-1,0,1$$ and $$p_Y(y)=\frac{1}{2}, \ \ y=2,4.$$ Let $Z=X+Y$. \begin{enumerate} \item Using the probability mass functions of $X$ and $Y$, find the probability mass function of $Z$. \item Find the moment-generating function of $Z$. \item Using part (b), find the probability mass function of $Z$. Does your answer agree with (a)? \end{enumerate} \item Let $X$ and $Y$ be independent random variables, both with Poisson($\lambda$) distribution, for some $\lambda>0$. Define $Z=X+Y$. \begin{enumerate} \item Find the distribution of $Z$ by using the moment-generating function. \item For any non-negative integer $n$, find the conditional probability mass function of $X$ given $Z=n$. \item State the name of the conditional distribution of $X$ given $Z=n$. \end{enumerate} \item Let $X$ be a continuous random variable with pdf $f(x)=k e^{-|x|}, -\infty