% \documentclass[serif]{beamer} % Serif for Computer Modern math font.
\documentclass[serif, handout]{beamer} % Handout to ignore pause statements.
\hypersetup{colorlinks,linkcolor=,urlcolor=red}
\usefonttheme{serif} % Looks like Computer Modern for non-math text -- nice!
\setbeamertemplate{navigation symbols}{} % Suppress navigation symbols
% \usetheme{Berlin} % Displays sections on top
\usetheme{Frankfurt} % Displays section titles on top: Fairly thin but still swallows some material at bottom of crowded slides
%\usetheme{Berkeley}
\usepackage[english]{babel}
\usepackage{amsmath} % for binom
\usepackage{amsfonts} % for \mathbb{R} The set of reals
% \usepackage{graphicx} % To include pdf files!
% \definecolor{links}{HTML}{2A1B81}
% \definecolor{links}{red}
\setbeamertemplate{footline}[frame number]
\mode<presentation>

\title{Expected Value, Variance and Covariance (Sections 3.1--3.3)\footnote{
This slide show is an open-source document. See last slide for copyright information.}}
\subtitle{STA 256: Fall 2019}
\date{} % To suppress date

\begin{document}

\begin{frame}
\titlepage
\end{frame}

\begin{frame}
\frametitle{Overview}
\tableofcontents
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Expected Value}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\begin{frame}
\frametitle{Definition for Discrete Random Variables}
%\framesubtitle{Discrete or Continuous}

The expected value of a discrete random variable is \pause
{\LARGE
\begin{displaymath}
E(X) = \sum_x x \, p_{_X}(x)
\end{displaymath}
\pause
} % End size

\begin{itemize}
\item Provided $\sum_x |x| \, p_{_X}(x) < \infty$. \pause If the sum diverges, the expected value does not exist. \pause
\item Existence is only an issue for infinite sums (and integrals over infinite intervals).
\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Expected value is an average} \pause
%\framesubtitle{}

\begin{itemize}
\item Imagine a very large jar full of balls. \pause This is the population. \pause
\item The balls are numbered $x_1, \ldots, x_N$. \pause These are measurements carried out on members of the population. \pause
\item Suppose for now that all the numbers are different. \pause
\item A ball is selected at random; all balls are equally likely to be chosen. \pause
\item Let $X$ be the number on the ball selected. \pause
\item $P(X = x_i) = \frac{1}{N}$. \pause
\end{itemize}

\begin{eqnarray*}
E(X) & = & \sum_x x \, p_{_X}(x) \\ \pause
& = & \sum_{i=1}^N x_i \frac{1}{N} \\ \pause
& = & \frac{\sum_{i=1}^N x_i}{N} \\
\end{eqnarray*}
% The common average, or arithmetic mean.

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{For the jar full of numbered balls, $E(X) = \frac{\sum_{i=1}^N x_i}{N}$}
%\framesubtitle{}

{\footnotesize
\begin{itemize}
\item This is the common average, or arithmetic mean. \pause
\item Suppose there are ties. \pause
\item Unique values are $v_i$, for $i=1, \ldots, n$. \pause
\item Say $n_1$ balls have value $v_1$, \pause and $n_2$ balls have value $v_2$, and \ldots \pause $n_n$ balls have value $v_n$. \pause
\item Note $n_1 + \cdots + n_n = N$\pause, and $P(X=v_j) = \frac{n_j}{N}$.
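\pause
% Added illustration (hypothetical numbers, not from the text) of the tie notation:
\item For example, $N = 5$ balls numbered $1, 1, 1, 2, 2$ have $n = 2$ unique values: $v_1 = 1$ with $n_1 = 3$, and $v_2 = 2$ with $n_2 = 2$.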
\pause
\end{itemize}

\begin{eqnarray*}
E(X) & = & \frac{\sum_{i=1}^N x_i}{N} \\ \pause
& = & \sum_{j=1}^n n_j v_j \frac{1}{N} \\ \pause
& = & \sum_{j=1}^n v_j \frac{n_j}{N} \\ \pause
& = & \sum_{j=1}^n v_j P(X=v_j) \\
\end{eqnarray*}
} % End size

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Compare $E(X)=\sum_{j=1}^n v_j P(X=v_j) \mbox{ and } \sum_x x \, p_{_X}(x)$} \pause
%\framesubtitle{}

\begin{itemize}
\item Expected value is a generalization of the idea of an average, or mean. \pause
\item Specifically, a \emph{population} mean.
\item It is often just called the ``mean."
\end{itemize}

\end{frame}

% Gambling here!
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Gambling Interpretation}
%\framesubtitle{}

\begin{itemize}
\item Play a game for money. \pause
\item Could be a casino game, or a business game like placing a bid on a job.
\item Let $X$ be the return -- that is, profit. \pause
\item Could be negative. \pause
\item Play the game over and over (independently). \pause
\item The long-term average return is $E(X)$. \pause
\item This follows from the Law of Large Numbers, a theorem that will be proved later.
\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Fair Game}
%\framesubtitle{}

{\LARGE
\emph{Definition:} A ``fair" game is one with expected value equal to zero.
} % End size

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Rational Behaviour}
%\framesubtitle{}

\begin{itemize}
\item Maximize expected return (it does not have to be money). \pause
\item At least, don't play any games with a negative expected value.
\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Example}
%\framesubtitle{}

\begin{itemize}
\item Place a \$20 bet, roll a fair die.
\item If it's a 6, you get your \$20 back and an additional \$100.
\item If it's not a 6, you lose your \$20.
\item Is this a fair game? \pause
\end{itemize}

\begin{eqnarray*}
E(X) & = & (-20)\frac{5}{6} + (100)\frac{1}{6} \\ \pause
& = & \frac{1}{6}(-100+100) \\ \pause
& = & 0
\end{eqnarray*}
Yes.

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Definition for Continuous Random Variables}
%\framesubtitle{Discrete or Continuous}

The expected value of a continuous random variable is \pause
{\LARGE
\begin{displaymath}
E(X) = \int_{-\infty}^\infty x \, f_{_X}(x) \, dx
\end{displaymath}
\pause
} % End size

\begin{itemize}
\item Provided $ \int_{-\infty}^\infty |x| \, f_{_X}(x) \, dx < \infty$. \pause If the integral diverges, the expected value does not exist.
\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{}
%\framesubtitle{}

{\LARGE
The expected value is the physical balance point.
} % End size

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Sometimes the expected value does not exist}
\framesubtitle{Need $ \int_{-\infty}^\infty |x| \, f_{_X}(x) \, dx < \infty$} \pause

For the Cauchy distribution, $f(x) = \frac{1}{\pi(1+x^2)}$.
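\pause
% Added check (not in the original deck): verify that this really is a density.
Note this is a legitimate density, since
$\int_{-\infty}^\infty \frac{dx}{\pi(1+x^2)} = \frac{1}{\pi} \arctan x \, \Big|_{-\infty}^\infty = \frac{1}{\pi}\left( \frac{\pi}{2} - \left( -\frac{\pi}{2} \right) \right) = 1$.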
\pause
\begin{eqnarray*}
E(|X|) & = & \int_{-\infty}^\infty |x| \, \frac{1}{\pi(1+x^2)} \, dx \\ \pause
& = & 2 \int_0^\infty \frac{x}{\pi(1+x^2)} \, dx \\ \pause
&& u = 1+x^2, ~du = 2x \, dx \\ \pause
& = & \frac{1}{\pi} \int_1^\infty \frac{1}{u} \, du \\ \pause
& = & \frac{1}{\pi} \ln u |_1^\infty \\ \pause
& = & \infty - 0 \pause = \infty \pause
\end{eqnarray*}
So to speak. \pause When we say an integral ``equals" infinity, we just mean it is unbounded above.

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Existence of expected values} \pause
%\framesubtitle{}

\begin{itemize}
\item If it is not mentioned in a general problem, existence of expected values is assumed. \pause
\item Sometimes, the answer to a specific problem is ``Oops! The expected value does not exist." \pause
\item You never need to show existence unless you are explicitly asked to do so. \pause
\item If you do need to deal with existence, Fubini's Theorem can help with multiple sums or integrals. \pause
\begin{itemize}
\item Part One says that if the integrand is positive, the answer is the same when you switch the order of integration, even when the answer is ``$\infty$." \pause
\item Part Two says that if the integral converges absolutely, you can switch the order of integration. \pause For us, absolute convergence just means that the expected value exists.
\end{itemize}
\end{itemize}

\end{frame}

% Now a break for some examples, called sample problems.
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Markov's inequality is here in the text.

\begin{frame}
\frametitle{The change of variables formula for expected value}
\framesubtitle{Theorems 3.1.1 and 3.2.1}

Let $X$ be a random variable and $Y=g(X)$. There are two ways to get $E(Y)$. \pause
\vspace{3mm}
\begin{enumerate}
\item Derive the distribution of $Y$ \pause and compute
\begin{displaymath}
E(Y) = \int_{-\infty}^\infty y \, f_{_Y}(y) \, dy
\end{displaymath} \pause
\item Use the distribution of $X$ \pause and calculate
\begin{displaymath}
E(g(X)) = \int_{-\infty}^\infty g(x) \, f_{_X}(x) \, dx
\end{displaymath} \pause
\end{enumerate}
Big theorem: These two expressions are equal.

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{The change of variables formula is very general}
\framesubtitle{Including but not limited to} \pause

\begin{columns} % Use Beamer's columns to use more of the margins!
\column{1.2\textwidth}
\begin{itemize}
\item[] $E(g(X)) = \int_{-\infty}^\infty g(x) \, f_{_X}(x) \, dx$ \pause
\item[]
\item[] $E(g(\mathbf{X})) = \int_{-\infty}^\infty \cdots \int_{-\infty}^\infty g(x_1, \ldots, x_p) \, f_{_\mathbf{X}}(x_1, \ldots, x_p) \, dx_1 \ldots dx_p $ \pause
\item[]
\item[] $E\left(g(X)\right) = \sum_x g(x) p_{_X}(x)$ \pause
\item[]
\item[] $E(g(\mathbf{X})) = \sum_{x_1} \cdots \sum_{x_p} g(x_1, \ldots, x_p) \, p_{_\mathbf{X}}(x_1, \ldots, x_p) $
\end{itemize}
\end{columns}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Example: Let $Y=aX$. Find $E(Y)$.} \pause
%\framesubtitle{}

\begin{eqnarray*}
E(aX) & = & \sum_x ax \, p_{_X}(x) \\ \pause
& = & a \, \sum_x x \, p_{_X}(x) \\ \pause
& = & a \, E(X) \pause
\end{eqnarray*}
So $E(aX) = aE(X)$.
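\pause
\vspace{3mm}
% Added remark: a sketch of the continuous case, which the original leaves implicit.
The continuous case is the same, with an integral in place of the sum:
$E(aX) = \int_{-\infty}^\infty ax \, f_{_X}(x) \, dx = a \int_{-\infty}^\infty x \, f_{_X}(x) \, dx = a E(X)$.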
\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Show that the expected value of a constant is the constant.} \pause
%\framesubtitle{}

\begin{eqnarray*}
E(a) & = & \sum_x a \, p_{_X}(x) \\ \pause
& = & a \, \sum_x \, p_{_X}(x) \\ \pause
& = & a \cdot 1 \\ \pause
& = & a \pause
\end{eqnarray*}
So $E(a) = a$.

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{$E(X+Y) = E(X)+E(Y)$} \pause
%\framesubtitle{}

\begin{columns}
\column{1.2\textwidth} % Single wide column to use more of the margin
\begin{eqnarray*}
E(X+Y) & = & \int_{-\infty}^\infty \int_{-\infty}^\infty (x+y) \, f_{_{X,Y}}(x,y) \, dx \, dy \\ \pause
& = & \int_{-\infty}^\infty \int_{-\infty}^\infty x \, f_{_{X,Y}}(x,y) \, dx \, dy + \int_{-\infty}^\infty \int_{-\infty}^\infty y \, f_{_{X,Y}}(x,y) \, dx \, dy \\ \pause
& = & E(X) + E(Y)
\end{eqnarray*}
\end{columns}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Putting it together}
%\framesubtitle{}

{\LARGE
\begin{displaymath}
E(a + bX + cY) = a + b \, E(X) + c \, E(Y)
\end{displaymath} \pause
} % End size
And in fact, \pause
{\LARGE
\begin{displaymath}
E\left(\sum_{i=1}^n a_iX_i \right) = \sum_{i=1}^n a_iE(X_i)
\end{displaymath} \pause
} % End size
You can move the expected value sign through summation signs and constants. \pause Expected value is a linear transformation.
% E(X+Y) = E(X)+E(Y) and E(aX) = aE(X)

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{$ E\left(\sum_{i=1}^n X_i \right) = \sum_{i=1}^n E(X_i)$, but in general} \pause
%\framesubtitle{}

{\LARGE
\begin{displaymath}
E(g(X)) \neq g(E(X))
\end{displaymath} \pause
} % End size
\vspace{6mm}

Unless $g(x)$ is a linear function. \pause So for example, \pause
\begin{itemize}
\item[] $E(\ln(X)) \neq \ln(E(X))$ \pause
\item[] $E(\frac{1}{X}) \neq \frac{1}{E(X)}$ \pause
\item[] $E(X^k) \neq \left( E(X)\right)^k$ \pause
\end{itemize}
\vspace{3mm}
That is, the statements are not true in general. They might be true for some distributions.

\end{frame}

% Should I talk about expected values of indicators? I'm running short of time in 2018. (And 2019).
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Variance}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\begin{frame}
\frametitle{Variance of a random variable $X$} \pause
% \framesubtitle{Definition}

Let $E(X) = \mu$ (the Greek letter ``mu"). \pause
{\LARGE
\begin{displaymath}
Var(X) = E\left( (X-\mu)^2 \right)
\end{displaymath} \pause
} % End size

\begin{itemize}
\item The average (squared) difference from the average. \pause
\item It's a measure of how spread out the distribution is. \pause
\item Another measure of spread is the standard deviation, the square root of the variance.
\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Variance rules} \pause
%\framesubtitle{}

\begin{itemize}
\item[] $Var(a+bX) = b^2Var(X)$ \pause
\item[]
\item[] $Var(X) = E(X^2)-[E(X)]^2$
\end{itemize}
% A short derivation of the second rule is on the next slide.

\end{frame}
% More sample questions here.
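%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% Added frame (a sketch, not in the original deck): derive the second variance
% rule from the definition, using linearity of expected value from Section 3.1.
\begin{frame}
\frametitle{Deriving $Var(X) = E(X^2)-[E(X)]^2$} \pause
%\framesubtitle{}

Let $E(X) = \mu$ and expand the square. \pause
\begin{eqnarray*}
Var(X) & = & E\left( (X-\mu)^2 \right) \\ \pause
& = & E\left( X^2 - 2\mu X + \mu^2 \right) \\ \pause
& = & E(X^2) - 2\mu \, E(X) + \mu^2 \\ \pause
& = & E(X^2) - 2\mu^2 + \mu^2 \\ \pause
& = & E(X^2) - \mu^2
\end{eqnarray*}
\pause
The third line moves the expected value through the sum and the constants, using linearity.

\end{frame}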
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Conditional Expectation}
\framesubtitle{The idea} \pause

Consider jointly distributed random variables $X$ and $Y$. \pause
\begin{itemize}
\item For each possible value of $X$, there is a conditional distribution of $Y$. \pause
\item Each conditional distribution has an expected value (sub-population mean). \pause
\item If you could estimate $E(Y|X=x)$, it would be a good way to predict $Y$ from $X$. \pause
\item Estimation comes later (in STA 260).
\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Definition of Conditional Expectation} \pause
%\framesubtitle{}

If $X$ and $Y$ are discrete, the conditional expected value of $Y$ given $X$ is \pause
{\LARGE
\begin{displaymath}
E(Y|X=x) = \sum_y y \, p_{_{Y|X}}(y|x)
\end{displaymath} \pause
} % End size
If $X$ and $Y$ are continuous, \pause
{\LARGE
\begin{displaymath}
E(Y|X=x) = \int_{-\infty}^\infty y \, f_{_{Y|X}}(y|x) \, dy
\end{displaymath}
} % End size

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Double Expectation: $ E(Y) = E[E(Y|X)]$}
\framesubtitle{Theorem A on page 149} \pause

To make sense of this, note \pause
\begin{itemize}
\item While $E(Y|X=x) = \int_{-\infty}^\infty y \, f_{_{Y|X}}(y|x) \, dy$ is a real-valued function of $x$, \pause
\item $E(Y|X)$ is a random variable, a function of the random variable $X$. \pause
\item $E(Y|X) = g(X) = \int_{-\infty}^\infty y \, f_{_{Y|X}}(y|X) \, dy$. \pause
\item So that in $E[E(Y|X)] = E[g(X)]$, the outer expected value is with respect to the probability distribution of $X$. \pause
\end{itemize}

\begin{eqnarray*}
E[E(Y|X)] & = & E[g(X)] \\ \pause
& = & \int_{-\infty}^\infty g(x) \, f_{_X}(x) \, dx \\ \pause
& = & \int_{-\infty}^\infty \left( \int_{-\infty}^\infty y \, f_{_{Y|X}}(y|x) \, dy \right) \, f_{_X}(x) \, dx
\end{eqnarray*}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Proof of the double expectation formula}
\framesubtitle{$ E(Y) = E[E(Y|X)]$} \pause

\begin{eqnarray*}
E[E(Y|X)] & = & \pause \int_{-\infty}^\infty \left( \int_{-\infty}^\infty y \, f_{_{Y|X}}(y|x) \, dy \right) \, f_{_X}(x) \, dx \\ \pause
& = & \int_{-\infty}^\infty \int_{-\infty}^\infty y \, \frac{f_{_{X,Y}}(x,y)}{f_{_X}(x)} \, dy \, f_{_X}(x) \, dx \\ \pause
& = & \int_{-\infty}^\infty \int_{-\infty}^\infty y \, f_{_{X,Y}}(x,y) \, dy \, dx \\ \pause
& = & E(Y)
\end{eqnarray*}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\section{Covariance}
%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\begin{frame}
\frametitle{Definition of Covariance} \pause
% \framesubtitle{}

Let $X$ and $Y$ be jointly distributed random variables with $E(X)=\mu_{_X}$ and $E(Y)=\mu_{_Y}$. The \emph{covariance} between $X$ and $Y$ is \pause
{\LARGE
\begin{displaymath}
Cov(X,Y) = E[(X-\mu_{_X})(Y-\mu_{_Y})]
\end{displaymath} \pause
} % End size

\begin{itemize}
\item If values of $X$ that are above average tend to go with values of $Y$ that are above average (and below average $X$ tends to go with below average $Y$), the covariance will be positive. \pause
\item If above average values of $X$ tend to go with values of $Y$ that are \emph{below} average, the covariance will be negative.
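\pause
% Added toy example (made-up distribution, not from the text) illustrating the sign:
\item {\small For example, if $(X,Y)$ equals $(1,1)$ or $(-1,-1)$ with probability $\frac{1}{2}$ each, then $\mu_{_X} = \mu_{_Y} = 0$ and $Cov(X,Y) = E(XY) = 1 > 0$.}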
\pause
\item Covariance means they vary together. \pause
\item You could think of $Var(X)=E[(X-\mu_{_X})^2]$ as $Cov(X,X)$.
\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Properties of Covariance} \pause
%\framesubtitle{}

\begin{itemize}
\item[] $Cov(X,Y) = E(XY)-E(X)E(Y)$ \pause
\item[] If $X$ and $Y$ are independent, $Cov(X,Y) = 0$. \pause
\item[] If $Cov(X,Y) = 0$, it does \emph{not} follow that $X$ and $Y$ are independent. \pause
\item[] $Cov(a+X,b+Y) \pause = Cov(X,Y)$ \pause
\item[] $Cov(aX,bY) \pause = abCov(X,Y)$ \pause % Prove as HW
\item[] $Cov(X,Y+Z) \pause = Cov(X,Y) + Cov(X,Z)$ \pause
% \item[] $Cov(aW+bX,cY+dZ) =$ \pause \\ ~~~ $acCov(W,Y) + adCov(W,Z) + bcCov(X,Y) + bdCov(X,Z)$ \pause % Maybe just put this in HW.
\item[] $Var(aX+bY) = a^2Var(X)+b^2Var(Y) + 2abCov(X,Y)$ \pause
% \item[]
\item[] If $X_1, \ldots, X_n$ are independent, \pause $Var(\sum_{i=1}^n X_i) = \sum_{i=1}^n Var(X_i)$ \pause
\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Correlation}
%\framesubtitle{Population correlation, Greek letter ``rho"} \pause

{\LARGE
\begin{displaymath}
Corr(X,Y) = \rho = \frac{Cov(X,Y)}{\sqrt{Var(X) \, Var(Y)}}
\end{displaymath} \pause
} % End size

\begin{itemize}
\item $-1 \leq \rho \leq 1$ \pause
\item Scale free: $Corr(aX,bY) = Corr(X,Y)$ for $a, b > 0$.
\end{itemize}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
\begin{frame}
\frametitle{Copyright Information}

This slide show was prepared by \href{http://www.utstat.toronto.edu/~brunner}{Jerry Brunner}, Department of Statistical Sciences, University of Toronto. It is licensed under a
\href{http://creativecommons.org/licenses/by-sa/3.0/deed.en_US}{Creative Commons Attribution - ShareAlike 3.0 Unported License}. Use any part of it as you like and share the result freely. The \LaTeX~source code is available from the course website:
\vspace{5mm}

\href{http://www.utstat.toronto.edu/~brunner/oldclass/256f19}{\small\texttt{http://www.utstat.toronto.edu/$^\sim$brunner/oldclass/256f19}}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\end{document}