\item$H(fairCoin)=-\sum_{i=1}^{2}(\frac{1}{2})\log_{2}(\frac{1}{2})=-\sum_{i=1}^{2}(\frac{1}{2})\times(-1)=1$\note{\item Because it is a fair coin, nothing we know helps us predict each toss, so each outcome carries the maximum of 1 bit}
\item How about an unfair coin? What is the entropy of a coin that comes up heads with probability 0.9?
\note{\item Whiteboard time if students stuck: \begin{itemize}}
\note{\item$P(x_{0})=0.9, P(x_{1})=0.1$}
\note{\item Answer is: $ H(dodgyCoin)=-\sum_{i=1}^{2}P(x_{i})\log_{2}P(x_{i})=-(0.9\log_{2}0.9+0.1\log_{2}0.1)\approx0.47$ bits}
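\note{\item More generally, for a coin with $P(heads)=p$ the binary entropy is $H_{b}(p)=-p\log_{2}p-(1-p)\log_{2}(1-p)$, so $H_{b}(0.5)=1$ and $H_{b}(0.9)=H_{b}(0.1)\approx0.47$ bits; this should be the curve in the figure on the next build}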
\uncover<6->{\includegraphics[scale=0.4]{entropy}\footnote<6->{Borrowed from \href{https://en.wikipedia.org/wiki/Entropy_(information_theory)}{Wikipedia}}}\note{\item Check our answer from earlier matches the diagram}
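% A quick numeric check of the entropies above (a sketch, assuming a Python 3
% interpreter is to hand; kept as a comment so it is not part of the rendered deck):
%   from math import log2
%
%   def binary_entropy(p):
%       # Entropy in bits of a coin that lands heads with probability p.
%       return -(p * log2(p) + (1 - p) * log2(1 - p))
%
%   print(binary_entropy(0.5))  # 1.0 (fair coin)
%   print(binary_entropy(0.9))  # ~0.469, matches the curve in the figure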