Merge pull request #23 from kashefy/hmm
Hmm
kashefy authored Jul 15, 2020
2 parents 93888c1 + ea45391 commit 6b78707
Showing 51 changed files with 38,519 additions and 1 deletion.
2 changes: 1 addition & 1 deletion notes/12_gmm-em/2_em.tex
@@ -88,7 +88,7 @@ \subsection{Latent variable models}

\svspace{-7mm}

-\notesonly{We have also see how Gaussian Mixture Models can model assignment variables using mixture components.}
+\notesonly{We have also seen how Gaussian Mixture Models can model assignment variables using mixture components.}

\begin{center}
\includegraphics[width=0.7\textwidth]{img/latentexample_gmm}
136 changes: 136 additions & 0 deletions notes/13_hmm/0_hmm-motivation.tex
@@ -0,0 +1,136 @@
\section{A latent variable model for temporal data}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\begin{frame}{Let's talk about the weather}

\svspace{-5mm}

\begin{center}
\includegraphics[width=0.7\textwidth]{img/weather}
\end{center}

\begin{center}
\slidesonly{
\only<2>{
\includegraphics[width=0.6\textwidth]{img/latentexample_gmm_weather}
}
}
\only<3>{
\includegraphics[width=0.6\textwidth]{img/latentexample_gmm_weather_icons}
}
\notesonly{\captionof{figure}
{A density estimate from a latent variable model}
}\slidesonly{\captionof*{figure}
{A density estimate from a latent variable model}
}
\end{center}

\end{frame}

\begin{frame}{It's not all about amplitudes}

\begin{center}
\includegraphics[width=0.9\textwidth]{img/sin}
\end{center}


\end{frame}

\begin{frame}

\begin{itemize}
\only<1->{
\item We have a sequence of observed events: $\vec x^{(t)} \in \R^N$
\item Successive events $\vec x^{(t)}, \vec x^{(t-1)}$ \underline{cannot} be treated as independent
}
\slidesonly{
\svspace{10mm}
\only<2>{
\begin{minipage}{0.45\textwidth}
\captionof*{figure}{at t=1}
\includegraphics[width=0.99\textwidth]{img/Living-Room-Scene_001_figure_1}
\end{minipage}
\begin{minipage}{0.45\textwidth}
\captionof*{figure}{at t=2}
\includegraphics[width=0.99\textwidth]{img/Living-Room-Scene_001_figure_2}
\end{minipage}\\

\begin{minipage}{0.45\textwidth}
\includegraphics[width=0.99\textwidth]{img/Living-Room-Scene_001_figure_3}
\captionof*{figure}{at t=3}
\end{minipage}
\begin{minipage}{0.45\textwidth}
\includegraphics[width=0.99\textwidth]{img/Living-Room-Scene_001_figure_4}
\captionof*{figure}{at t=4}
\end{minipage}
}
}
\only<3->{
\item Assumption:\\
What we observe at every time step in the sequence $\{ \vec x^{(t)}\}_{t=1}^{T}$ is a result of the ``system'' being in a specific \emph{hidden state} at every time step $t$:\\
e.g.\ 1-out-of-$M$ coding for $M$ different states\notesonly{ (made concrete in the sketch after this frame)}:
\begin{itemize}
\item $\vec{m}^{(t)} = \big( m_1^{(t)}, \dots, m_M^{(t)} \big)^\top \in \left\{ 0, 1 \right\}^M$ \\
\begin{align}
m_q^{(t)} &=
\begin{cases}
1, & \text{if system is in state } q \text{ at time}~t\\
0, & \text{otherwise}
\end{cases}
%\hspace{0.5cm}
\;\text{with} \;
\sum_{q=1}^{M} m_q^{(t)} = 1
\end{align}
\end{itemize}
\item Our observed sequence is a result of this \emph{hidden state} sequence
}
\end{itemize}

\end{frame}
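
A minimal NumPy sketch of this 1-out-of-$M$ coding; the number of states $M$ and the state sequence below are made up for illustration:

\begin{verbatim}
import numpy as np

# Hypothetical example: M = 3 hidden states, state sequence of length T = 5.
states = np.array([0, 0, 2, 1, 2])
M = 3

# Row t is the vector m^(t): a single 1 marks the state at time t.
m = np.eye(M, dtype=int)[states]
print(m)
# [[1 0 0]
#  [1 0 0]
#  [0 0 1]
#  [0 1 0]
#  [0 0 1]]

# sum_q m_q^(t) = 1 for every t
assert (m.sum(axis=1) == 1).all()
\end{verbatim}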

\begin{frame}
\frametitle{Possibilities}

We may want to:
\begin{itemize}
\item<only@1> Describe the sequence of hidden states:
$\{ \vec m^{(1)}, \ldots, \vec m^{(t)}, \ldots, \vec m^{(T)}\} = \{ \vec m^{(t)}\}_{t=1}^{T} \stackrel{\substack{\text{for}\\ \text{brevity}}}{=} \{ \vec m^{(t)}\}$
after observing the sequence $\{\vec x^{(t)}\}$\\

\svspace{5mm}

\begin{center}
\includegraphics[width=0.7\textwidth]{img/weather_est_states}
\notesonly{\captionof{figure}{Predict the sequence of hidden states}}
\end{center}

Example: hear sounds $\rightarrow$ what words were said? (transcribing speech)
\item<only@2> Generate a sequence of observations given a sequence of hidden variables $\{\vec m^{(t)}\}$
\\

\svspace{5mm}

\begin{center}
\includegraphics[width=0.7\textwidth]{img/weather_est_obs}
\notesonly{\captionof{figure}{Generate the sequence of observations}}
\end{center}

Example: type in words $\rightarrow$ hear speech (speech synthesis)
\item<only@3> Given a sequence $\{\vec x^{(t)}\}$ or $\{\vec m^{(t)}\}$ or both,\\
predict the next $\vec x$ and/or $\vec m$

\svspace{5mm}

\begin{center}
\includegraphics[width=0.7\textwidth]{img/weather_est_next}
\notesonly{\captionof{figure}{Predict the next state and/or observation}}
\end{center}

\end{itemize}

\notesonly{
We can achieve all of the above using Hidden Markov Models (HMMs).
}

\end{frame}
79 changes: 79 additions & 0 deletions notes/13_hmm/0_recap-latent.tex
@@ -0,0 +1,79 @@
\section{Recap: Latent variable models}

\begin{frame}
\mode<presentation>{
\begin{center} \huge
\secname
\end{center}
}
\begin{center}
Latent variable models: an abstraction of Gaussian Mixture Models
\end{center}

\end{frame}

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%

\begin{frame}{}

\begin{center}
\includegraphics[width=0.7\textwidth]{img/latentexample_gmm}
\notesonly{\captionof{figure}{A Gaussian Mixture Model as a latent variable model}}
\end{center}

``Groups'' may exist in the data: there are \emph{hidden causes} behind the observations, and we need a fit that accounts for them.

\end{frame}

\begin{frame}{Assignment variables as latent variables}

A simple way to understand what latent variables represent is to view them as assignment variables to the components that we need to estimate.\notesonly{ A small numerical sketch follows this frame.}

Example:\\

\begin{itemize}
\item assignment variables: $\vec{m}^{(\alpha)} = \big( m_1^{(\alpha)}, \dots, m_M^{(\alpha)} \big)^\top \in \left\{ 0, 1 \right\}^M$ \\
\begin{align}
m_q^{(\alpha)} &=
\begin{cases}
1, & \text{if component } q \text{ has generated point}~\alpha\\
0, & \text{otherwise}
\end{cases}
%\hspace{0.5cm}
\;\text{with} \;
\sum_{q=1}^{M} m_q^{(\alpha)} = 1
\end{align}
\end{itemize}

\end{frame}
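
To make the assignment variables concrete, here is a small numerical sketch, assuming a 1D mixture with $M = 2$ Gaussian components and made-up parameters; the hard assignment picks the most responsible component for each point:

\begin{verbatim}
import numpy as np
from scipy.stats import norm

# Made-up 1D data and mixture parameters (M = 2 components).
x = np.array([-2.1, -1.8, 2.0, 2.3])  # observed points
weights = np.array([0.5, 0.5])        # mixing weights
mu = np.array([-2.0, 2.0])            # component means
sigma = np.array([0.5, 0.5])          # component standard deviations

# Responsibility of component q for point alpha, shape (alpha, q).
lik = weights * norm.pdf(x[:, None], mu, sigma)
gamma = lik / lik.sum(axis=1, keepdims=True)

# Hard assignment: m_q^(alpha) = 1 for the most responsible component.
m = np.eye(2, dtype=int)[gamma.argmax(axis=1)]
print(m)  # each row sums to 1, matching sum_q m_q^(alpha) = 1
\end{verbatim}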

\begin{frame}{Gaussian Mixture Models are latent variable models}

\slidesonly{
\begingroup
\small
\begin{itemize}
\item assignment variables: $\vec{m}^{(\alpha)} = \big( m_1^{(\alpha)}, \dots, m_M^{(\alpha)} \big)^\top \in \left\{ 0, 1 \right\}^M$ \\
\begin{align}
m_q^{(\alpha)} &=
\begin{cases}
1, & \text{if component } q \text{ has generated point}~\alpha\\
0, & \text{otherwise}
\end{cases}
%\hspace{0.5cm}
\;\text{with} \;
\sum_{q=1}^{M} m_q^{(\alpha)} = 1
\end{align}
\end{itemize}
\endgroup
}

\notesonly{We have also seen how Gaussian Mixture Models can model assignment variables using mixture components.}

\begin{center}
\includegraphics[width=0.7\textwidth]{img/latentexample_gmm_annot}
\notesonly{\captionof{figure}{A Gaussian Mixture Model as a latent variable model with values of the assignment variables for the different groups in the data.}}
\end{center}


\end{frame}
34 changes: 34 additions & 0 deletions notes/13_hmm/1_markov.tex
@@ -0,0 +1,34 @@
\section{Markov chains}

\begin{frame}
\mode<presentation>{
\begin{center} \huge
\secname
\end{center}
}
\mode<presentation>{
\begin{center}
Remember them from stochastic optimization?
\end{center}
}
\end{frame}

\begin{frame}{\secname}

Consider the random variables $y^{(1)}, y^{(2)}, \ldots, y^{(T-1)}, y^{(T)}$.
In general, the $y$'s are \textbf{not} statistically independent:
\begin{equation}
P(y^{(1)}, y^{(2)}, \ldots, y^{(T-1)}, y^{(T)}) \ne \prod_{t=1}^T P(y^{(t)})
\end{equation}

But

\begin{equation}
P(y^{(t)} | y^{(t-1)}, y^{(t-2)}, \ldots, y^{(2)}, y^{(1)}) = P(y^{(t)} | y^{(t-1)})
\end{equation}

$y^{(t)}$ depends only on $y^{(t-1)}$ $\rightarrow$ \emph{Markov property}

A sequence of samples of these $y$'s $\rightarrow$ \emph{Markov chain}
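
\notesonly{
Combining the two statements: the joint does not factor into independent marginals, but the Markov property still collapses it, via the chain rule, into pairwise factors:
\begin{equation}
P(y^{(1)}, y^{(2)}, \ldots, y^{(T)}) = P(y^{(1)}) \prod_{t=2}^{T} P(y^{(t)} \,|\, y^{(t-1)})
\end{equation}
}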

\end{frame}
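
A minimal sketch of sampling such a chain, assuming a made-up $3 \times 3$ transition matrix; each state is drawn conditioned only on its predecessor:

\begin{verbatim}
import numpy as np

rng = np.random.default_rng(0)

# Made-up transition matrix: A[i, j] = P(y^(t) = j | y^(t-1) = i).
# Each row is a distribution over the next state (rows sum to 1).
A = np.array([[0.8, 0.15, 0.05],
              [0.2, 0.6,  0.2 ],
              [0.1, 0.3,  0.6 ]])
p0 = np.array([1.0, 0.0, 0.0])  # initial state distribution

def sample_chain(A, p0, T):
    """Draw a length-T chain: y^(t) depends only on y^(t-1)."""
    y = np.empty(T, dtype=int)
    y[0] = rng.choice(len(p0), p=p0)
    for t in range(1, T):
        y[t] = rng.choice(A.shape[1], p=A[y[t - 1]])
    return y

print(sample_chain(A, p0, 10))
\end{verbatim}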