Commit 4e8b17fe authored by Lukas Kettenbach's avatar Lukas Kettenbach
Browse files

Documentation, some bug fixes, presentation

parent c3d5f339
This diff is collapsed.
\section{Factor Trees}
% Section outline:
% - task: exact inference via factor elimination
% - performance depends on the elimination order
% - factor tree with messages; tree building
\subsection{Task Description}
\begin{frame}
  \frametitle{Task Description}
  \begin{columns}
    \column{.4\textwidth}
    \begin{itemize}
      \item Exact inference
      \item Use elimination trees
      \item Prior Marginal, Posterior Marginal \& PoE
    \end{itemize}
    \column{0.6\textwidth}
    \includegraphics[width=.8\textwidth]{figures/algo10}
  \end{columns}
\end{frame}
% NOTE(review): the following subsections are headings only; their frames
% were apparently never written (they still show up in the TOC/navigation).
\subsection{Factor Elimination}
\subsection{Elimination Trees}
\subsection{Building Strategies}
\subsection{Literature}
\begin{frame}
  \frametitle{Literature}
  \begin{itemize}
    % Book title emphasized; removed the stray space before the comma.
    \item \emph{Modeling and Reasoning with Bayesian Networks}, Adnan Darwiche
  \end{itemize}
\end{frame}
% Closing slide.
\begin{frame}
  \textbf{Thank you for your attention!}
\end{frame}
This diff is collapsed.
\section{Introduction}
\begin{frame}
  \frametitle{Introduction}
  % NOTE(review): \Large is a declaration and stays in effect for the rest of
  % the frame (the itemize below is rendered \Large too) — confirm intended.
  \Large\textbf{Idea}
  \begin{itemize}
    \item probabilistic inference modules for Python
    \item library which offers well-known probabilistic (graphical) models like Bayesian or temporal networks
    \item variety of inference algorithms
  \end{itemize}
  \Large\textbf{Download/Documentation/Installation Guide}
  \begin{itemize}
    \item \url{github.com/mbaumBielefeld/PRIMO}
    \item \url{github.com/mbaumBielefeld/PRIMO/wiki}
  \end{itemize}
\end{frame}
\begin{frame}
  \frametitle{Structure}
  % Repository layout overview.
  \Large\textbf{PRIMO/}
  \begin{itemize}
    \item doc/
    \item examples/
    \item primo/
    \begin{itemize}
      \item core/ $\rightarrow$ BayesNet.py, Node.py, DynamicBayesNet.py, \ldots
      \item decision/ $\rightarrow$ DecisionNode.py, UtilityNode.py, \ldots
      \item reasoning/ $\rightarrow$ DiscreteNode.py, density/, MCMC.py, \ldots
      \item tests/
      \item utils/ $\rightarrow$ XMLBIF.py
    \end{itemize}
    \item setup.py
  \end{itemize}
\end{frame}
\ No newline at end of file
This is pdfTeX, Version 3.1415926-1.40.10 (TeX Live 2009/Debian) (format=pdflatex 2013.4.6) 11 OCT 2013 13:45
entering extended mode
%&-line parsing enabled.
**lukas.tex
(./lukas.tex
LaTeX2e <2009/09/24>
Babel <v3.8l> and hyphenation patterns for english, usenglishmax, dumylang, noh
yphenation, ngerman, german, german-x-2009-06-19, ngerman-x-2009-06-19, loaded.
! Undefined control sequence.
l.1 \section
{Dynamic Bayesian Networks}
The control sequence at the end of the top line
of your error message was never \def'ed. If you have
misspelled it (e.g., `\hobx'), type `I' and the correct
spelling (e.g., `I\hbox'). Otherwise just continue,
and I'll forget about whatever was undefined.
! LaTeX Error: Missing \begin{document}.
See the LaTeX manual or LaTeX Companion for explanation.
Type H <return> for immediate help.
...
l.1 \section{D
ynamic Bayesian Networks}
You're in trouble here. Try typing <return> to proceed.
If that doesn't work, type X <return> to quit.
Missing character: There is no D in font nullfont!
Missing character: There is no y in font nullfont!
Missing character: There is no n in font nullfont!
Missing character: There is no a in font nullfont!
Missing character: There is no m in font nullfont!
Missing character: There is no i in font nullfont!
Missing character: There is no c in font nullfont!
Missing character: There is no B in font nullfont!
Missing character: There is no a in font nullfont!
Missing character: There is no y in font nullfont!
Missing character: There is no e in font nullfont!
Missing character: There is no s in font nullfont!
Missing character: There is no i in font nullfont!
Missing character: There is no a in font nullfont!
Missing character: There is no n in font nullfont!
Missing character: There is no N in font nullfont!
Missing character: There is no e in font nullfont!
Missing character: There is no t in font nullfont!
Missing character: There is no w in font nullfont!
Missing character: There is no o in font nullfont!
Missing character: There is no r in font nullfont!
Missing character: There is no k in font nullfont!
Missing character: There is no s in font nullfont!
Overfull \hbox (20.0pt too wide) in paragraph at lines 1--2
[]
[]
! LaTeX Error: Missing \begin{document}.
See the LaTeX manual or LaTeX Companion for explanation.
Type H <return> for immediate help.
...
l.4 \frametitle
{Dynamic Bayesian Networks}
You're in trouble here. Try typing <return> to proceed.
If that doesn't work, type X <return> to quit.
! Undefined control sequence.
<argument> \frametitle
l.4 \frametitle
{Dynamic Bayesian Networks}
The control sequence at the end of the top line
of your error message was never \def'ed. If you have
misspelled it (e.g., `\hobx'), type `I' and the correct
spelling (e.g., `I\hbox'). Otherwise just continue,
and I'll forget about whatever was undefined.
Missing character: There is no D in font nullfont!
Missing character: There is no y in font nullfont!
Missing character: There is no n in font nullfont!
Missing character: There is no a in font nullfont!
Missing character: There is no m in font nullfont!
Missing character: There is no i in font nullfont!
Missing character: There is no c in font nullfont!
Missing character: There is no B in font nullfont!
Missing character: There is no a in font nullfont!
Missing character: There is no y in font nullfont!
Missing character: There is no e in font nullfont!
Missing character: There is no s in font nullfont!
Missing character: There is no i in font nullfont!
Missing character: There is no a in font nullfont!
Missing character: There is no n in font nullfont!
Missing character: There is no N in font nullfont!
Missing character: There is no e in font nullfont!
Missing character: There is no t in font nullfont!
Missing character: There is no w in font nullfont!
Missing character: There is no o in font nullfont!
Missing character: There is no r in font nullfont!
Missing character: There is no k in font nullfont!
Missing character: There is no s in font nullfont!
! LaTeX Error: Environment definition undefined.
See the LaTeX manual or LaTeX Companion for explanation.
Type H <return> for immediate help.
...
l.5 \begin{definition}
Your command was ignored.
Type I <command> <return> to replace it with another command,
or <return> to continue without it.
Missing character: There is no D in font nullfont!
Missing character: There is no e in font nullfont!
Missing character: There is no f in font nullfont!
Missing character: There is no i in font nullfont!
Missing character: There is no n in font nullfont!
Missing character: There is no i in font nullfont!
Missing character: There is no t in font nullfont!
Missing character: There is no i in font nullfont!
Missing character: There is no o in font nullfont!
Missing character: There is no n in font nullfont!
! LaTeX Error: \begin{frame} on input line 3 ended by \end{definition}.
See the LaTeX manual or LaTeX Companion for explanation.
Type H <return> for immediate help.
...
l.7 \end{definition}
Your command was ignored.
Type I <command> <return> to replace it with another command,
or <return> to continue without it.
)
! Emergency stop.
<*> lukas.tex
*** (job aborted, no legal \end found)
Here is how much of TeX's memory you used:
9 strings out of 495021
166 string characters out of 1181036
45235 words of memory out of 3000000
3301 multiletter control sequences out of 15000+50000
3640 words of font info for 14 fonts, out of 3000000 for 9000
28 hyphenation exceptions out of 8191
7i,3n,6p,50b,28s stack positions out of 5000i,500n,10000p,200000b,50000s
! ==> Fatal error occurred, no output PDF file produced!
\section{Dynamic Bayesian Networks}
\subsection{Definition}
\begin{frame}
  %\frametitle{Dynamic Bayesian Networks}
  % DBN definition (2-TBN unrolling) in the style of Koller & Friedman.
  \begin{definition}
    A DBN is a pair $(B_0, B_{\rightarrow})$, where $B_0$ is a Bayesian network over $\chi^{(0)}$ representing the initial distribution, and $B_{\rightarrow}$ is a 2-TBN for the process. For any desired time span $T \geq 0$, the distribution over $\chi^{(0:T)}$ is defined as an unrolled Bayesian network, where, for any $i=1,\dots,n$:
    \begin{itemize}
      \item the structure and CPDs of $X_i^{(0)}$ are the same as those for $X_i$ in $B_0$,
      % NOTE(review): condition changed from "t >= 0" to "t > 0"; slice 0 is
      % already covered by B_0 (first bullet), so the transition model can
      % only apply from slice 1 on — confirm against the cited definition.
      \item the structure and CPDs of $X_i^{(t)}$ for $t > 0$ are the same as those for $X_i'$ in $B_\rightarrow$.
    \end{itemize}
  \end{definition}
\end{frame}
\subsection{Example}
\begin{frame}
  %\frametitle{Example}
  \includegraphics[width=.9\textwidth]{figures/dbn}
\end{frame}
\subsection{Inference}
\begin{frame}
  \frametitle{Exact Inference}
  \begin{itemize}
    \item We can use standard inference algorithms (e.g.\ variable elimination)
    \item Problem I: run inference on larger and larger networks over time
    \item Problem II: maintain our entire history of observations indefinitely
    \item Solution/workaround: use approximate inference
  \end{itemize}
\end{frame}
\begin{frame}
  \frametitle{Approximate Inference}
  \begin{itemize}
    \item We can use some kind of Likelihood Weighting
    \item Two modifications:
    \begin{enumerate}
      \item run all samples together through the DBN, one slice at a time
      \item focus the set of samples on the high-probability regions of the state space
    \end{enumerate}
    \item Particle Filter:
    \begin{enumerate}
      \item Each sample is propagated forward by sampling the next state value $x_{t+1}$ given the current value $x_t$ for the sample
      \item Each sample is weighted by the likelihood it assigns to the new evidence $P(e_{t+1} \mid x_{t+1})$
      \item The population is \emph{resampled} to generate a new population of $N$ samples. Each new sample is selected from the current population; the probability that a particular sample is selected is proportional to its weight.
    \end{enumerate}
  \end{itemize}
\end{frame}
\begin{frame}
  \frametitle{Algorithm}
  % TODO: slide body was never written (frame is empty apart from the title).
\end{frame}
\ No newline at end of file
\section{manu} % TODO: placeholder section — content not yet written
\ No newline at end of file
\section{max} % TODO: placeholder section — content not yet written
\ No newline at end of file
\relax
\ifx\hyper@anchor\@undefined
\global \let \oldcontentsline\contentsline
\gdef \contentsline#1#2#3#4{\oldcontentsline{#1}{#2}{#3}}
\global \let \oldnewlabel\newlabel
\gdef \newlabel#1#2{\newlabelxx{#1}#2}
\gdef \newlabelxx#1#2#3#4#5#6{\oldnewlabel{#1}{{#2}{#3}}}
\AtEndDocument{\let \contentsline\oldcontentsline
\let \newlabel\oldnewlabel}
\else
\global \let \hyper@last\relax
\fi
\catcode`"\active
\@writefile{toc}{\beamer@endinputifotherversion {3.10pt}}
\@writefile{nav}{\beamer@endinputifotherversion {3.10pt}}
\select@language{ngerman}
\@writefile{toc}{\select@language{ngerman}}
\@writefile{lof}{\select@language{ngerman}}
\@writefile{lot}{\select@language{ngerman}}
\@writefile{nav}{\headcommand {\slideentry {0}{0}{1}{1/1}{}{0}}}
\@writefile{nav}{\headcommand {\beamer@framepages {1}{1}}}
\@writefile{toc}{\beamer@sectionintoc {1}{Introduction}{2}{0}{1}}
\@writefile{nav}{\headcommand {\sectionentry {1}{Introduction}{2}{Introduction}{0}}}
\@writefile{nav}{\headcommand {\beamer@sectionpages {1}{1}}}
\@writefile{nav}{\headcommand {\beamer@subsectionpages {1}{1}}}
\@writefile{nav}{\headcommand {\slideentry {1}{0}{2}{2/2}{}{0}}}
\@writefile{nav}{\headcommand {\beamer@framepages {2}{2}}}
\@writefile{nav}{\headcommand {\slideentry {1}{0}{3}{3/3}{}{0}}}
\@writefile{nav}{\headcommand {\beamer@framepages {3}{3}}}
\@writefile{toc}{\beamer@sectionintoc {2}{Dynamic Bayesian Networks}{4}{0}{2}}
\@writefile{nav}{\headcommand {\sectionentry {2}{Dynamic Bayesian Networks}{4}{Dynamic Bayesian Networks}{0}}}
\@writefile{nav}{\headcommand {\beamer@sectionpages {2}{3}}}
\@writefile{nav}{\headcommand {\beamer@subsectionpages {2}{3}}}
\@writefile{toc}{\beamer@subsectionintoc {2}{1}{Definition}{4}{0}{2}}
\@writefile{nav}{\headcommand {\beamer@subsectionentry {0}{2}{1}{4}{Definition}}\headcommand {\beamer@subsectionpages {4}{3}}}
\@writefile{nav}{\headcommand {\slideentry {2}{1}{1}{4/4}{Definition}{0}}}
\@writefile{nav}{\headcommand {\beamer@framepages {4}{4}}}
\@writefile{toc}{\beamer@subsectionintoc {2}{2}{Example}{5}{0}{2}}
\@writefile{nav}{\headcommand {\beamer@subsectionentry {0}{2}{2}{5}{Example}}\headcommand {\beamer@subsectionpages {4}{4}}}
\@writefile{nav}{\headcommand {\slideentry {2}{2}{1}{5/5}{Example}{0}}}
\@writefile{nav}{\headcommand {\beamer@framepages {5}{5}}}
\@writefile{toc}{\beamer@subsectionintoc {2}{3}{Inference}{6}{0}{2}}
\@writefile{nav}{\headcommand {\beamer@subsectionentry {0}{2}{3}{6}{Inference}}\headcommand {\beamer@subsectionpages {5}{5}}}
\@writefile{nav}{\headcommand {\slideentry {2}{3}{1}{6/6}{Inference}{0}}}
\@writefile{nav}{\headcommand {\beamer@framepages {6}{6}}}
\@writefile{nav}{\headcommand {\slideentry {2}{3}{2}{7/7}{Inference}{0}}}
\@writefile{nav}{\headcommand {\beamer@framepages {7}{7}}}
\@writefile{nav}{\headcommand {\slideentry {2}{3}{3}{8/8}{Inference}{0}}}
\@writefile{nav}{\headcommand {\beamer@framepages {8}{8}}}
\@writefile{toc}{\beamer@sectionintoc {3}{Factor Trees}{9}{0}{3}}
\@writefile{nav}{\headcommand {\sectionentry {3}{Factor Trees}{9}{Factor Trees}{0}}}
\@writefile{nav}{\headcommand {\beamer@sectionpages {4}{8}}}
\@writefile{nav}{\headcommand {\beamer@subsectionpages {6}{8}}}
\@writefile{toc}{\beamer@subsectionintoc {3}{1}{Task Description}{9}{0}{3}}
\@writefile{nav}{\headcommand {\beamer@subsectionentry {0}{3}{1}{9}{Task Description}}\headcommand {\beamer@subsectionpages {9}{8}}}
\@writefile{nav}{\headcommand {\slideentry {3}{1}{1}{9/9}{Task Description}{0}}}
\@writefile{nav}{\headcommand {\beamer@framepages {9}{9}}}
\@writefile{toc}{\beamer@subsectionintoc {3}{2}{Factor Elimination}{10}{0}{3}}
\@writefile{nav}{\headcommand {\beamer@subsectionentry {0}{3}{2}{10}{Factor Elimination}}\headcommand {\beamer@subsectionpages {9}{9}}}
\@writefile{toc}{\beamer@subsectionintoc {3}{3}{Elimination Trees}{10}{0}{3}}
\@writefile{nav}{\headcommand {\beamer@subsectionentry {0}{3}{3}{10}{Elimination Trees}}\headcommand {\beamer@subsectionpages {10}{9}}}
\@writefile{toc}{\beamer@subsectionintoc {3}{4}{Building Strategies}{10}{0}{3}}
\@writefile{nav}{\headcommand {\beamer@subsectionentry {0}{3}{4}{10}{Building Strategies}}\headcommand {\beamer@subsectionpages {10}{9}}}
\@writefile{toc}{\beamer@subsectionintoc {3}{5}{Literature}{10}{0}{3}}
\@writefile{nav}{\headcommand {\beamer@subsectionentry {0}{3}{5}{10}{Literature}}\headcommand {\beamer@subsectionpages {10}{9}}}
\@writefile{nav}{\headcommand {\slideentry {3}{5}{1}{10/10}{Literature}{0}}}
\@writefile{nav}{\headcommand {\beamer@framepages {10}{10}}}
\@writefile{nav}{\headcommand {\slideentry {3}{5}{2}{11/11}{Literature}{0}}}
\@writefile{nav}{\headcommand {\beamer@framepages {11}{11}}}
\@writefile{nav}{\headcommand {\beamer@partpages {1}{11}}}
\@writefile{nav}{\headcommand {\beamer@subsectionpages {12}{11}}}
\@writefile{nav}{\headcommand {\beamer@sectionpages {12}{11}}}
\@writefile{nav}{\headcommand {\beamer@documentpages {11}}}
\@writefile{nav}{\headcommand {\def \inserttotalframenumber {11}}}
This diff is collapsed.
\beamer@endinputifotherversion {3.10pt}
\headcommand {\slideentry {0}{0}{1}{1/1}{}{0}}
\headcommand {\beamer@framepages {1}{1}}
\headcommand {\sectionentry {1}{Introduction}{2}{Introduction}{0}}
\headcommand {\beamer@sectionpages {1}{1}}
\headcommand {\beamer@subsectionpages {1}{1}}
\headcommand {\slideentry {1}{0}{2}{2/2}{}{0}}
\headcommand {\beamer@framepages {2}{2}}
\headcommand {\slideentry {1}{0}{3}{3/3}{}{0}}
\headcommand {\beamer@framepages {3}{3}}
\headcommand {\sectionentry {2}{Dynamic Bayesian Networks}{4}{Dynamic Bayesian Networks}{0}}
\headcommand {\beamer@sectionpages {2}{3}}
\headcommand {\beamer@subsectionpages {2}{3}}
\headcommand {\beamer@subsectionentry {0}{2}{1}{4}{Definition}}\headcommand {\beamer@subsectionpages {4}{3}}
\headcommand {\slideentry {2}{1}{1}{4/4}{Definition}{0}}
\headcommand {\beamer@framepages {4}{4}}
\headcommand {\beamer@subsectionentry {0}{2}{2}{5}{Example}}\headcommand {\beamer@subsectionpages {4}{4}}
\headcommand {\slideentry {2}{2}{1}{5/5}{Example}{0}}
\headcommand {\beamer@framepages {5}{5}}
\headcommand {\beamer@subsectionentry {0}{2}{3}{6}{Inference}}\headcommand {\beamer@subsectionpages {5}{5}}
\headcommand {\slideentry {2}{3}{1}{6/6}{Inference}{0}}
\headcommand {\beamer@framepages {6}{6}}
\headcommand {\slideentry {2}{3}{2}{7/7}{Inference}{0}}
\headcommand {\beamer@framepages {7}{7}}
\headcommand {\slideentry {2}{3}{3}{8/8}{Inference}{0}}
\headcommand {\beamer@framepages {8}{8}}
\headcommand {\sectionentry {3}{Factor Trees}{9}{Factor Trees}{0}}
\headcommand {\beamer@sectionpages {4}{8}}
\headcommand {\beamer@subsectionpages {6}{8}}
\headcommand {\beamer@subsectionentry {0}{3}{1}{9}{Task Description}}\headcommand {\beamer@subsectionpages {9}{8}}
\headcommand {\slideentry {3}{1}{1}{9/9}{Task Description}{0}}
\headcommand {\beamer@framepages {9}{9}}
\headcommand {\beamer@subsectionentry {0}{3}{2}{10}{Factor Elimination}}\headcommand {\beamer@subsectionpages {9}{9}}
\headcommand {\beamer@subsectionentry {0}{3}{3}{10}{Elimination Trees}}\headcommand {\beamer@subsectionpages {10}{9}}
\headcommand {\beamer@subsectionentry {0}{3}{4}{10}{Building Strategies}}\headcommand {\beamer@subsectionpages {10}{9}}
\headcommand {\beamer@subsectionentry {0}{3}{5}{10}{Literature}}\headcommand {\beamer@subsectionpages {10}{9}}
\headcommand {\slideentry {3}{5}{1}{10/10}{Literature}{0}}
\headcommand {\beamer@framepages {10}{10}}
\headcommand {\slideentry {3}{5}{2}{11/11}{Literature}{0}}
\headcommand {\beamer@framepages {11}{11}}
\headcommand {\beamer@partpages {1}{11}}
\headcommand {\beamer@subsectionpages {12}{11}}
\headcommand {\beamer@sectionpages {12}{11}}
\headcommand {\beamer@documentpages {11}}
\headcommand {\def \inserttotalframenumber {11}}
\BOOKMARK [2][]{Outline0.1}{Introduction}{}
\BOOKMARK [2][]{Outline0.2}{Dynamic Bayesian Networks}{}
\BOOKMARK [3][]{Outline0.2.1.4}{Definition}{Outline0.2}
\BOOKMARK [3][]{Outline0.2.2.5}{Example}{Outline0.2}
\BOOKMARK [3][]{Outline0.2.3.6}{Inference}{Outline0.2}
\BOOKMARK [2][]{Outline0.3}{Factor Trees}{}
\BOOKMARK [3][]{Outline0.3.1.9}{Task Description}{Outline0.3}
\BOOKMARK [3][]{Outline0.3.2.10}{Factor Elimination}{Outline0.3}
\BOOKMARK [3][]{Outline0.3.3.10}{Elimination Trees}{Outline0.3}
\BOOKMARK [3][]{Outline0.3.4.10}{Building Strategies}{Outline0.3}
\BOOKMARK [3][]{Outline0.3.5.10}{Literature}{Outline0.3}
\documentclass[c, compress, ngerman, presentation, final, xcolor=svgnames]{beamer}
% ---------- standard packages ----------------
\usepackage[utf8]{inputenc}
\usepackage{times} % NOTE(review): obsolete font package (l2tabu); kept so the output is unchanged — consider newtx
\usepackage[T1]{fontenc}
\usepackage{eurosym, babel}
\usepackage{amsmath}
% \usepackage{listings}
% \usepackage{multimedia, fancybox}
% \usepackage{calc, amssymb, amsmath, amsfonts}
% \usepackage{pstricks, pst-bar}
% \usepackage{graphicx}
% \usepackage{color}
% \usepackage{caption3} % load caption package kernel first
% ---------- for example pages ----------------
\usepackage{tikz}
\usetikzlibrary{arrows}
% \tikzstyle is deprecated; define the style via \tikzset instead.
\tikzset{block/.style={draw opacity=0.7,line width=1.4cm}}
% ---------------------------------------------
% ---------- various options ----------------
\usepackage[small]{caption}
\DeclareCaptionOption{parskip}[]{} % disable "parskip" caption option
\setbeamercovered{transparent} % enable semi transparent bullet points
% ---------------------------------------------
% ---------- color and theme settings ----------------
\usetheme{Darmstadt}
\usecolortheme[named=SteelBlue]{structure}
\usefonttheme[onlylarge]{structurebold}
\setbeamerfont*{frametitle}{size=\normalsize,series=\bfseries}
%\setbeamertemplate{items}[ball] % round numberings and bullets
%\setbeamertemplate{blocks}[rounded][shadow=true] %rounded boxes with shadows
\setbeamertemplate{navigation symbols}{} %hide navigation symbols
% ---------------------------------------------
% ---------- Footline: author (left) and short title + frame number (right) ----
\setbeamertemplate{footline} {
\begin{beamercolorbox}[wd=0.5\textwidth,ht=3ex,dp=1.5ex,leftskip=.5em,rightskip=.5em]{author in head/foot}%
\usebeamerfont{author in head/foot}%
\hfill \insertshortauthor%
\end{beamercolorbox}%
\vspace*{-4.5ex}\hspace*{0.5\textwidth}%
\begin{beamercolorbox}[wd=0.5\textwidth,ht=3ex,dp=1.5ex,left,leftskip=.5em]
{title in head/foot}%
\usebeamerfont{title in head/foot}%
\insertshorttitle \hfill \hfill \hfill \hfill \insertframenumber%
\end{beamercolorbox} }
% ---------------------------------------------
% ---------- Author, Title, etc. ----------------
\title[] {PRIMO}
\subtitle[]{PRobabilistic Inference MOdules}
\author[]{Manuel Baum, Denis John, \\ Lukas Kettenbach, Maximilian Koch}
\institute[]{Bielefeld University}
\date{\today}
% ---------------------------------------------
% ---------- document start ----------------
\begin{document}
\maketitle
%\begin{frame}{Table of Contents}{}
% \tableofcontents%[pausesections]
%\end{frame}
%\AtBeginSection{\begin{frame}<beamer>{Gliederung} \tableofcontents[currentsection, currentsubsection] \end{frame}}
%\input{010_example}
% One input file per speaker/topic.
\input{input/intro}
\input{input/lukas}
\input{input/denis}
\input{input/manu}
\input{input/max}
\end{document}
\beamer@endinputifotherversion {3.10pt}
\select@language {ngerman}
\beamer@sectionintoc {1}{Introduction}{2}{0}{1}
\beamer@sectionintoc {2}{Dynamic Bayesian Networks}{4}{0}{2}
\beamer@subsectionintoc {2}{1}{Definition}{4}{0}{2}
\beamer@subsectionintoc {2}{2}{Example}{5}{0}{2}
\beamer@subsectionintoc {2}{3}{Inference}{6}{0}{2}
\beamer@sectionintoc {3}{Factor Trees}{9}{0}{3}
\beamer@subsectionintoc {3}{1}{Task Description}{9}{0}{3}
\beamer@subsectionintoc {3}{2}{Factor Elimination}{10}{0}{3}
\beamer@subsectionintoc {3}{3}{Elimination Trees}{10}{0}{3}
\beamer@subsectionintoc {3}{4}{Building Strategies}{10}{0}{3}
\beamer@subsectionintoc {3}{5}{Literature}{10}{0}{3}
......@@ -7,6 +7,8 @@ from primo.reasoning import DiscreteNode
import primo.reasoning.particlebased.ParticleFilterDBN as pf
import numpy
from primo.utils import XMLBIF
import random
# Construct a DynmaicBayesianNetwork
dbn = DynamicBayesNet()
......@@ -32,24 +34,68 @@ N = 1000
T = 10
pos = 0
lastPos = 0
evidence = {}
def get_evidence_function():
    """Return the evidence for the current time slice, then advance the world.

    First samples a (possibly noisy) door-sensor reading for the robot's
    current position via ``simulate_evidence()``, then moves the simulated
    robot one step via ``simulate_next_pos()``.

    Returns:
        dict: mapping of the ``door`` node to "True"/"False".
    """
    # Fixed: the original declared `global evidence` twice and also declared
    # `global pos` although `pos` is never assigned in this function.
    global evidence
    simulate_evidence()
    simulate_next_pos()
    return evidence
def simulate_next_pos():
    """Advance the simulated robot position along the circular corridor.

    Movement model: 90% chance to step forward, 5% to stay put, 5% to slip
    backward. Positions 0..9 form a ring, so the ends wrap around. The
    previous position is remembered in the module global ``lastPos``.
    """
    global pos
    global lastPos
    lastPos = pos
    roll = random.random()
    if roll < 0.05:
        pos -= 1          # 5%: slip one cell backward
    elif roll < 0.1:
        pass              # 5%: stay where we are
    else:
        pos += 1          # 90%: step forward
    # Wrap around the corridor ends (ring of 10 cells).
    if pos == 10:
        pos = 0
    elif pos == -1:
        pos = 9
def simulate_evidence():
    """Simulate the door-sensor reading for the robot's current position.

    Positions 1, 3 and 7 are in front of a door: the sensor normally reports
    "True" there and "False" elsewhere. With probability 0.01 the sensor errs
    and the reading is flipped.

    Returns:
        dict: mapping of the ``door`` node to "True"/"False" (also stored in
        the module global ``evidence``).
    """
    # Fixes vs. the original:
    # - removed the unreachable code after `return evidence` (it was the
    #   intended error handling for the no-door branch; now applied inline),
    # - removed the redundant first `evidence = {door: "True"}` assignment
    #   that was immediately overwritten,
    # - removed the stray `pos = pos + 1`: the position is advanced by
    #   simulate_next_pos(), so incrementing here double-advanced the robot
    #   (TODO confirm against the caller get_evidence_function()).
    global evidence
    global door
    err = random.random() > 0.99  # 1% chance of a faulty sensor reading
    if pos in (1, 3, 7):
        # In front of a door: report "True" unless the sensor errs.
        evidence = {door: "False"} if err else {door: "True"}
    else:
        # No door here: report "False" unless the sensor errs.
        evidence = {door: "True"} if err else {door: "False"}
    return evidence
class RobotParticle(pf.Particle):