diff --git a/INSTALL.txt b/INSTALL.txt index 3a0e5b22631..f024f15274e 100644 --- a/INSTALL.txt +++ b/INSTALL.txt @@ -35,6 +35,11 @@ dependencies, and all are freely available online. * `IPython`_ (optional): A convenient python shell coming with parallel computing facilities. +* `pyTables`_ (optional): An interface to the HDF5 library for storing datasets + in binary format. + + + There are prebuilt distributions that include all the needed dependencies. For Mac OS X and Linux users, we recommend the `ActiveState`_ distributions. Windows users should download and install `Enthought Python`_. The Mac OS X @@ -67,6 +72,10 @@ tested with PyMC but may work nonetheless. .. _`IPython`: http://ipython.scipy.org/ +.. _`pyTables`: + http://www.pytables.org/moin + + Platform-specific instructions ============================== @@ -154,7 +163,7 @@ To make sure everything is working correctly, open a python shell and type:: You should see a lot of tests being run, and messages appear if errors are raised or if some tests fail. In case this happens (it shouldn't), please report -the problems on the issue tracker, specifying the version you are using and the +the problems on the `issue tracker`_, specifying the version you are using and the environment. Some of the tests require SciPy, if it is not installed on your system, you should not worry too much about failing tests. @@ -163,4 +172,8 @@ Bugs and feature requests ========================= Report problems with the installation, bugs in the code or feature request at -the issue tracker at http://code.google.com/p/pymc/issues/list . +the `issue tracker`_. + +.. _`issue tracker`: + http://code.google.com/p/pymc/issues/list + diff --git a/README.txt b/README.txt index 135aaa8ad72..24ac48bbaa4 100644 --- a/README.txt +++ b/README.txt @@ -47,7 +47,7 @@ Features What's new in 2.0 ================= -* New, more flexible object model and syntax. +* New, more flexible object model and syntax (not backward compatible). * Reduced redundant computations: only relevant log-probability terms are computed, and these are cached. @@ -89,14 +89,13 @@ From a python shell, type:: S.sample(iter=10000, burn=5000, thin=2) where problem_definition is a module or a dictionary containing Node, Data and -Parameter instances defining your problem. Read the `user guide`_ for a -complete description of the package, classes and some examples to get started. +Parameter instances defining your problem. History ======= -PyMC began development in 2003, as an effort to generalize the process of building Metropolis-Hastimgs samplers, with an aim to making Markov chain Monte Carlo more accessible to non-statisticians (particularly ecologists). The choice to develop PyMC as a python module, rather than a standalone application, allowed the use MCMC methods in a larger modeling framework, in contrast to the BUGS environment. By 2005, PyMC was reliable enough for version 1.0 to be released to the public. A small group of regular users, most associated with the University of Georgia, provided much of the feedback necessary for the refinement of PyMC to its current state. +PyMC began development in 2003, as an effort to generalize the process of building Metropolis-Hastings samplers, with the aim of making Markov chain Monte Carlo more accessible to non-statisticians (particularly ecologists). 
The choice to develop PyMC as a python module, rather than a standalone application, allowed the use of MCMC methods in a larger modeling framework, in contrast to the BUGS environment. By 2005, PyMC was reliable enough for version 1.0 to be released to the public. A small group of regular users, most associated with the University of Georgia, provided much of the feedback necessary for the refinement of PyMC to its current state. In 2006, David Huard and Anand Patil joined Chris Fonnesbeck on the development team for PyMC 2.0. This iteration of the software strives for more flexibility, better performance and a better end-user experience than any previous version of PyMC. @@ -110,5 +109,3 @@ See the `INSTALL.txt`_ file. .. _`INSTALL.txt`: ./INSTALL.txt -.. _`user guide`: - docs/pdf/new_interface.pdf diff --git a/builddocs b/builddocs index 3b842e1543e..fa0189a2885 100755 --- a/builddocs +++ b/builddocs @@ -1,12 +1,12 @@ #!/usr/bin/env bash #epydoc --verbose --debug --config epydoc.conf - +#cp docs/pdf/pymc.distributions-module.tex docs/ # Make manual cd docs -rst2latex.py ../README.txt | ./extract.py -s \\\\setlength{\\\\locallinewidth}{\\\\linewidth} -e \\hypertarget{installation}{} -o README.tex -rst2latex.py ../INSTALL.txt | ./extract.py -s \\\\setlength{\\\\locallinewidth}{\\\\linewidth} -e \\end{document} -o INSTALL.tex -rst2latex.py ../PyMC/database/README.txt | ./extract.py -s \\\\setlength{\\\\locallinewidth}{\\\\linewidth} -e \\end{document} -o database.tex +rst2latex ../README.txt | ./extract.py -s \\\\setlength{\\\\locallinewidth}{\\\\linewidth} -e \\hypertarget{installation}{} -o README.tex +rst2latex ../INSTALL.txt | ./extract.py -s \\\\setlength{\\\\locallinewidth}{\\\\linewidth} -e \\end{document} -o INSTALL.tex +rst2latex ../pymc/database/README.txt | ./extract.py -s \\\\setlength{\\\\locallinewidth}{\\\\linewidth} -e \\end{document} -o database.tex mkdir pdf pdflatex -output-directory=pdf guide2.0 diff --git a/docs/INSTALL.tex b/docs/INSTALL.tex index 8d15783cd32..93f945a4930 100644 --- a/docs/INSTALL.tex +++ b/docs/INSTALL.tex @@ -36,6 +36,10 @@ \section*{Dependencies} \href{http://ipython.scipy.org/}{IPython} (optional): A convenient python shell coming with parallel computing facilities. +\item {} +\href{http://www.pytables.org/moin}{pyTables} (optional): An interface to the HDF5 library for storing datasets +in binary format. + \end{itemize} There are prebuilt distributions that include all the needed dependencies. For @@ -215,7 +219,7 @@ \section*{Running the test suite} You should see a lot of tests being run, and messages appear if errors are raised or if some tests fail. In case this happens (it shouldn't), please report -the problems on the issue tracker, specifying the version you are using and the +the problems on the \href{http://code.google.com/p/pymc/issues/list}{issue tracker}, specifying the version you are using and the environment. Some of the tests require SciPy, if it is not installed on your system, you should not worry too much about failing tests. @@ -228,6 +232,6 @@ \section*{Bugs and feature requests} \label{bugs-and-feature-requests} Report problems with the installation, bugs in the code or feature request at -the issue tracker at \href{http://code.google.com/p/pymc/issues/list}{http://code.google.com/p/pymc/issues/list} . +the \href{http://code.google.com/p/pymc/issues/list}{issue tracker}. 
\ \ No newline at end of file diff --git a/docs/README.tex b/docs/README.tex index 924d6fcb9bf..c7d30421e7b 100644 --- a/docs/README.tex +++ b/docs/README.tex @@ -63,7 +63,7 @@ \section*{What's new in 2.0} \label{what-s-new-in-2-0} \begin{itemize} \item {} -New, more flexible object model and syntax. +New, more flexible object model and syntax (not backward compatible). \item {} Reduced redundant computations: only relevant log-probability terms are @@ -118,8 +118,7 @@ \section*{Usage} }\end{quote} where problem{\_}definition is a module or a dictionary containing Node, Data and -Parameter instances defining your problem. Read the \href{docs/pdf/new_interface.pdf}{user guide} for a -complete description of the package, classes and some examples to get started. +Parameter instances defining your problem. %___________________________________________________________________________ @@ -129,7 +128,7 @@ \section*{Usage} \section*{History} \label{history} -PyMC began development in 2003, as an effort to generalize the process of building Metropolis-Hastimgs samplers, with an aim to making Markov chain Monte Carlo more accessible to non-statisticians (particularly ecologists). The choice to develop PyMC as a python module, rather than a standalone application, allowed the use MCMC methods in a larger modeling framework, in contrast to the BUGS environment. By 2005, PyMC was reliable enough for version 1.0 to be released to the public. A small group of regular users, most associated with the University of Georgia, provided much of the feedback necessary for the refinement of PyMC to its current state. +PyMC began development in 2003, as an effort to generalize the process of building Metropolis-Hastings samplers, with the aim of making Markov chain Monte Carlo more accessible to non-statisticians (particularly ecologists). The choice to develop PyMC as a python module, rather than a standalone application, allowed the use of MCMC methods in a larger modeling framework, in contrast to the BUGS environment. By 2005, PyMC was reliable enough for version 1.0 to be released to the public. A small group of regular users, most associated with the University of Georgia, provided much of the feedback necessary for the refinement of PyMC to its current state. In 2006, David Huard and Anand Patil joined Chris Fonnesbeck on the development team for PyMC 2.0. This iteration of the software strives for more flexibility, better performance and a better end-user experience than any previous version of PyMC. diff --git a/docs/database.tex b/docs/database.tex index f2ecee666d6..cb2e82ea2b7 100644 --- a/docs/database.tex +++ b/docs/database.tex @@ -1,6 +1,6 @@ -By default, PyMC keeps the sampled data in memory and keeps no trace of it on the hard drive. To save this data to disk, PyMC provides different strategies, from simple ASCII files to compressed binary formats. These strategies are implemented different \emph{database backends}, behaving identically from the user's perspective. In the following, the interface to these backends is discussed, and a description of the different backends is given. +By default, PyMC keeps the sampled data in memory and leaves no trace of it on the hard drive. To save this data to disk, PyMC provides several storage strategies, which we refer to as \emph{database backends}. All these backends provide the same user interface, making it trivial to switch from one backend to another. In the following, this common interface is presented, along with an individual description of each backend. 
%___________________________________________________________________________ @@ -10,31 +10,16 @@ \section*{Accessing Sampled Data: User Interface} \label{accessing-sampled-data-user-interface} -The choice of database backend is made when a sampler is created using the \titlereference{db} keyword: +The database backend is selected by the \titlereference{db} keyword: \begin{quote}{\ttfamily \raggedright \noindent -S~=~MCMC(DisasterModel,~db='txt',~dirname='test') +S~=~MCMC(DisasterModel,~db='ram') }\end{quote} -This instructs the sampler to tally samples in txt files stored in a directory named \titlereference{test}. Other choices for the database are given in the table below, the default being \titlereference{ram}. When the \titlereference{sample} method is called, a \titlereference{chain} is created storing the sampled variables. The data in this chain can be accessed for each variable using its trace object -\begin{quote}{\ttfamily \raggedright \noindent -S.e.trace() -}\end{quote} - -When \titlereference{S.db.close()} is called, the data is flushed to disk. That is, directories are created for each chain, with samples from each stochastic variable in a separate file. To access this data during a following session, each database provides a \titlereference{load} function instantiating a \titlereference{Database} object -\begin{quote}{\ttfamily \raggedright \noindent -DB~=~Database.txt.load('test') -}\end{quote} +Here, we instructed the MCMC sampler to keep the trace in the computer's live memory. This means that when the Python session closes, all data will be lost. This is the default backend. -This object can then be linked to a model definition using +Each time MCMC's \titlereference{sample} method is called, a \titlereference{chain} is created to store the sampled variables. The data in this chain can be accessed for each variable using the call method of its trace attribute: \begin{quote}{\ttfamily \raggedright \noindent -S~=~Sampler(DisasterSampler,~db=DB) -}\end{quote} - -For some databases (\titlereference{hdf5}, \titlereference{pickle}), loading an existing database restores the previous state of the sampler. That is, the attribtues of the Sampler, its Stochastic parameters and StepMethods are all set to the value they had at the time \titlereference{D.db.close()} was called. - -The \titlereference{trace} object has the following signature .. {[}{\#}{]}: -\begin{quote}{\ttfamily \raggedright \noindent -trace(self,~~burn=0,~thin=1,~chain=-1,~slicing=None) +S.e.trace(burn=0,~thin=1,~chain=-1,~slicing=None) }\end{quote} with arguments having the following meaning: @@ -45,7 +30,7 @@ \section*{Accessing Sampled Data: User Interface} \item[{thin}] \leavevmode (\textbf{int}) -Number of samples to step. +The stride, i.e., the number of samples to step over between successive returned values. \item[{chain}] \leavevmode (\textbf{int or None}) @@ -56,7 +41,34 @@ \section*{Accessing Sampled Data: User Interface} Slice object used to parse the samples. Overrides burn and thin parameters. \end{description} -% [#]: The `trace` attribute of stochastic parameters is in fact an instance of a Trace class, defined for each backend. This class has a method called `gettrace` that returns the trace of the object, and which is called by `trace()` . 
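+ +For instance, using the signature above with the \titlereference{DisasterModel} example, the following sketch (any equivalent argument values work the same way) discards the first 5000 samples of \titlereference{e} and returns every second remaining sample: +\begin{quote}{\ttfamily \raggedright \noindent +S.e.trace(burn=5000,~thin=2) +}\end{quote} 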
+ + +%___________________________________________________________________________ + +\hypertarget{loading-data-from-a-previous-session}{} +\pdfbookmark[0]{Loading data from a previous session}{loading-data-from-a-previous-session} +\section*{Loading data from a previous session} +\label{loading-data-from-a-previous-session} + +To store a copy of the trace on the hard disk, a number of backends are available: \titlereference{txt}, \titlereference{pickle}, \titlereference{hdf5}, \titlereference{sqlite} and \titlereference{mysql}. These all write the data to disk, in such a way that it can be loaded back in a following session and appended to. So for instance, to save data in ASCII format, we would do: +\begin{quote}{\ttfamily \raggedright \noindent +S~=~MCMC(DisasterModel,~db='txt',~dirname='disaster{\_}data')~\\ +S.sample(10000)~\\ +S.db.close() +}\end{quote} + +When \titlereference{S.db.close()} is called, the data is flushed to disk. That is, directories are created for each chain, with samples from each stochastic variable in a separate file. To access this data during a following session, each database provides a \titlereference{load} function instantiating a \titlereference{Database} object +\begin{quote}{\ttfamily \raggedright \noindent +DB~=~Database.txt.load('disaster{\_}data') +}\end{quote} + +This \titlereference{Database} object can then be linked to a model definition using +\begin{quote}{\ttfamily \raggedright \noindent +S~=~Sampler(DisasterSampler,~db=DB)~\\ +S.sample(10000) +}\end{quote} + +For some databases (\titlereference{hdf5}, \titlereference{pickle}), loading an existing database restores the previous state of the sampler. That is, the attributes of the Sampler, its Stochastic parameters and StepMethods are all set to the value they had at the time \titlereference{S.db.close()} was called. %___________________________________________________________________________ @@ -86,7 +98,7 @@ \subsection*{ram} \subsection*{txt} \label{txt} -The \titlereference{txt} backend is a modified \titlereference{ram} backend, the only difference being that when the database is closed, the data is written to disk in ascii files. More precisely, the data for each chain is stored in a directory called \titlereference{Chain{\_}{\textless}{\#}{\textgreater}}, the trace for each variable being stored in a file names`{\textless}variable name{\textgreater}.txt`. This backend makes it easy to load the data using another application, but for large datasets, files tend to be embarassingly large and slow to load into memory. +With the \titlereference{txt} backend, the data is written to disk in ASCII files when the database's \titlereference{close()} method is called. More precisely, the data for each chain is stored in a directory called \titlereference{Chain{\_}{\textless}{\#}{\textgreater}}, the trace for each variable being stored in a file named \titlereference{{\textless}variable name{\textgreater}.txt}. This backend makes it easy to load the data using another application, but for large datasets, files tend to be embarrassingly large and slow to load into memory. 
+As its name implies, the \titlereference{pickle} database relies on the \titlereference{cPickle} module to save the trace objects. Use of this backend is appropriate for small-scale, short-lived projects. For longer term or larger projects, the \titlereference{pickle} backend should be avoided since generated files might be unreadable across different Python versions. + + +%___________________________________________________________________________ + +\hypertarget{hdf5}{} +\pdfbookmark[1]{hdf5}{hdf5} +\subsection*{hdf5} +\label{hdf5} + +The hdf5 backend uses \href{http://www.pytables.org/moin}{pyTables} to save data in binary HDF5 format. The main advantage of this backend is that data is flushed regularly to disk, reducing memory usage and allowing sampling of datasets much larger than the available RAM. Data access is also fast. For this backend to work, pyTables must be installed, which in turn requires the hdf5 library. %___________________________________________________________________________ @@ -106,7 +128,7 @@ \subsection*{pickle} \subsection*{sqlite} \label{sqlite} -Chris ... +The sqlite backend is based on the python module sqlite3. It is not as mature as the other backends, in the sense that it does not support saving/restoring of state or plug-and-play reloading. %___________________________________________________________________________ @@ -116,17 +138,7 @@ \subsection*{sqlite} \subsection*{mysql} \label{mysql} -Chris ... - - -%___________________________________________________________________________ - -\hypertarget{hdf5}{} -\pdfbookmark[1]{hdf5}{hdf5} -\subsection*{hdf5} -\label{hdf5} - -The hdf5 backend uses pyTables to save data in binary HDF5 format. The main advantage of this backend is that data is flushed regularly to disk, reducing memory usage and allowing sampling of datasets much larger than the available memory. Data access is also very fast. +The mysql backend is based on the MySQLdb python module. It is also not as mature as the other backends. \leavevmode \begin{longtable}[c]{|p{0.133\locallinewidth}|p{0.447\locallinewidth}|p{0.307\locallinewidth}|} @@ -144,13 +156,12 @@ \subsection*{hdf5} no{\_}trace & Do not tally samples at all. -Use only for testing purposes. & \\ \hline ram & -Store samples in memory. +Store samples in live memory. & \\ \hline @@ -166,6 +177,14 @@ \subsection*{hdf5} & \\ \hline +hdf5 + & +Store samples in the HDF5 format. + & +pytables ({\textgreater}2.0), libhdf5 + \\ +\hline + sqlite & Store samples in a sqlite database. @@ -181,14 +200,6 @@ \subsection*{hdf5} MySQLdb \\ \hline - -hdf5 - & -Store samples in the HDF5 format. - & -pytables ({\textgreater}2.0), libhdf5 - \\ -\hline \end{longtable} For more information about individual backends, refer to the \href{docs/API.pdf}{API} documentation. 
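+ +As a minimal sketch of switching backends, only the \titlereference{db} keyword changes; backend-specific arguments (such as file or directory names, omitted here) vary from one backend to another and are described in the API documentation: +\begin{quote}{\ttfamily \raggedright \noindent +S~=~MCMC(DisasterModel,~db='hdf5')~\\ +S.sample(10000)~\\ +S.db.close() +}\end{quote} 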
diff --git a/docs/distributions.py b/docs/distributions.py new file mode 100644 index 00000000000..18214a8faee --- /dev/null +++ b/docs/distributions.py @@ -0,0 +1,59 @@ +__docformat__='reStructuredText' +import pymc +""" +import re +objs = dir(pymc) +likelihood_pat = re.compile(r'\w+_like') +likelihoods = [] +for f in objs: + try: + match = likelihood_pat.match(f) + if match: + statement = 'from pymc import %s'%f + print statement + likelihoods.append(f) + exec(statement) + + except: + pass +print likelihoods +""" +__all__=['arlognormal_like', 'bernoulli_like', 'beta_like', 'binomial_like', 'categorical_like', 'cauchy_like', 'chi2_like', 'dirichlet_like', 'discrete_uniform_like', 'exponential_like', 'exponweib_like', 'gamma_like', 'geometric_like', 'gev_like', 'half_normal_like', 'hypergeometric_like', 'inverse_gamma_like', 'lognormal_like', 'mod_categor_like', 'mod_multinom_like', 'multinomial_like', 'multivariate_hypergeometric_like', 'mv_normal_chol_like', 'mv_normal_cov_like', 'mv_normal_like', 'negative_binomial_like', 'normal_like', 'one_over_x_like', 'poisson_like', 'skew_normal_like', 'truncnorm_like', 'uniform_like', 'uninformative_like', 'weibull_like', 'wishart_cov_like', 'wishart_like'] + + +from pymc import arlognormal_like +from pymc import bernoulli_like +from pymc import beta_like +from pymc import binomial_like +from pymc import categorical_like +from pymc import cauchy_like +from pymc import chi2_like +from pymc import dirichlet_like +from pymc import discrete_uniform_like +from pymc import exponential_like +from pymc import exponweib_like +from pymc import gamma_like +from pymc import geometric_like +from pymc import gev_like +from pymc import half_normal_like +from pymc import hypergeometric_like +from pymc import inverse_gamma_like +from pymc import lognormal_like +from pymc import mod_categor_like +from pymc import mod_multinom_like +from pymc import multinomial_like +from pymc import multivariate_hypergeometric_like +from pymc import mv_normal_chol_like +from pymc import mv_normal_cov_like +from pymc import mv_normal_like +from pymc import negative_binomial_like +from pymc import normal_like +from pymc import one_over_x_like +from pymc import poisson_like +from pymc import skew_normal_like +from pymc import truncnorm_like +from pymc import uniform_like +from pymc import uninformative_like +from pymc import weibull_like +from pymc import wishart_cov_like +from pymc import wishart_like diff --git a/docs/guide2.0.tex b/docs/guide2.0.tex index e196e31960a..5a8d560dc43 100644 --- a/docs/guide2.0.tex +++ b/docs/guide2.0.tex @@ -7,13 +7,13 @@ \documentclass[]{manual} % Use utf-8 encoding for foreign characters -\usepackage[utf8]{inputenc} +%\usepackage[utf8]{inputenc} % Setup for fullpage use \usepackage{fullpage} \usepackage{amsmath} \usepackage{epsfig} -\usepackage{pdfsync} +%\usepackage{pdfsync} % Flexible citation syntax \usepackage{natbib} @@ -37,7 +37,7 @@ % This is now the recommended way for checking for PDFLaTeX: %\usepackage{ifpdf} -% Enable hyeprlinks +% Enable hyperlinks \usepackage[pdfpagemode=FullScreen,colorlinks=true,linkcolor=red]{hyperref} %\ifpdf @@ -46,6 +46,23 @@ %\usepackage{graphicx} %\fi +%%% EPYDOC STUFF %%% +\usepackage{underscore} +\usepackage[english]{babel} +\usepackage{alltt, parskip, fancyhdr, boxedminipage} +\usepackage{makeidx, multirow, longtable, tocbibind, amssymb} +\usepackage{fullpage} +\usepackage[usenames]{color} +\usepackage{ifthen} +\usepackage{ae} +\usepackage{aeguill} +\usepackage{shortvrb} +\usepackage{ucs} 
+\usepackage{tabularx} +\input{epydocheader} +%%% END OF EPYDOC STUFF %%% + + \title{PyMC 2.0 User's Guide \\ Installation and tutorial} \author{ Christopher Fonnesbeck\\ David Huard \\ Anand Patil } @@ -58,7 +75,7 @@ \newcommand{\rubric}[1]{\subsection*{~\hfill {\it #1} \hfill ~}} \newcommand{\titlereference}[1]{\textsl{#1}} \newlength{\locallinewidth} -\setlength{\locallinewidth}{.5\textwidth} +\setlength{\locallinewidth}{7in} %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% @@ -113,7 +130,7 @@ \chapter{Probability distributions} \item A function that computes the expectation associated with the distribution: \texttt{normal_expval()}. \item A \texttt{Stochastic} subclass generated from the distribution: \texttt{Normal}. \end{itemize} -\input{distributions} +\input{pdf/distributions-module} \bibliographystyle{plainnat} \bibliography{pymc} diff --git a/docs/modelbuilding.tex b/docs/modelbuilding.tex index 6e3f53f9c61..7426f8f23c0 100644 --- a/docs/modelbuilding.tex +++ b/docs/modelbuilding.tex @@ -3,7 +3,7 @@ % \section{Summary}\label{sec:PyMCObjects} Bayesian inference begins with specification of a probability model relating unknown variables to data. PyMC provides three basic building blocks for Bayesian probability models: \texttt{Stochastic}, \texttt{Deterministic} and \texttt{Potential}. -A \texttt{Stochastic} object represents a variable whose value is not completely determined by its parents, and a \texttt{Deterministic} object represents a variable that is determined by its parents. \texttt{Stochastic} and \texttt{Deterministic} are subclasses of \texttt{Variable}. The third basic class, representing `factor potentials' (\cite{dawidmarkov,jordangraphical}), represents a arbitrary log-probability terms. \texttt{Potential} and \texttt{Variable} are subclasses of \texttt{Node}. +A \texttt{Stochastic} object represents a variable whose value is not completely determined by its parents, and a \texttt{Deterministic} object represents a variable that is determined by its parents. \texttt{Stochastic} and \texttt{Deterministic} are subclasses of \texttt{Variable}. The third basic class represents `factor potentials' (\cite{dawidmarkov,jordangraphical}), which are arbitrary log-probability terms. \texttt{Potential} and \texttt{Variable} are subclasses of \texttt{Node}. % PyMC also provides container classes for variables to make it easier to program of certain dependency situations, such as when a variable is defined by its dependence on an entire Markov chain. @@ -223,7 +223,7 @@ \subsection*{Creation of deterministic variables} value[switchpoint:] = late_rate return value \end{verbatim} -The function supplied should return a new value (which may be any object) for the variable. Arguments' keys and values are converted into a parent dictionary as with \texttt{Stochastic}'s short interface. The function's \texttt{__name__} is passed on to the variable. The \texttt{deterministic} decorator can take \texttt{trace} and \texttt{verbose} arguments, like the \texttt{stochastic} decorator. +The function supplied should return a new value (which may be any object) for the variable. Arguments' keys and values are converted into a parent dictionary as with \texttt{Stochastic}'s short interface. The function's \mbox{\texttt{__name__}} is passed on to the variable. The \texttt{deterministic} decorator can take \texttt{trace} and \texttt{verbose} arguments, like the \texttt{stochastic} decorator. 
\item[Direct] The same variable could be created directly as follows: \begin{verbatim} def r_eval(switchpoint = s, early_rate = e, late_rate = l): diff --git a/docs/modelfitting.tex b/docs/modelfitting.tex index 7c97e7cd3d2..d4d07d796c7 100644 --- a/docs/modelfitting.tex +++ b/docs/modelfitting.tex @@ -4,9 +4,9 @@ Currently, PyMC provides three classes that actually fit models: \begin{itemize} + \item \texttt{MCMC}, which coordinates Markov Chain Monte Carlo algorithms. The actual work of updating stochastic variables conditional on the rest of the model is done by \texttt{StepMethod} instances, which are described in this chapter. \item \texttt{MAP}, which computes maximum a posteriori estimates. \item \texttt{NormApprox}, which computes the `normal approximation' \cite{gelman}: the joint distribution of all stochastic variables in a model is approximated as normal using local information at the maximum a posteriori estimate. - \item \texttt{MCMC}, which is coordinates Markov Chain Monte Carlo algorithms. The actual work of updating stochastic variables conditional on the rest of the model is done by \texttt{StepMethod} instances, which are described in this chapter. \end{itemize} This set will hopefully grow in future releases. @@ -117,7 +117,7 @@ \section*{The \texttt{Sampler} class} \label{sec:Sampler} \item[\texttt{halt()}:] Truncate the database and clean up. \end{description} \item[\texttt{tally()}:] Write all variables' current values to the database. - \item[\texttt{draw()}:] Not currently used. In future Monte Carlo fitting methods that aren't MCMC, such as importance samplers, the \texttt{draw()} method will be responsible for drawing approximate samples from the joint distribution (by setting the values all the stochastic variables in the model). + \item[\texttt{draw()}:] Not currently used. In future Monte Carlo fitting methods that aren't MCMC, such as importance samplers, the \texttt{draw()} method will be responsible for drawing approximate samples from the joint distribution (by setting the values of all the stochastic variables in the model). \item[\texttt{save\_state()}:] Saves the current state of the sampler, including all stochastics, to the database. This allows the sampler to be reconstituted at a later time to resume sampling. \item[\texttt{restore\_state()}:] Restores the sampler to the state stored in the database. \item[\texttt{plot()}:] Generates densities (or histograms) and trace plots of all nodes in the sampler. @@ -175,7 +175,7 @@ \section*{Markov chain Monte Carlo: the \texttt{MCMC} class} \label{sec:mcmc} \pdfbookmark[0]{The MCMC class}{mcmc} \texttt{MCMC} is a subclass of \texttt{Sampler}. At the beginning of a sampling loop, it assigns a \texttt{StepMethod} instance (section \ref{sec:stepmethod}) to each stochastic variable for which the user has not created one. Step methods are assigned as follows: each \texttt{StepMethod} subclass in existence is allowed to inspect the variable in question and determine its competence to handle the variable, on a scale of 0 to 3. An instance of the highest bidder is created to handle the variable. -MCMC samplers have the following methods, in addition those of \texttt{Sampler}: +MCMC samplers have the following methods, in addition to those of \texttt{Sampler}: \begin{description} \item[\texttt{sample(iter, burn=0, thin=1, tune\_interval=1000, verbose=0)}:] The \texttt{iter} argument controls the total number of MCMC iterations. 
No tallying will be done during the first \texttt{burn} iterations; these samples will be forgotten. After this burn-in period, tallying will be done each \texttt{thin} iterations. Tuning will be done each \texttt{tune\_interval} iterations, even after burn-in is complete \cite{tuning,Haario:2001lr}. \item[\texttt{isample(iter, burn=0, thin=1, tune\_interval=1000, verbose=0)}:] Interactive sampling; see \texttt{Sampler.isample}. diff --git a/docs/pymc.distributions-module.tex b/docs/pymc.distributions-module.tex new file mode 100644 index 00000000000..1a44262f79d --- /dev/null +++ b/docs/pymc.distributions-module.tex @@ -0,0 +1,4291 @@ +% +% API Documentation for API Documentation +% Module pymc.distributions +% +% Generated by epydoc 3.0beta1 +% [Tue Apr 29 09:12:28 2008] +% + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +%% Module Description %% +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}|(} +\section{Module pymc.distributions} + + \label{pymc:distributions} + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +%% Functions %% +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + + \subsection{Functions} + + \label{pymc:distributions:bind_size} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.bind\_size \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{bind\_size}(\textit{randfun}, \textit{size}) + + \end{boxedminipage} + + \label{pymc:distributions:new_dist_class} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.new\_dist\_class \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{new\_dist\_class}(*\textit{new\_class\_args}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Returns a new class from a distribution. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:stochastic_from_dist} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.stochastic\_from\_dist \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{stochastic\_from\_dist}(\textit{name}, \textit{logp}, \textit{random}=\texttt{None}, \textit{dtype}=\texttt{np.float}, \textit{mv}=\texttt{False}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Return a Stochastic subclass made from a particular distribution. +\begin{quote} +\end{quote} + +Arguments can be passed in positionally; in this case, argument order is: self{\_}name, parents. +value is set to None by default, and can only be provided as a keyword argument. 
+ \vspace{1ex} + + \textbf{Example} + \begin{quote} + \begin{alltt} +\pysrcprompt{{\textgreater}{\textgreater}{\textgreater} }Exponential = stochastic\_from\_dist(\pysrcstring{'exponential'}, +\pysrcoutput{ logp=exponential\_like,} +\pysrcoutput{ random=rexponential,} +\pysrcoutput{ dtype=np.float,} +\pysrcoutput{ mv=False)} +\pysrcoutput{}\pysrcprompt{{\textgreater}{\textgreater}{\textgreater} }A = Exponential(self\_name, value, beta)\end{alltt} + \end{quote} + + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:Vectorize} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.Vectorize \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{Vectorize}(\textit{f}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Wrapper to vectorize a scalar function. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:randomwrap} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.randomwrap \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{randomwrap}(\textit{func}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Decorator for random value generators + +Allows passing of sequence of parameters, as well as a size argument. + +Convention: +\begin{quote} +\begin{itemize} +\item {} +If size=1 and the parameters are all scalars, return a scalar. + +\item {} +If size=1, the random variates are 1D. + +\item {} +If the parameters are scalars and size {\textgreater} 1, the random variates are 1D. + +\item {} +If size {\textgreater} 1 and the parameters are sequences, the random variates are +aligned as (size, max(length)), where length is the parameters size. + +\end{itemize} +\end{quote} + \vspace{1ex} + + \textbf{Example} + \begin{quote} + \begin{alltt} +\pysrcprompt{{\textgreater}{\textgreater}{\textgreater} }rbernoulli(.1) +\pysrcoutput{0} +\pysrcoutput{}\pysrcprompt{{\textgreater}{\textgreater}{\textgreater} }rbernoulli([.1,.9]) +\pysrcoutput{asarray([0, 1])} +\pysrcoutput{}\pysrcprompt{{\textgreater}{\textgreater}{\textgreater} }rbernoulli(.9, size=2) +\pysrcoutput{asarray([1, 1])} +\pysrcoutput{}\pysrcprompt{{\textgreater}{\textgreater}{\textgreater} }rbernoulli([.1,.9], 2) +\pysrcoutput{asarray([[0, 1],} +\pysrcoutput{ [0, 1]])}\end{alltt} + \end{quote} + + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:debugwrapper} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.debugwrapper \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{debugwrapper}(\textit{func}, \textit{name}) + + \end{boxedminipage} + + \label{pymc:distributions:constrain} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.constrain \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{constrain}(\textit{value}, \textit{lower}=\texttt{-np.Inf}, \textit{upper}=\texttt{np.Inf}, \textit{allow\_equal}=\texttt{False}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Apply interval constraint on stochastic value. 
+ \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:standardize} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.standardize \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{standardize}(\textit{x}, \textit{loc}=\texttt{0}, \textit{scale}=\texttt{1}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Standardize x + +Return (x-loc)/scale + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:gammaln} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.gammaln \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{gammaln}(\textit{x}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Logarithm of the Gamma function + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:expand_triangular} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.expand\_triangular \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{expand\_triangular}(\textit{X}, \textit{k}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Expand flattened triangular matrix. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:GOFpoints} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.GOFpoints \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{GOFpoints}(\textit{x}, \textit{y}, \textit{expval}, \textit{loss}) + + \end{boxedminipage} + + \label{pymc:distributions:rarlognormal} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rarlognormal \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{rarlognormal}(\textit{a}, \textit{sigma}, \textit{rho}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Autoregressive normal random variates. + +If a is a scalar, generates one series of length size. +If a is a sequence, generates size series of the same length +as a. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:arlognormal_like} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.arlognormal\_like \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{arlognormal\_like}(\textit{x}, \textit{a}, \textit{sigma}, \textit{rho}, \textit{beta}=\texttt{1}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Autoregressive lognormal log-likelihood. +\begin{equation*}\begin{split}x_i & = a_i \exp(e_i) \\e_i & = \rho e_{i-1} + \epsilon_i\end{split}\end{equation*} +where $\epsilon_i \sim N(0,\sigma)$. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:rbernoulli} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rbernoulli \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{rbernoulli}(\textit{p}, \textit{size}=\texttt{1}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Random Bernoulli variates. 
+ \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:bernoulli_expval} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.bernoulli\_expval \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{bernoulli\_expval}(\textit{p}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Expected value of bernoulli distribution. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:bernoulli_like} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.bernoulli\_like \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{bernoulli\_like}(\textit{x}, \textit{p}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Bernoulli log-likelihood. + +The Bernoulli distribution describes the probability of successes (x=1) and +failures (x=0). +\begin{equation*}\begin{split}f(x \mid p) = p^{x} (1-p)^{1-x}\end{split}\end{equation*} \vspace{1ex} + + \textbf{Parameters} + \begin{quote} + \begin{Ventry}{x} + + \item[x] + + +Series of successes (1) and failures (0). $x=0,1$ + \item[p] + + +Probability of success. $0 < p < 1$ + \end{Ventry} + + \end{quote} + + \vspace{1ex} + + \textbf{Example} + \begin{quote} + \begin{alltt} +\pysrcprompt{{\textgreater}{\textgreater}{\textgreater} }bernoulli\_like([0,1,0,1], .4) +\pysrcoutput{-2.8542325496673584}\end{alltt} + \end{quote} + + \vspace{1ex} + +\textbf{Note:} \begin{itemize} +\item {} +$E(x)= p$ + +\item {} +$Var(x)= p(1-p)$ + +\end{itemize} + + + \end{boxedminipage} + + \label{pymc:distributions:rbeta} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rbeta \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{rbeta}(\textit{alpha}, \textit{beta}, \textit{size}=\texttt{1}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Random beta variates. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:beta_expval} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.beta\_expval \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{beta\_expval}(\textit{alpha}, \textit{beta}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Expected value of beta distribution. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:beta_like} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.beta\_like \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{beta\_like}(\textit{x}, \textit{alpha}, \textit{beta}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Beta log-likelihood. 
+\begin{equation*}\begin{split}f(x \mid \alpha, \beta) = \frac{\Gamma(\alpha + \beta)}{\Gamma(\alpha) \Gamma(\beta)} x^{\alpha - 1} (1 - x)^{\beta - 1}\end{split}\end{equation*} \vspace{1ex} + + \textbf{Parameters} + \begin{quote} + \begin{Ventry}{xxxxx} + + \item[x] + + +0 {\textless} x {\textless} 1 + \textit{(type=float)} + + \item[alpha] + + +{\textgreater} 0 + \textit{(type=float)} + + \item[beta] + + +{\textgreater} 0 + \textit{(type=float)} + + \end{Ventry} + + \end{quote} + + \vspace{1ex} + + \textbf{Example} + \begin{quote} + \begin{alltt} +\pysrcprompt{{\textgreater}{\textgreater}{\textgreater} }beta\_like(.4,1,2) +\pysrcoutput{0.18232160806655884}\end{alltt} + \end{quote} + + \vspace{1ex} + +\textbf{Note:} \begin{itemize} +\item {} +$E(X)=\frac{\alpha}{\alpha+\beta}$ + +\item {} +$Var(X)=\frac{\alpha \beta}{(\alpha+\beta)^2(\alpha+\beta+1)}$ + +\end{itemize} + + + \end{boxedminipage} + + \label{pymc:distributions:rbinomial} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rbinomial \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{rbinomial}(\textit{n}, \textit{p}, \textit{size}=\texttt{1}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Random binomial variates. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:binomial_expval} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.binomial\_expval \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{binomial\_expval}(\textit{n}, \textit{p}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Expected value of binomial distribution. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:binomial_like} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.binomial\_like \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{binomial\_like}(\textit{x}, \textit{n}, \textit{p}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Binomial log-likelihood. The discrete probability distribution of the +number of successes in a sequence of n independent yes/no experiments, +each of which yields success with probability p. +\begin{equation*}\begin{split}f(x \mid n, p) = \frac{n!}{x!(n-x)!} p^x (1-p)^{n-x}\end{split}\end{equation*} \vspace{1ex} + + \textbf{Parameters} + \begin{quote} + \begin{Ventry}{x} + + \item[x] + + +Number of successes, {\textgreater} 0. + \textit{(type=float)} + + \item[n] + + +Number of Bernoulli trials, {\textgreater} x. + \textit{(type=int)} + + \item[p] + + +Probability of success in each trial, $p \in [0,1]$. 
+ \textit{(type=float)} + + \end{Ventry} + + \end{quote} + + \vspace{1ex} + +\textbf{Note:} \begin{itemize} +\item {} +$E(X)=np$ + +\item {} +$Var(X)=np(1-p)$ + +\end{itemize} + + + \end{boxedminipage} + + \label{pymc:distributions:rcategorical} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rcategorical \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{rcategorical}(\textit{p}, \textit{minval}=\texttt{0}, \textit{step}=\texttt{1}, \textit{size}=\texttt{1}) + + \end{boxedminipage} + + \label{pymc:distributions:categorical_expval} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.categorical\_expval \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{categorical\_expval}(\textit{p}, \textit{minval}=\texttt{0}, \textit{step}=\texttt{1}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Expected value of categorical distribution. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:categorical_like} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.categorical\_like \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{categorical\_like}(\textit{x}, \textit{p}, \textit{minval}=\texttt{0}, \textit{step}=\texttt{1}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Categorical log-likelihood. +Accepts an array of probabilities associated with the histogram, +the minimum value of the histogram (defaults to zero), +and a step size (defaults to 1). + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:rcauchy} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rcauchy \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{rcauchy}(\textit{alpha}, \textit{beta}, \textit{size}=\texttt{1}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Returns Cauchy random variates. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:cauchy_expval} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.cauchy\_expval \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{cauchy\_expval}(\textit{alpha}, \textit{beta}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Expected value of cauchy distribution. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:cauchy_like} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.cauchy\_like \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{cauchy\_like}(\textit{x}, \textit{alpha}, \textit{beta}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Cauchy log-likelihood. The Cauchy distribution is also known as the +Lorentz or the Breit-Wigner distribution. +\begin{equation*}\begin{split}f(x \mid \alpha, \beta) = \frac{1}{\pi \beta [1 + (\frac{x-\alpha}{\beta})^2]}\end{split}\end{equation*} \vspace{1ex} + + \textbf{Parameters} + \begin{quote} + \begin{Ventry}{xxxxx} + + \item[alpha] + + +Location parameter. + \textit{(type=float)} + + \item[beta] + + +Scale parameter {\textgreater} 0. + \textit{(type=float)} + + \end{Ventry} + + \end{quote} + + \vspace{1ex} + +\textbf{Note:} \begin{itemize} +\item {} +Mode and median are at alpha. 
+ +\end{itemize} + + + \end{boxedminipage} + + \label{pymc:distributions:rchi2} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rchi2 \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{rchi2}(\textit{nu}, \textit{size}=\texttt{1}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Random $\chi^2$ variates. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:chi2_expval} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.chi2\_expval \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{chi2\_expval}(\textit{nu}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Expected value of Chi-squared distribution. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:chi2_like} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.chi2\_like \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{chi2\_like}(\textit{x}, \textit{nu}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Chi-squared $\chi^2$ log-likelihood. +\begin{equation*}\begin{split}f(x \mid \nu) = \frac{x^{(\nu-2)/2}e^{-x/2}}{2^{\nu/2}\Gamma(\nu/2)}\end{split}\end{equation*} \vspace{1ex} + + \textbf{Parameters} + \begin{quote} + \begin{Ventry}{xx} + + \item[x] + + +$\ge 0$ + \textit{(type=float)} + + \item[nu] + + +Degrees of freedom ($\nu > 0$) + \textit{(type=int)} + + \end{Ventry} + + \end{quote} + + \vspace{1ex} + +\textbf{Note:} \begin{itemize} +\item {} +$E(X)=\nu$ + +\item {} +$Var(X)=2\nu$ + +\end{itemize} + + + \end{boxedminipage} + + \label{pymc:distributions:rdirichlet} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rdirichlet \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{rdirichlet}(\textit{theta}, \textit{size}=\texttt{1}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Dirichlet random variates. + +NOTE only the first k-1 values are returned. +The k'th value is equal to one minus the sum of the first k-1 values. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:dirichlet_expval} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.dirichlet\_expval \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{dirichlet\_expval}(\textit{theta}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Expected value of Dirichlet distribution. + +NOTE only the expectations of the first k-1 values are returned. +The expectation of the k'th value is one minus the expectations of +the first k-1 values. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:dirichlet_like} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.dirichlet\_like \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{dirichlet\_like}(\textit{x}, \textit{theta}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Dirichlet log-likelihood. + +This is a multivariate continuous distribution. 
+\begin{equation*}\begin{split}f(\mathbf{x}) = \frac{\Gamma(\sum_{i=1}^k \theta_i)}{\prod \Gamma(\theta_i)} \prod_{i=1}^k x_i^{\theta_i - 1}\end{split}\end{equation*} \vspace{1ex} + + \textbf{Parameters} + \begin{quote} + \begin{Ventry}{xxxxx} + + \item[x] + + +Where \texttt{n} is the number of samples and \texttt{k} the dimension. +$0 < x_i < 1$, $\sum_{i=1}^{k-1} x_i < 1$ + \textit{(type=(n,k-1) array)} + + \item[theta] + + +$\theta > 0$ + \textit{(type=(n,k) or (1,k) float)} + + \end{Ventry} + + \end{quote} + + \vspace{1ex} + +\textbf{Note:} +There is an \texttt{implicit} k'th value of x, equal to $1 - \sum_{i=1}^{k-1} x_i$. + + + \end{boxedminipage} + + \label{pymc:distributions:rexponential} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rexponential \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{rexponential}(\textit{beta}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Exponential random variates. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:exponential_expval} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.exponential\_expval \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{exponential\_expval}(\textit{beta}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Expected value of exponential distribution. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:exponential_like} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.exponential\_like \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{exponential\_like}(\textit{x}, \textit{beta}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Exponential log-likelihood. + +The exponential distribution is a special case of the gamma distribution +with alpha=1. It often describes the duration of an event. +\begin{equation*}\begin{split}f(x \mid \beta) = \frac{1}{\beta}e^{-x/\beta}\end{split}\end{equation*} \vspace{1ex} + + \textbf{Parameters} + \begin{quote} + \begin{Ventry}{xxxx} + + \item[x] + + +$x \ge 0$ + \textit{(type=float)} + + \item[beta] + + +Survival parameter $\beta > 0$ + \textit{(type=float)} + + \end{Ventry} + + \end{quote} + + \vspace{1ex} + +\textbf{Note:} \begin{itemize} +\item {} +$E(X) = \beta$ + +\item {} +$Var(X) = \beta^2$ + +\end{itemize} + + + \end{boxedminipage} + + \label{pymc:distributions:rexponweib} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rexponweib \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{rexponweib}(\textit{alpha}, \textit{k}, \textit{loc}=\texttt{0}, \textit{scale}=\texttt{1}, \textit{size}=\texttt{1}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Random exponentiated Weibull variates. 
+ \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:exponweib_expval} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.exponweib\_expval \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{exponweib\_expval}(\textit{alpha}, \textit{k}, \textit{loc}, \textit{scale}) + + \end{boxedminipage} + + \label{pymc:distributions:exponweib_like} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.exponweib\_like \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{exponweib\_like}(\textit{x}, \textit{alpha}, \textit{k}, \textit{loc}=\texttt{0}, \textit{scale}=\texttt{1}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Exponentiated Weibull log-likelihood. +\begin{equation*}\begin{split}f(x \mid \alpha,k,loc,scale) & = \frac{\alpha k}{scale} (1-e^{-z^k})^{\alpha-1} e^{-z^k} z^{k-1} \\z & = \frac{x-loc}{scale}\end{split}\end{equation*} \vspace{1ex} + + \textbf{Parameters} + \begin{quote} + \begin{Ventry}{xxxxx} + + \item[x] + + +{\textgreater} 0 + \textit{(type=float)} + + \item[alpha] + + +Shape parameter + \textit{(type=float)} + + \item[k] + + +{\textgreater} 0 + \textit{(type=float)} + + \item[loc] + + +Location parameter + \textit{(type=float)} + + \item[scale] + + +Scale parameter {\textgreater} 0. + \textit{(type=float)} + + \end{Ventry} + + \end{quote} + + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:rgamma} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rgamma \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{rgamma}(\textit{alpha}, \textit{beta}, \textit{size}=\texttt{1}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Random gamma variates. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:gamma_expval} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.gamma\_expval \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{gamma\_expval}(\textit{alpha}, \textit{beta}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Expected value of gamma distribution. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:gamma_like} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.gamma\_like \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{gamma\_like}(\textit{x}, \textit{alpha}, \textit{beta}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Gamma log-likelihood. + +Represents the sum of alpha exponentially distributed random variables, each +of which has mean beta. +\begin{equation*}\begin{split}f(x \mid \alpha, \beta) = \frac{\beta^{\alpha}x^{\alpha-1}e^{-\beta x}}{\Gamma(\alpha)}\end{split}\end{equation*} \vspace{1ex} + + \textbf{Parameters} + \begin{quote} + \begin{Ventry}{xxxxx} + + \item[x] + + +$x \ge 0$ + \textit{(type=float)} + + \item[alpha] + + +Shape parameter $\alpha > 0$. + \textit{(type=float)} + + \item[beta] + + +Scale parameter $\beta > 0$. 
+    \textit{(type=float)}
+
+  \end{Ventry}
+
+  \end{quote}
+
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:rgev}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rgev \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{rgev}(\textit{xi}, \textit{mu}=\texttt{0}, \textit{sigma}=\texttt{0}, \textit{size}=\texttt{1})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Random generalized extreme value (GEV) variates.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:gev_expval}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.gev\_expval \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{gev\_expval}(\textit{xi}, \textit{mu}=\texttt{0}, \textit{sigma}=\texttt{1})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Expected value of generalized extreme value distribution.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:gev_like}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.gev\_like \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{gev\_like}(\textit{x}, \textit{xi}, \textit{mu}=\texttt{0}, \textit{sigma}=\texttt{1})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Generalized Extreme Value log-likelihood.
+\begin{equation*}\begin{split}pdf(x \mid \xi,\mu,\sigma) = \frac{1}{\sigma}\left(1 + \xi \left[\frac{x-\mu}{\sigma}\right]\right)^{-1/\xi-1}\exp\left\{-\left(1+\xi \left[\frac{x-\mu}{\sigma}\right]\right)^{-1/\xi}\right\}\end{split}\end{equation*}\begin{equation*}\begin{split}\sigma & > 0,\\x & > \mu-\sigma/\xi \text{ if } \xi > 0,\\x & < \mu-\sigma/\xi \text{ if } \xi < 0,\\x & \in (-\infty,\infty) \text{ if } \xi = 0\end{split}\end{equation*} \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:rgeometric}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rgeometric \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{rgeometric}(\textit{p}, \textit{size}=\texttt{1})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Random geometric variates.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:geometric_expval}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.geometric\_expval \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{geometric\_expval}(\textit{p})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Expected value of geometric distribution.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:geometric_like}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.geometric\_like \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{geometric\_like}(\textit{x}, \textit{p})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Geometric log-likelihood. The probability that the first success in a
+sequence of Bernoulli trials occurs on trial x.
+\begin{equation*}\begin{split}f(x \mid p) = p(1-p)^{x-1}\end{split}\end{equation*} \vspace{1ex}
+
+  \textbf{Parameters}
+  \begin{quote}
+  \begin{Ventry}{x}
+
+  \item[x]
+
+
+Number of trials until the first success, $x \ge 1$.
+    \textit{(type=int)}
+
+  \item[p]
+
+
+Probability of success on an individual trial, $p \in [0,1]$
+    \textit{(type=float)}
+
+  \end{Ventry}
+
+  \end{quote}
+
+  \vspace{1ex}
+
+\textbf{Note:} \begin{itemize}
+\item {}
+$E(X)=1/p$
+
+\item {}
+$Var(X)=\frac{1-p}{p^2}$
+
+\end{itemize}
+
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:rhalf_normal}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rhalf\_normal \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{rhalf\_normal}(\textit{tau}, \textit{size}=\texttt{1})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Random half-normal variates.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:half_normal_expval}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.half\_normal\_expval \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{half\_normal\_expval}(\textit{tau})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Expected value of half normal distribution.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:half_normal_like}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.half\_normal\_like \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{half\_normal\_like}(\textit{x}, \textit{tau})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Half-normal log-likelihood, a normal distribution with mean 0 and limited
+to the domain $x \in [0, \infty)$.
+\begin{equation*}\begin{split}f(x \mid \tau) = \sqrt{\frac{2\tau}{\pi}}\exp\left\{ {\frac{-x^2 \tau}{2}}\right\}\end{split}\end{equation*} \vspace{1ex}
+
+  \textbf{Parameters}
+  \begin{quote}
+  \begin{Ventry}{xxx}
+
+  \item[x]
+
+
+$x \ge 0$
+    \textit{(type=float)}
+
+  \item[tau]
+
+
+$\tau > 0$
+    \textit{(type=float)}
+
+  \end{Ventry}
+
+  \end{quote}
+
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:rhypergeometric}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rhypergeometric \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{rhypergeometric}(\textit{n}, \textit{m}, \textit{N}, \textit{size}=\texttt{1})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Returns hypergeometric random variates.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:hypergeometric_expval}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.hypergeometric\_expval \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{hypergeometric\_expval}(\textit{n}, \textit{m}, \textit{N})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Expected value of hypergeometric distribution.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:hypergeometric_like}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.hypergeometric\_like \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{hypergeometric\_like}(\textit{x}, \textit{n}, \textit{m}, \textit{N})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Hypergeometric log-likelihood. Discrete probability distribution that
+describes the number of successes in a sequence of draws from a finite
+population without replacement.
+\begin{equation*}\begin{split}f(x \mid n, m, N) = \frac{\binom{m}{x}\binom{N-m}{n-x}}{\binom{N}{n}}\end{split}\end{equation*} \vspace{1ex}
+
+  \textbf{Parameters}
+  \begin{quote}
+  \begin{Ventry}{x}
+
+  \item[x]
+
+
+Number of successes in a sample drawn from the population,
+$\max(0, n+m-N) \leq x \leq \min(n, m)$.
+    \textit{(type=int)}
+
+  \item[n]
+
+
+Size of sample drawn from the population.
+    \textit{(type=int)}
+
+  \item[m]
+
+
+Number of successes in the population.
+    \textit{(type=int)}
+
+  \item[N]
+
+
+Total number of units in the population.
+    \textit{(type=int)}
+
+  \end{Ventry}
+
+  \end{quote}
+
+  \vspace{1ex}
+
+\textbf{Note:}
+$E(X) = \frac{n m}{N}$
+
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:rinverse_gamma}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rinverse\_gamma \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{rinverse\_gamma}(\textit{alpha}, \textit{beta}, \textit{size}=\texttt{1})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Random inverse gamma variates.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:inverse_gamma_expval}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.inverse\_gamma\_expval \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{inverse\_gamma\_expval}(\textit{alpha}, \textit{beta})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Expected value of inverse gamma distribution.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:inverse_gamma_like}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.inverse\_gamma\_like \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{inverse\_gamma\_like}(\textit{x}, \textit{alpha}, \textit{beta})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Inverse gamma log-likelihood, the reciprocal of the gamma distribution.
+\begin{equation*}\begin{split}f(x \mid \alpha, \beta) = \frac{\beta^{\alpha}}{\Gamma(\alpha)} x^{-\alpha - 1} \exp\left(\frac{-\beta}{x}\right)\end{split}\end{equation*} \vspace{1ex}
+
+  \textbf{Parameters}
+  \begin{quote}
+  \begin{Ventry}{xxxxx}
+
+  \item[x]
+
+
+$x > 0$
+    \textit{(type=float)}
+
+  \item[alpha]
+
+
+Shape parameter, $\alpha > 0$.
+    \textit{(type=float)}
+
+  \item[beta]
+
+
+Scale parameter, $\beta > 0$.
+    \textit{(type=float)}
+
+  \end{Ventry}
+
+  \end{quote}
+
+  \vspace{1ex}
+
+\textbf{Note:}
+$E(X)=\frac{\beta}{\alpha-1}$ for $\alpha > 1$.
+
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:rlognormal}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rlognormal \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{rlognormal}(\textit{mu}, \textit{tau}, \textit{size}=\texttt{1})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Return random lognormal variates.
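+
+A small sketch (hedged: the parameter values are arbitrary) checking the
+draws against the expectation $e^{\mu+\frac{1}{2\tau}}$ given in the note
+for \textbf{lognormal\_like} below:
+\begin{verbatim}
+from pymc import distributions
+
+mu, tau = 0.0, 4.0   # arbitrary location and precision
+x = distributions.rlognormal(mu, tau, size=100000)
+# the sample mean should approach exp(mu + 1/(2*tau))
+print(x.mean(), distributions.lognormal_expval(mu, tau))
+\end{verbatim}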
+ \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:lognormal_expval} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.lognormal\_expval \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{lognormal\_expval}(\textit{mu}, \textit{tau}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Expected value of log-normal distribution. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:lognormal_like} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.lognormal\_like \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{lognormal\_like}(\textit{x}, \textit{mu}, \textit{tau}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Log-normal log-likelihood. Distribution of any random variable whose +logarithm is normally distributed. A variable might be modeled as +log-normal if it can be thought of as the multiplicative product of many +small independent factors. +\begin{equation*}\begin{split}f(x \mid \mu, \tau) = \sqrt{\frac{\tau}{2\pi}}\frac{\exp\left\{ -\frac{\tau}{2} (\ln(x)-\mu)^2 \right\}}{x}\end{split}\end{equation*} \vspace{1ex} + + \textbf{Parameters} + \begin{quote} + \begin{Ventry}{xxx} + + \item[x] + + +x {\textgreater} 0 + \textit{(type=float)} + + \item[mu] + + +Location parameter. + \textit{(type=float)} + + \item[tau] + + +Scale parameter, {\textgreater} 0. + \textit{(type=float)} + + \end{Ventry} + + \end{quote} + + \vspace{1ex} + +\textbf{Note:} +$E(X)=e^{\mu+\frac{1}{2\tau}}$ + + + \end{boxedminipage} + + \label{pymc:distributions:rmultinomial} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rmultinomial \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{rmultinomial}(\textit{n}, \textit{p}, \textit{size}=\texttt{1}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Random multinomial variates. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:multinomial_expval} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.multinomial\_expval \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{multinomial\_expval}(\textit{n}, \textit{p}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Expected value of multinomial distribution. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:multinomial_like} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.multinomial\_like \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{multinomial\_like}(\textit{x}, \textit{n}, \textit{p}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Multinomial log-likelihood with k-1 bins. Generalization of the binomial +distribution, but instead of each trial resulting in ``success'' or +``failure'', each one results in exactly one of some fixed finite number k +of possible outcomes over n independent trials. 'x{[}i{]}' indicates the number +of times outcome number i was observed over the n trials. 
+\begin{equation*}\begin{split}f(x \mid n, p) = \frac{n!}{\prod_{i=1}^k x_i!} \prod_{i=1}^k p_i^{x_i}\end{split}\end{equation*} \vspace{1ex}
+
+  \textbf{Parameters}
+  \begin{quote}
+  \begin{Ventry}{x}
+
+  \item[x]
+
+
+Random variable indicating the number of times outcome i is observed,
+$\sum_{i=1}^k x_i=n$, $x_i \ge 0$.
+    \textit{(type=(ns, k) int)}
+
+  \item[n]
+
+
+Number of trials.
+    \textit{(type=int)}
+
+  \item[p]
+
+
+Probability of each one of the different outcomes,
+$\sum_{i=1}^k p_i = 1$, $p_i \ge 0$.
+    \textit{(type=(k,) float)}
+
+  \end{Ventry}
+
+  \end{quote}
+
+  \vspace{1ex}
+
+\textbf{Note:} \begin{itemize}
+\item {}
+$E(X_i)=n p_i$
+
+\item {}
+$var(X_i)=n p_i(1-p_i)$
+
+\item {}
+$cov(X_i,X_j) = -n p_i p_j$
+
+\end{itemize}
+
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:rmultivariate_hypergeometric}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rmultivariate\_hypergeometric \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{rmultivariate\_hypergeometric}(\textit{n}, \textit{m}, \textit{size}=\texttt{None})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Random multivariate hypergeometric variates.
+
+n : Number of draws.
+m : Number of items in each category.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:multivariate_hypergeometric_expval}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.multivariate\_hypergeometric\_expval \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{multivariate\_hypergeometric\_expval}(\textit{n}, \textit{m})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Expected value of multivariate hypergeometric distribution.
+
+n : Number of items drawn.
+m : Number of items in each category.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:multivariate_hypergeometric_like}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.multivariate\_hypergeometric\_like \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{multivariate\_hypergeometric\_like}(\textit{x}, \textit{m})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Multivariate hypergeometric log-likelihood. Describes the probability of
+drawing x[i] elements of the ith category, when the number of items in
+each category is given by m.
+\begin{equation*}\begin{split}f(x \mid m) = \frac{\prod_i \binom{m_i}{x_i}}{\binom{N}{n}}\end{split}\end{equation*}
+where $N = \sum_i m_i$ and $n = \sum_i x_i$.
+  \vspace{1ex}
+
+  \textbf{Parameters}
+  \begin{quote}
+  \begin{Ventry}{x}
+
+  \item[x]
+
+
+Number of draws from each category, $x_i \leq m_i$.
+    \textit{(type=int sequence)}
+
+  \item[m]
+
+
+Number of items in each category.
+    \textit{(type=int sequence)}
+
+  \end{Ventry}
+
+  \end{quote}
+
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:rmv_normal}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rmv\_normal \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{rmv\_normal}(\textit{mu}, \textit{tau}, \textit{size}=\texttt{1})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Random multivariate normal variates.
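+
+An illustrative sketch of the precision parameterization used here (tau is
+the $(k,k)$ positive-definite inverse of a covariance matrix; the numbers
+are made up):
+\begin{verbatim}
+import numpy as np
+from pymc import distributions
+
+mu = np.array([0.0, 1.0])            # (k,) mean
+C = np.array([[1.0, 0.3],
+              [0.3, 2.0]])           # (k,k) covariance, positive definite
+tau = np.linalg.inv(C)               # precision matrix expected here
+x = distributions.rmv_normal(mu, tau, size=1000)
+\end{verbatim}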
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:mv_normal_expval}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.mv\_normal\_expval \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{mv\_normal\_expval}(\textit{mu}, \textit{tau})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Expected value of multivariate normal distribution.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:mv_normal_like}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.mv\_normal\_like \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{mv\_normal\_like}(\textit{x}, \textit{mu}, \textit{tau})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Multivariate normal log-likelihood.
+\begin{equation*}\begin{split}f(x \mid \mu, T) = \frac{{\mid T \mid}^{n/2}}{(2\pi)^{1/2}} \exp\left\{ -\frac{1}{2} (x-\mu)^{\prime}T(x-\mu) \right\}\end{split}\end{equation*}
+x: (n,k)
+mu: (k)
+tau: (k,k), positive definite
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:rmv_normal_cov}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rmv\_normal\_cov \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{rmv\_normal\_cov}(\textit{mu}, \textit{C})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Random multivariate normal variates.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:mv_normal_cov_expval}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.mv\_normal\_cov\_expval \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{mv\_normal\_cov\_expval}(\textit{mu}, \textit{C})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Expected value of multivariate normal distribution.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:mv_normal_cov_like}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.mv\_normal\_cov\_like \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{mv\_normal\_cov\_like}(\textit{x}, \textit{mu}, \textit{C})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Multivariate normal log-likelihood.
+\begin{equation*}\begin{split}f(x \mid \mu, C) = \frac{{\mid C \mid}^{-n/2}}{(2\pi)^{1/2}} \exp\left\{ -\frac{1}{2} (x-\mu)^{\prime}C^{-1}(x-\mu) \right\}\end{split}\end{equation*}
+x: (n,k)
+mu: (k)
+C: (k,k), positive definite
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:rmv_normal_chol}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rmv\_normal\_chol \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{rmv\_normal\_chol}(\textit{mu}, \textit{sig})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Random multivariate normal variates.
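+
+A sketch of the Cholesky parameterization (hedged: it assumes, as the
+shapes below suggest, that sig is the lower-triangular Cholesky factor of
+the covariance matrix; values are illustrative):
+\begin{verbatim}
+import numpy as np
+from pymc import distributions
+
+mu = np.zeros(3)
+C = np.diag([1.0, 2.0, 0.5])     # example covariance matrix
+sig = np.linalg.cholesky(C)      # lower triangular; np.dot(sig, sig.T)
+                                 # recovers C
+x = distributions.rmv_normal_chol(mu, sig)
+\end{verbatim}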
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:mv_normal_chol_expval}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.mv\_normal\_chol\_expval \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{mv\_normal\_chol\_expval}(\textit{mu}, \textit{sig})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Expected value of multivariate normal distribution.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:mv_normal_chol_like}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.mv\_normal\_chol\_like \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{mv\_normal\_chol\_like}(\textit{x}, \textit{mu}, \textit{tau})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Multivariate normal log-likelihood.
+\begin{equation*}\begin{split}f(x \mid \mu, \sigma) = \frac{{\mid \sigma \sigma^{\prime} \mid}^{-n/2}}{(2\pi)^{1/2}} \exp\left\{ -\frac{1}{2} (x-\mu)^{\prime}(\sigma \sigma^{\prime})^{-1}(x-\mu) \right\}\end{split}\end{equation*}
+x: (n,k)
+mu: (k)
+sigma: (k,k), lower triangular
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:rnegative_binomial}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rnegative\_binomial \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{rnegative\_binomial}(\textit{mu}, \textit{alpha}, \textit{size}=\texttt{1})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Random negative binomial variates.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:negative_binomial_expval}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.negative\_binomial\_expval \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{negative\_binomial\_expval}(\textit{mu}, \textit{alpha})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Expected value of negative binomial distribution.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:negative_binomial_like}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.negative\_binomial\_like \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{negative\_binomial\_like}(\textit{x}, \textit{mu}, \textit{alpha})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Negative binomial log-likelihood.
+\begin{equation*}\begin{split}f(x \mid r, p) = \frac{(x+r-1)!}{x! (r-1)!} p^r (1-p)^x\end{split}\end{equation*}
+$x > 0$, $\mu > 0$, $\alpha > 0$
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:rnormal}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rnormal \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{rnormal}(\textit{mu}, \textit{tau}, \textit{size}=\texttt{1})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Random normal variates.
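+
+A short sketch emphasizing that tau is a precision, so $Var(X)=1/\tau$, as
+the note under \textbf{normal\_like} below states (values illustrative):
+\begin{verbatim}
+from pymc import distributions
+
+sigma = 0.5
+tau = 1.0 / sigma**2     # precision, not a standard deviation
+x = distributions.rnormal(mu=0.0, tau=tau, size=10000)
+print(x.std())           # should be close to sigma
+\end{verbatim}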
+ \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:normal_expval} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.normal\_expval \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{normal\_expval}(\textit{mu}, \textit{tau}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Expected value of normal distribution. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:normal_like} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.normal\_like \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{normal\_like}(\textit{x}, \textit{mu}, \textit{tau}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Normal log-likelihood. +\begin{equation*}\begin{split}f(x \mid \mu, \tau) = \sqrt{\frac{\tau}{2\pi}} \exp\left\{ -\frac{\tau}{2} (x-\mu)^2 \right\}\end{split}\end{equation*} \vspace{1ex} + + \textbf{Parameters} + \begin{quote} + \begin{Ventry}{xxx} + + \item[x] + + +Input data. + \textit{(type=float)} + + \item[mu] + + +Mean of the distribution. + \textit{(type=float)} + + \item[tau] + + +Precision of the distribution, {\textgreater} 0. + \textit{(type=float)} + + \end{Ventry} + + \end{quote} + + \vspace{1ex} + +\textbf{Note:} \begin{itemize} +\item {} +$E(X) = \mu$ + +\item {} +$Var(X) = 1/\tau$ + +\end{itemize} + + + \end{boxedminipage} + + \label{pymc:distributions:rpoisson} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rpoisson \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{rpoisson}(\textit{mu}, \textit{size}=\texttt{1}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Random poisson variates. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:poisson_expval} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.poisson\_expval \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{poisson\_expval}(\textit{mu}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Expected value of Poisson distribution. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:distributions:poisson_like} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.poisson\_like \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{poisson\_like}(\textit{x}, \textit{mu}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Poisson log-likelihood. The Poisson is a discrete probability distribution. +It expresses the probability of a number of events occurring in a fixed +period of time if these events occur with a known average rate, and are +independent of the time since the last event. The Poisson distribution can +be derived as a limiting case of the binomial distribution. +\begin{equation*}\begin{split}f(x \mid \mu) = \frac{e^{-\mu}\mu^x}{x!}\end{split}\end{equation*} \vspace{1ex} + + \textbf{Parameters} + \begin{quote} + \begin{Ventry}{xx} + + \item[x] + + +$x \in {0,1,2,...}$ + \textit{(type=int)} + + \item[mu] + + +Expected number of occurrences that occur during the given interval, +$\mu \geq 0$. 
+    \textit{(type=float)}
+
+  \end{Ventry}
+
+  \end{quote}
+
+  \vspace{1ex}
+
+\textbf{Note:} \begin{itemize}
+\item {}
+$E(x)=\mu$
+
+\item {}
+$Var(x)=\mu$
+
+\end{itemize}
+
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:rtruncnorm}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rtruncnorm \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{rtruncnorm}(\textit{mu}, \textit{sigma}, \textit{a}, \textit{b}, \textit{size}=\texttt{1})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Random truncated normal variates.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:truncnorm_expval}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.truncnorm\_expval \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{truncnorm\_expval}(\textit{mu}, \textit{sigma}, \textit{a}, \textit{b})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+$E(X) = \mu + \frac{\sigma(\varphi_1-\varphi_2)}{T}$, where
+$T = \Phi\left(\frac{B-\mu}{\sigma}\right)-\Phi\left(\frac{A-\mu}{\sigma}\right)$,
+$\varphi_1 = \varphi\left(\frac{A-\mu}{\sigma}\right)$,
+$\varphi_2 = \varphi\left(\frac{B-\mu}{\sigma}\right)$,
+and $\varphi$ is the probability density function of a standard normal
+random variable.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:truncnorm_like}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.truncnorm\_like \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{truncnorm\_like}(\textit{x}, \textit{mu}, \textit{sigma}, \textit{a}, \textit{b})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Truncated normal log-likelihood.
+\begin{equation*}\begin{split}f(x \mid \mu, \sigma, a, b) = \frac{\phi(\frac{x-\mu}{\sigma})} {\Phi(\frac{b-\mu}{\sigma}) - \Phi(\frac{a-\mu}{\sigma})}\end{split}\end{equation*} \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:rskew_normal}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rskew\_normal \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{rskew\_normal}(\textit{mu}, \textit{tau}, \textit{alpha}, \textit{size}=\texttt{1})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Skew-normal random variates.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:skew_normal_like}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.skew\_normal\_like \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{skew\_normal\_like}(\textit{x}, \textit{mu}, \textit{tau}, \textit{alpha})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Azzalini's skew-normal log-likelihood.
+\begin{equation*}\begin{split}f(x \mid \mu, \tau, \alpha) = 2 \Phi((x-\mu)\sqrt{\tau}\alpha) \phi(x,\mu,\tau)\end{split}\end{equation*} \vspace{1ex}
+
+  \textbf{Parameters}
+  \begin{quote}
+  \begin{Ventry}{xxxxx}
+
+  \item[x]
+
+
+Input data.
+    \textit{(type=float)}
+
+  \item[mu]
+
+
+Mean of the distribution.
+    \textit{(type=float)}
+
+  \item[tau]
+
+
+Precision of the distribution, {\textgreater} 0.
+    \textit{(type=float)}
+
+  \item[alpha]
+
+
+Shape parameter of the distribution.
+    \textit{(type=float)}
+
+  \end{Ventry}
+
+  \end{quote}
+
+  \vspace{1ex}
+
+\textbf{Note:} \begin{itemize}
+\item {}
+See \href{http://azzalini.stat.unipd.it/SN/}{http://azzalini.stat.unipd.it/SN/}
+
+\end{itemize}
+
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:skew_normal_expval}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.skew\_normal\_expval \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{skew\_normal\_expval}(\textit{mu}, \textit{tau}, \textit{alpha})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Expectation of skew-normal random variables.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:rdiscrete_uniform}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rdiscrete\_uniform \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{rdiscrete\_uniform}(\textit{lower}, \textit{upper}, \textit{size}=\texttt{1})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Random discrete{\_}uniform variates.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:discrete_uniform_expval}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.discrete\_uniform\_expval \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{discrete\_uniform\_expval}(\textit{lower}, \textit{upper})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Expected value of discrete{\_}uniform distribution.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:discrete_uniform_like}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.discrete\_uniform\_like \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{discrete\_uniform\_like}(\textit{x}, \textit{lower}, \textit{upper})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+discrete{\_}uniform log-likelihood.
+\begin{equation*}\begin{split}f(x \mid lower, upper) = \frac{1}{upper-lower}\end{split}\end{equation*} \vspace{1ex}
+
+  \textbf{Parameters}
+  \begin{quote}
+  \begin{Ventry}{xxxxx}
+
+  \item[x]
+
+
+$lower \leq x \leq upper$
+    \textit{(type=float)}
+
+  \item[lower]
+
+
+Lower limit.
+    \textit{(type=float)}
+
+  \item[upper]
+
+
+Upper limit.
+    \textit{(type=float)}
+
+  \end{Ventry}
+
+  \end{quote}
+
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:runiform}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.runiform \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{runiform}(\textit{lower}, \textit{upper}, \textit{size}=\texttt{1})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Random uniform variates.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:uniform_expval}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.uniform\_expval \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{uniform\_expval}(\textit{lower}, \textit{upper})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Expected value of uniform distribution.
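+
+A quick sketch (values arbitrary) comparing simulated draws with the
+expected value, which for a uniform on $[lower, upper]$ is
+$(lower+upper)/2$:
+\begin{verbatim}
+from pymc import distributions
+
+x = distributions.runiform(-1.0, 3.0, size=100000)
+# both values should be near 1.0
+print(x.mean(), distributions.uniform_expval(-1.0, 3.0))
+\end{verbatim}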
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:uniform_like}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.uniform\_like \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{uniform\_like}(\textit{x}, \textit{lower}, \textit{upper})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Uniform log-likelihood.
+\begin{equation*}\begin{split}f(x \mid lower, upper) = \frac{1}{upper-lower}\end{split}\end{equation*} \vspace{1ex}
+
+  \textbf{Parameters}
+  \begin{quote}
+  \begin{Ventry}{xxxxx}
+
+  \item[x]
+
+
+$lower \leq x \leq upper$
+    \textit{(type=float)}
+
+  \item[lower]
+
+
+Lower limit.
+    \textit{(type=float)}
+
+  \item[upper]
+
+
+Upper limit.
+    \textit{(type=float)}
+
+  \end{Ventry}
+
+  \end{quote}
+
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:rweibull}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rweibull \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{rweibull}(\textit{alpha}, \textit{beta}, \textit{size}=\texttt{1})
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:weibull_expval}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.weibull\_expval \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{weibull\_expval}(\textit{alpha}, \textit{beta})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Expected value of Weibull distribution.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:weibull_like}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.weibull\_like \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{weibull\_like}(\textit{x}, \textit{alpha}, \textit{beta})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Weibull log-likelihood.
+\begin{equation*}\begin{split}f(x \mid \alpha, \beta) = \frac{\alpha x^{\alpha - 1}\exp(-(\frac{x}{\beta})^{\alpha})}{\beta^\alpha}\end{split}\end{equation*} \vspace{1ex}
+
+  \textbf{Parameters}
+  \begin{quote}
+  \begin{Ventry}{xxxxx}
+
+  \item[x]
+
+
+$x \ge 0$
+    \textit{(type=float)}
+
+  \item[alpha]
+
+
+{\textgreater} 0
+    \textit{(type=float)}
+
+  \item[beta]
+
+
+{\textgreater} 0
+    \textit{(type=float)}
+
+  \end{Ventry}
+
+  \end{quote}
+
+  \vspace{1ex}
+
+\textbf{Note:} \begin{itemize}
+\item {}
+$E(x)=\beta \Gamma(1+\frac{1}{\alpha})$
+
+\item {}
+$Var(x)=\beta^2 \Gamma(1+\frac{2}{\alpha}) - \mu^2$, where $\mu = E(x)$
+
+\end{itemize}
+
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:rwishart}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rwishart \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{rwishart}(\textit{n}, \textit{Tau})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Return a Wishart random matrix.
+
+Tau is the inverse of the 'covariance' matrix $C$.
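+
+An illustrative sketch (the degrees of freedom and covariance are made
+up) of the inverse-covariance parameterization described above:
+\begin{verbatim}
+import numpy as np
+from pymc import distributions
+
+n = 5                              # degrees of freedom
+C = np.eye(3)                      # 'covariance' matrix
+Tau = np.linalg.inv(C)             # rwishart expects the inverse of C
+W = distributions.rwishart(n, Tau) # one random Wishart matrix
+\end{verbatim}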
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:wishart_expval}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.wishart\_expval \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{wishart\_expval}(\textit{n}, \textit{Tau})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Expected value of Wishart distribution.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:wishart_like}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.wishart\_like \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{wishart\_like}(\textit{X}, \textit{n}, \textit{Tau})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Wishart log-likelihood. The Wishart distribution is the probability
+distribution of the maximum-likelihood estimator (MLE) of the precision
+matrix of a multivariate normal distribution. In one dimension with
+Tau=1, the distribution is identical to the chi-square distribution with
+n degrees of freedom.
+\begin{equation*}\begin{split}f(X \mid n, T) = {\mid T \mid}^{n/2}{\mid X \mid}^{(n-k-1)/2} \exp\left\{ -\frac{1}{2} Tr(TX) \right\}\end{split}\end{equation*}
+where $k$ is the rank of $X$.
+  \vspace{1ex}
+
+  \textbf{Parameters}
+  \begin{quote}
+  \begin{Ventry}{xxx}
+
+  \item[X]
+
+
+Symmetric, positive definite.
+    \textit{(type=matrix)}
+
+  \item[n]
+
+
+Degrees of freedom, {\textgreater} 0.
+    \textit{(type=int)}
+
+  \item[Tau]
+
+
+Symmetric and positive definite
+    \textit{(type=matrix)}
+
+  \end{Ventry}
+
+  \end{quote}
+
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:rwishart_cov}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.rwishart\_cov \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{rwishart\_cov}(\textit{n}, \textit{C})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Return a Wishart random matrix.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:wishart_cov_expval}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.wishart\_cov\_expval \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{wishart\_cov\_expval}(\textit{n}, \textit{C})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Expected value of Wishart distribution.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:wishart_cov_like}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.wishart\_cov\_like \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{wishart\_cov\_like}(\textit{X}, \textit{n}, \textit{C})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Wishart log-likelihood. The Wishart distribution is the probability
+distribution of the maximum-likelihood estimator (MLE) of the covariance
+matrix of a multivariate normal distribution. In one dimension with C=1,
+the distribution is identical to the chi-square distribution with n
+degrees of freedom.
+\begin{equation*}\begin{split}f(X \mid n, T) = {\mid T \mid}^{n/2}{\mid X \mid}^{(n-k-1)/2} \exp\left\{ -\frac{1}{2} Tr(TX) \right\}\end{split}\end{equation*}
+where $T = C^{-1}$ and $k$ is the rank of $X$.
+  \vspace{1ex}
+
+  \textbf{Parameters}
+  \begin{quote}
+  \begin{Ventry}{x}
+
+  \item[X]
+
+
+Symmetric, positive definite.
+    \textit{(type=matrix)}
+
+  \item[n]
+
+
+Degrees of freedom, {\textgreater} 0.
+    \textit{(type=int)}
+
+  \item[C]
+
+
+Symmetric and positive definite
+    \textit{(type=matrix)}
+
+  \end{Ventry}
+
+  \end{quote}
+
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:name_to_funcs}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.name\_to\_funcs \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{name\_to\_funcs}(\textit{name}, \textit{module})
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:valuewrapper}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.valuewrapper \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{valuewrapper}(\textit{f})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Return a likelihood accepting value instead of x as a keyword argument.
+This is specifically intended for the instantiator above.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:random_method_wrapper}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.random\_method\_wrapper \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{random\_method\_wrapper}(\textit{f}, \textit{size}, \textit{shape})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Wraps functions to return values of appropriate shape.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:fortranlike}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.fortranlike \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{fortranlike}(\textit{f}, \textit{snapshot}, \textit{mv}=\texttt{False})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+
+%___________________________________________________________________________
+
+\hypertarget{decorator-function-for-fortran-likelihoods}{}
+\pdfbookmark[2]{Decorator function for fortran likelihoods}{decorator-function-for-fortran-likelihoods}
+\subsubsection*{Decorator function for fortran likelihoods}
+\label{decorator-function-for-fortran-likelihoods}
+
+Wrap function f({\color{red}\bfseries{}*}args, {\color{red}\bfseries{}**}kwds) where f is a likelihood defined in flib.
+
+Assume args = (x, parameter1, parameter2, ...)
+Before passing the arguments to the function, the wrapper makes sure that
+the parameters have the same shape as x.
+
+mv: multivariate (True/False)
+
+
+%___________________________________________________________________________
+
+\hypertarget{add-compatibility-with-gof-goodness-of-fit-tests}{}
+\pdfbookmark[3]{Add compatibility with GoF (Goodness of Fit) tests}{add-compatibility-with-gof-goodness-of-fit-tests}
+\paragraph*{Add compatibility with GoF (Goodness of Fit) tests}
+\label{add-compatibility-with-gof-goodness-of-fit-tests}
+\begin{itemize}
+\item {}
+Add a 'prior' keyword (True/False).
+
+\item {}
+If the keyword gof is given and is True, return the GoF (Goodness of Fit)
+points instead of the likelihood.
+
+\item {}
+A 'loss' keyword can be given, to specify the loss function used in the
+computation of the GoF points.
+
+\item {}
+If the keyword random is given and True, return a random variate instead
+of the likelihood.
+
+\end{itemize}
+
+A schematic of the shape-matching step is sketched below.
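+
+The following is \textit{not} the actual implementation, only a hedged
+sketch of the broadcasting behaviour described above (resizing each
+parameter to the shape of x before calling the wrapped likelihood):
+\begin{verbatim}
+import numpy as np
+
+def fortranlike_sketch(f):
+    # schematic stand-in for the decorator: resize each
+    # parameter to the shape of x before calling f
+    def wrapper(x, *params, **kwds):
+        x = np.asarray(x)
+        params = [np.resize(p, x.shape) for p in params]
+        return f(x, *params, **kwds)
+    return wrapper
+\end{verbatim}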
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:local_decorated_likelihoods}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.local\_decorated\_likelihoods \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{local\_decorated\_likelihoods}(\textit{obj})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+New interface likelihoods.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:uninformative_like}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.uninformative\_like \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{uninformative\_like}(\textit{x})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Uninformative log-likelihood. Returns 0 regardless of the value of x.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:one_over_x_like}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.one\_over\_x\_like \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{one\_over\_x\_like}(\textit{x})
+
+  \vspace{-1.5ex}
+
+  \rule{\textwidth}{0.5\fboxrule}
+
+Returns -np.Inf if x{\textless}0, and -np.log(x) otherwise.
+  \vspace{1ex}
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:extend_dirichlet}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.extend\_dirichlet \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{extend\_dirichlet}(\textit{p})
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:mod_categor_like}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.mod\_categor\_like \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{mod\_categor\_like}(\textit{x}, \textit{p}, \textit{minval}=\texttt{0}, \textit{step}=\texttt{1})
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:mod_rcategor}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.mod\_rcategor \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{mod\_rcategor}(\textit{p}, \textit{minval}, \textit{step}, \textit{size}=\texttt{1})
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:mod_rmultinom}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.mod\_rmultinom \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{mod\_rmultinom}(\textit{n}, \textit{p})
+
+  \end{boxedminipage}
+
+  \label{pymc:distributions:mod_multinom_like}
+  \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.mod\_multinom\_like \textit{(function)}}
+
+  \vspace{0.5ex}
+
+  \begin{boxedminipage}{\textwidth}
+
+  \raggedright \textbf{mod\_multinom\_like}(\textit{x}, \textit{n}, \textit{p})
+
+  \end{boxedminipage}
+
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Variables %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+  \subsection{Variables}
+
+\begin{longtable}{|p{.30\textwidth}|p{.62\textwidth}|l}
+\cline{1-2}
+\cline{1-2} \centering \textbf{Name} & \centering \textbf{Description}& \\
+\cline{1-2}
+\endhead\cline{1-2}\multicolumn{3}{r}{\small\textit{continued on next
page}}\\\endfoot\cline{1-2} +\endlastfoot\raggedright f\-l\-i\-b\-\_\-b\-l\-a\-s\-\_\-O\-K\- & \raggedright \textbf{Value:} +{\tt False}&\\ +\cline{1-2} +\raggedright r\-a\-n\-d\-o\-m\-\_\-n\-u\-m\-b\-e\-r\- & \raggedright \textbf{Value:} +{\tt np.random.random}&\\ +\cline{1-2} +\raggedright i\-n\-v\-e\-r\-s\-e\- & \raggedright \textbf{Value:} +{\tt np.linalg.pinv}&\\ +\cline{1-2} +\raggedright s\-c\-\_\-c\-o\-n\-t\-i\-n\-u\-o\-u\-s\-\_\-d\-i\-s\-t\-r\-i\-b\-u\-t\-i\-o\-n\-s\- & \raggedright \textbf{Value:} +{\tt ['bernoulli', 'beta', 'cauchy', 'chi2', 'exponential', 'e\texttt{...}}&\\ +\cline{1-2} +\raggedright s\-c\-\_\-d\-i\-s\-c\-r\-e\-t\-e\-\_\-d\-i\-s\-t\-r\-i\-b\-u\-t\-i\-o\-n\-s\- & \raggedright \textbf{Value:} +{\tt ['binomial', 'poisson', 'negative\_binomial', 'categorical\texttt{...}}&\\ +\cline{1-2} +\raggedright m\-v\-\_\-c\-o\-n\-t\-i\-n\-u\-o\-u\-s\-\_\-d\-i\-s\-t\-r\-i\-b\-u\-t\-i\-o\-n\-s\- & \raggedright \textbf{Value:} +{\tt ['dirichlet', 'mv\_normal', 'mv\_normal\_cov', 'mv\_normal\_ch\texttt{...}}&\\ +\cline{1-2} +\raggedright m\-v\-\_\-d\-i\-s\-c\-r\-e\-t\-e\-\_\-d\-i\-s\-t\-r\-i\-b\-u\-t\-i\-o\-n\-s\- & \raggedright \textbf{Value:} +{\tt ['multivariate\_hypergeometric', 'multinomial']}&\\ +\cline{1-2} +\raggedright a\-v\-a\-i\-l\-a\-b\-l\-e\-d\-i\-s\-t\-r\-i\-b\-u\-t\-i\-o\-n\-s\- & \raggedright \textbf{Value:} +{\tt sc\_continuous\_distributions+ sc\_discrete\_distributions+ m\texttt{...}}&\\ +\cline{1-2} +\raggedright c\-a\-p\-i\-t\-a\-l\-i\-z\-e\- & \raggedright \textbf{Value:} +{\tt lambda name:}&\\ +\cline{1-2} +\raggedright a\-b\-s\-o\-l\-u\-t\-e\-\_\-l\-o\-s\-s\- & \raggedright \textbf{Value:} +{\tt lambda o, e:}&\\ +\cline{1-2} +\raggedright s\-q\-u\-a\-r\-e\-d\-\_\-l\-o\-s\-s\- & \raggedright \textbf{Value:} +{\tt lambda o, e:}&\\ +\cline{1-2} +\raggedright c\-h\-i\-\_\-s\-q\-u\-a\-r\-e\-\_\-l\-o\-s\-s\- & \raggedright \textbf{Value:} +{\tt lambda o, e:}&\\ +\cline{1-2} +\raggedright s\-n\-a\-p\-s\-h\-o\-t\- & \raggedright \textbf{Value:} +{\tt locals().copy()}&\\ +\cline{1-2} +\raggedright l\-i\-k\-e\-l\-i\-h\-o\-o\-d\-s\- & \raggedright \textbf{Value:} +{\tt \{\}}&\\ +\cline{1-2} +\raggedright B\-e\-r\-n\-o\-u\-l\-l\-i\- & \raggedright \textbf{Value:} +{\tt stochastic\_from\_dist('bernoulli', dist\_logp, dist\_random,\texttt{...}}&\\ +\cline{1-2} +\raggedright U\-n\-i\-n\-f\-o\-r\-m\-a\-t\-i\-v\-e\- & \raggedright \textbf{Value:} +{\tt stochastic\_from\_dist('uninformative', logp= uninformative\texttt{...}}&\\ +\cline{1-2} +\raggedright D\-i\-s\-c\-r\-e\-t\-e\-U\-n\-i\-n\-f\-o\-r\-m\-a\-t\-i\-v\-e\- & \raggedright \textbf{Value:} +{\tt stochastic\_from\_dist('uninformative', logp= uninformative\texttt{...}}&\\ +\cline{1-2} +\raggedright O\-n\-e\-O\-v\-e\-r\-X\- & \raggedright \textbf{Value:} +{\tt stochastic\_from\_dist('one\_over\_x\_like', logp= one\_over\_x\_\texttt{...}}&\\ +\cline{1-2} +\end{longtable} + + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +%% Class Description %% +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.ArgumentError \textit{(class)}|(} +\subsection{Class ArgumentError} + + \label{pymc:distributions:ArgumentError} +\begin{tabular}{cccccccccccccc} +% Line for object, linespec=[False, False, False, False, False] +\multicolumn{2}{r}{\settowidth{\BCL}{object}\multirow{2}{\BCL}{object}} +&& +&& +&& +&& +&& + \\\cline{3-3} + &&\multicolumn{1}{c|}{} +&& +&& +&& +&& +&& + \\ +% Line for 
exceptions.BaseException, linespec=[False, False, False, False] +\multicolumn{4}{r}{\settowidth{\BCL}{exceptions.BaseException}\multirow{2}{\BCL}{exceptions.BaseException}} +&& +&& +&& +&& + \\\cline{5-5} + &&&&\multicolumn{1}{c|}{} +&& +&& +&& +&& + \\ +% Line for exceptions.Exception, linespec=[False, False, False] +\multicolumn{6}{r}{\settowidth{\BCL}{exceptions.Exception}\multirow{2}{\BCL}{exceptions.Exception}} +&& +&& +&& + \\\cline{7-7} + &&&&&&\multicolumn{1}{c|}{} +&& +&& +&& + \\ +% Line for exceptions.StandardError, linespec=[False, False] +\multicolumn{8}{r}{\settowidth{\BCL}{exceptions.StandardError}\multirow{2}{\BCL}{exceptions.StandardError}} +&& +&& + \\\cline{9-9} + &&&&&&&&\multicolumn{1}{c|}{} +&& +&& + \\ +% Line for exceptions.AttributeError, linespec=[False] +\multicolumn{10}{r}{\settowidth{\BCL}{exceptions.AttributeError}\multirow{2}{\BCL}{exceptions.AttributeError}} +&& + \\\cline{11-11} + &&&&&&&&&&\multicolumn{1}{c|}{} +&& + \\ +&&&&&&&&&&\multicolumn{2}{l}{\textbf{pymc.distributions.ArgumentError}} +\end{tabular} + + +Incorrect class argument + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +%% Methods %% +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + + \subsubsection{Methods} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{\_\_delattr\_\_}(\textit{...}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + x.\_\_delattr\_\_('name') {\textless}=={\textgreater} del x.name + + \vspace{1ex} + + Overrides: object.\_\_delattr\_\_ + + \end{boxedminipage} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{\_\_getattribute\_\_}(\textit{...}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + x.\_\_getattribute\_\_('name') {\textless}=={\textgreater} x.name + + \vspace{1ex} + + Overrides: object.\_\_getattribute\_\_ + + \end{boxedminipage} + + \label{exceptions:BaseException:__getitem__} + \index{exceptions.BaseException.\_\_getitem\_\_ \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{\_\_getitem\_\_}(\textit{x}, \textit{y}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + x[y] + + \vspace{1ex} + + \end{boxedminipage} + + \label{exceptions:BaseException:__getslice__} + \index{exceptions.BaseException.\_\_getslice\_\_ \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{\_\_getslice\_\_}(\textit{x}, \textit{i}, \textit{j}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + x[i:j] + + Use of negative indices is not supported. + + \vspace{1ex} + + \end{boxedminipage} + + \label{object:__hash__} + \index{object.\_\_hash\_\_ \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{\_\_hash\_\_}(\textit{x}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + hash(x) + + \vspace{1ex} + + \end{boxedminipage} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{\_\_init\_\_}(\textit{...}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + x.\_\_init\_\_(...) 
initializes x; see x.\_\_class\_\_.\_\_doc\_\_ for + signature + + \vspace{1ex} + + Overrides: exceptions.StandardError.\_\_init\_\_ + + \end{boxedminipage} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{\_\_new\_\_}(\textit{T}, \textit{S}, \textit{...}) + + \textbf{Return Value} + \begin{quote} +\begin{alltt} +a new object with type S, a subtype of T +\end{alltt} + + \end{quote} + + \vspace{1ex} + + Overrides: exceptions.StandardError.\_\_new\_\_ + + \end{boxedminipage} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{\_\_reduce\_\_}(\textit{...}) + + helper for pickle + + \vspace{1ex} + + Overrides: object.\_\_reduce\_\_ extit{(inherited documentation)} + + \end{boxedminipage} + + \label{object:__reduce_ex__} + \index{object.\_\_reduce\_ex\_\_ \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{\_\_reduce\_ex\_\_}(\textit{...}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + helper for pickle + + \vspace{1ex} + + \end{boxedminipage} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{\_\_repr\_\_}(\textit{x}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + repr(x) + + \vspace{1ex} + + Overrides: object.\_\_repr\_\_ + + \end{boxedminipage} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{\_\_setattr\_\_}(\textit{...}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + x.\_\_setattr\_\_('name', value) {\textless}=={\textgreater} x.name = + value + + \vspace{1ex} + + Overrides: object.\_\_setattr\_\_ + + \end{boxedminipage} + + \label{exceptions:BaseException:__setstate__} + \index{exceptions.BaseException.\_\_setstate\_\_ \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{\_\_setstate\_\_}(\textit{...}) + + \end{boxedminipage} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{\_\_str\_\_}(\textit{x}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + str(x) + + \vspace{1ex} + + Overrides: object.\_\_str\_\_ + + \end{boxedminipage} + + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +%% Properties %% +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + + \subsubsection{Properties} + +\begin{longtable}{|p{.30\textwidth}|p{.62\textwidth}|l} +\cline{1-2} +\cline{1-2} \centering \textbf{Name} & \centering \textbf{Description}& \\ +\cline{1-2} +\endhead\cline{1-2}\multicolumn{3}{r}{\small\textit{continued on next page}}\\\endfoot\cline{1-2} +\endlastfoot\raggedright \_\-\_\-c\-l\-a\-s\-s\-\_\-\_\- & \raggedright \textbf{Value:} +{\tt {\textless}attribute '\_\_class\_\_' of 'object' objects{\textgreater}}&\\ +\cline{1-2} +\raggedright a\-r\-g\-s\- & \raggedright \textbf{Value:} +{\tt {\textless}attribute 'args' of 'exceptions.BaseException' objects{\textgreater}}&\\ +\cline{1-2} +\raggedright m\-e\-s\-s\-a\-g\-e\- & \raggedright \textbf{Value:} +{\tt {\textless}member 'message' of 'exceptions.BaseException' objects{\textgreater}}&\\ +\cline{1-2} +\end{longtable} + + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.ArgumentError \textit{(class)}|)} + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +%% Class Description %% +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + + \index{pymc \textit{(package)}!pymc.distributions 
\textit{(module)}!pymc.distributions.Categorical \textit{(class)}|(} +\subsection{Class Categorical} + + \label{pymc:distributions:Categorical} +\begin{tabular}{cccccccccccccc} +% Line for object, linespec=[False, False, False, False, False] +\multicolumn{2}{r}{\settowidth{\BCL}{object}\multirow{2}{\BCL}{object}} +&& +&& +&& +&& +&& + \\\cline{3-3} + &&\multicolumn{1}{c|}{} +&& +&& +&& +&& +&& + \\ +% Line for pymc.Node.Node, linespec=[False, False, False, False] +\multicolumn{4}{r}{\settowidth{\BCL}{pymc.Node.Node}\multirow{2}{\BCL}{pymc.Node.Node}} +&& +&& +&& +&& + \\\cline{5-5} + &&&&\multicolumn{1}{c|}{} +&& +&& +&& +&& + \\ +% Line for pymc.Node.Variable, linespec=[False, False, False] +\multicolumn{6}{r}{\settowidth{\BCL}{pymc.Node.Variable}\multirow{2}{\BCL}{pymc.Node.Variable}} +&& +&& +&& + \\\cline{7-7} + &&&&&&\multicolumn{1}{c|}{} +&& +&& +&& + \\ +% Line for pymc.Node.StochasticBase, linespec=[False, False] +\multicolumn{8}{r}{\settowidth{\BCL}{pymc.Node.StochasticBase}\multirow{2}{\BCL}{pymc.Node.StochasticBase}} +&& +&& + \\\cline{9-9} + &&&&&&&&\multicolumn{1}{c|}{} +&& +&& + \\ +% Line for pymc.PyMCObjects.Stochastic, linespec=[False] +\multicolumn{10}{r}{\settowidth{\BCL}{pymc.PyMCObjects.Stochastic}\multirow{2}{\BCL}{pymc.PyMCObjects.Stochastic}} +&& + \\\cline{11-11} + &&&&&&&&&&\multicolumn{1}{c|}{} +&& + \\ +&&&&&&&&&&\multicolumn{2}{l}{\textbf{pymc.distributions.Categorical}} +\end{tabular} + +\begin{description} +\item[{C = Categorical(name, p, minval, step{[}, trace=True, value=None,}] \leavevmode +rseed=False, isdata=False, cache{\_}depth=2, plot=True, verbose=0{]}) + +\end{description} + +A categorical random variable. Parents are p, minval, step. + +If parent p is Dirichlet and has length k-1, an implicit k'th +category is assumed to exist with associated probability 1-sum(p.value). + +Otherwise parent p's value should sum to 1. + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +%% Methods %% +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + + \subsubsection{Methods} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{\_\_init\_\_}(\textit{self}, \textit{name}, \textit{p}, \textit{minval}=\texttt{0}, \textit{step}=\texttt{1}, \textit{value}=\texttt{None}, \textit{isdata}=\texttt{False}, \textit{size}=\texttt{1}, \textit{trace}=\texttt{True}, \textit{rseed}=\texttt{False}, \textit{cache\_depth}=\texttt{2}, \textit{plot}=\texttt{True}, \textit{verbose}=\texttt{0}) + + x.\_\_init\_\_(...) 
initializes x; see x.\_\_class\_\_.\_\_doc\_\_ for
+ signature
+
+ \vspace{1ex}
+
+ Overrides: pymc.PyMCObjects.Stochastic.\_\_init\_\_
+
+ \end{boxedminipage}
+
+ \label{object:__delattr__}
+ \index{object.\_\_delattr\_\_ \textit{(function)}}
+
+ \vspace{0.5ex}
+
+ \begin{boxedminipage}{\textwidth}
+
+ \raggedright \textbf{\_\_delattr\_\_}(\textit{...})
+
+ \vspace{-1.5ex}
+
+ \rule{\textwidth}{0.5\fboxrule}
+ x.\_\_delattr\_\_('name') {\textless}=={\textgreater} del x.name
+
+ \vspace{1ex}
+
+ \end{boxedminipage}
+
+ \label{object:__getattribute__}
+ \index{object.\_\_getattribute\_\_ \textit{(function)}}
+
+ \vspace{0.5ex}
+
+ \begin{boxedminipage}{\textwidth}
+
+ \raggedright \textbf{\_\_getattribute\_\_}(\textit{...})
+
+ \vspace{-1.5ex}
+
+ \rule{\textwidth}{0.5\fboxrule}
+ x.\_\_getattribute\_\_('name') {\textless}=={\textgreater} x.name
+
+ \vspace{1ex}
+
+ \end{boxedminipage}
+
+ \label{object:__hash__}
+ \index{object.\_\_hash\_\_ \textit{(function)}}
+
+ \vspace{0.5ex}
+
+ \begin{boxedminipage}{\textwidth}
+
+ \raggedright \textbf{\_\_hash\_\_}(\textit{x})
+
+ \vspace{-1.5ex}
+
+ \rule{\textwidth}{0.5\fboxrule}
+ hash(x)
+
+ \vspace{1ex}
+
+ \end{boxedminipage}
+
+ \label{object:__new__}
+ \index{object.\_\_new\_\_ \textit{(function)}}
+
+ \vspace{0.5ex}
+
+ \begin{boxedminipage}{\textwidth}
+
+ \raggedright \textbf{\_\_new\_\_}(\textit{T}, \textit{S}, \textit{...})
+
+ \textbf{Return Value}
+ \begin{quote}
+\begin{alltt}
+a new object with type S, a subtype of T
+\end{alltt}
+
+ \end{quote}
+
+ \vspace{1ex}
+
+ \end{boxedminipage}
+
+ \label{object:__reduce__}
+ \index{object.\_\_reduce\_\_ \textit{(function)}}
+
+ \vspace{0.5ex}
+
+ \begin{boxedminipage}{\textwidth}
+
+ \raggedright \textbf{\_\_reduce\_\_}(\textit{...})
+
+ \vspace{-1.5ex}
+
+ \rule{\textwidth}{0.5\fboxrule}
+ helper for pickle
+
+ \vspace{1ex}
+
+ \end{boxedminipage}
+
+ \label{object:__reduce_ex__}
+ \index{object.\_\_reduce\_ex\_\_ \textit{(function)}}
+
+ \vspace{0.5ex}
+
+ \begin{boxedminipage}{\textwidth}
+
+ \raggedright \textbf{\_\_reduce\_ex\_\_}(\textit{...})
+
+ \vspace{-1.5ex}
+
+ \rule{\textwidth}{0.5\fboxrule}
+ helper for pickle
+
+ \vspace{1ex}
+
+ \end{boxedminipage}
+
+ \vspace{0.5ex}
+
+ \begin{boxedminipage}{\textwidth}
+
+ \raggedright \textbf{\_\_repr\_\_}(\textit{self})
+
+ repr(x)
+
+ \vspace{1ex}
+
+ Overrides: object.\_\_repr\_\_ \textit{(inherited documentation)}
+
+ \end{boxedminipage}
+
+ \label{object:__setattr__}
+ \index{object.\_\_setattr\_\_ \textit{(function)}}
+
+ \vspace{0.5ex}
+
+ \begin{boxedminipage}{\textwidth}
+
+ \raggedright \textbf{\_\_setattr\_\_}(\textit{...})
+
+ \vspace{-1.5ex}
+
+ \rule{\textwidth}{0.5\fboxrule}
+ x.\_\_setattr\_\_('name', value) {\textless}=={\textgreater} x.name =
+ value
+
+ \vspace{1ex}
+
+ \end{boxedminipage}
+
+ \vspace{0.5ex}
+
+ \begin{boxedminipage}{\textwidth}
+
+ \raggedright \textbf{\_\_str\_\_}(\textit{self})
+
+ str(x)
+
+ \vspace{1ex}
+
+ Overrides: pymc.Node.Node.\_\_str\_\_
+
+ \end{boxedminipage}
+
+ \vspace{0.5ex}
+
+ \begin{boxedminipage}{\textwidth}
+
+ \raggedright \textbf{gen\_lazy\_function}(\textit{self})
+
+ \vspace{-1.5ex}
+
+ \rule{\textwidth}{0.5\fboxrule}
+
+Will be called by Node at instantiation.
+ \vspace{1ex} + + Overrides: pymc.Node.Node.gen\_lazy\_function + + \end{boxedminipage} + + \label{pymc:PyMCObjects:Stochastic:get_logp} + \index{pymc.PyMCObjects.Stochastic.get\_logp \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{get\_logp}(\textit{self}) + + \end{boxedminipage} + + \label{pymc:PyMCObjects:Stochastic:get_value} + \index{pymc.PyMCObjects.Stochastic.get\_value \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{get\_value}(\textit{self}) + + \end{boxedminipage} + + \label{pymc:PyMCObjects:Stochastic:random} + \index{pymc.PyMCObjects.Stochastic.random \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{rand}(\textit{self}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Draws a new value for a stoch conditional on its parents +and returns it. + +Raises an error if no 'random' argument was passed to {\_}{\_}init{\_}{\_}. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:PyMCObjects:Stochastic:random} + \index{pymc.PyMCObjects.Stochastic.random \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{random}(\textit{self}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Draws a new value for a stoch conditional on its parents +and returns it. + +Raises an error if no 'random' argument was passed to {\_}{\_}init{\_}{\_}. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:PyMCObjects:Stochastic:set_logp} + \index{pymc.PyMCObjects.Stochastic.set\_logp \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{set\_logp}(\textit{self}) + + \end{boxedminipage} + + \label{pymc:PyMCObjects:Stochastic:set_value} + \index{pymc.PyMCObjects.Stochastic.set\_value \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{set\_value}(\textit{self}, \textit{value}) + + \end{boxedminipage} + + \label{pymc:Node:Variable:stats} + \index{pymc.Node.Variable.stats \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{stats}(\textit{self}, \textit{alpha}=\texttt{0.05}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Generate posterior statistics for node. 
+ \vspace{1ex} + + \end{boxedminipage} + + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +%% Properties %% +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + + \subsubsection{Properties} + +\begin{longtable}{|p{.30\textwidth}|p{.62\textwidth}|l} +\cline{1-2} +\cline{1-2} \centering \textbf{Name} & \centering \textbf{Description}& \\ +\cline{1-2} +\endhead\cline{1-2}\multicolumn{3}{r}{\small\textit{continued on next page}}\\\endfoot\cline{1-2} +\endlastfoot\raggedright \_\-\_\-c\-l\-a\-s\-s\-\_\-\_\- & \raggedright \textbf{Value:} +{\tt {\textless}attribute '\_\_class\_\_' of 'object' objects{\textgreater}}&\\ +\cline{1-2} +\end{longtable} + + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +%% Class Variables %% +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + + \subsubsection{Class Variables} + +\begin{longtable}{|p{.30\textwidth}|p{.62\textwidth}|l} +\cline{1-2} +\cline{1-2} \centering \textbf{Name} & \centering \textbf{Description}& \\ +\cline{1-2} +\endhead\cline{1-2}\multicolumn{3}{r}{\small\textit{continued on next page}}\\\endfoot\cline{1-2} +\endlastfoot\raggedright p\-a\-r\-e\-n\-t\-\_\-n\-a\-m\-e\-s\- & \raggedright \textbf{Value:} +{\tt ['p', 'minval', 'step']}&\\ +\cline{1-2} +\raggedright c\-o\-p\-a\-r\-e\-n\-t\-s\- & \raggedright \textbf{Value:} +{\tt property(\_get\_coparents, doc= "All the variables whose ex\texttt{...}}&\\ +\cline{1-2} +\raggedright e\-x\-t\-e\-n\-d\-e\-d\-\_\-c\-h\-i\-l\-d\-r\-e\-n\- & \raggedright \textbf{Value:} +{\tt property(\_get\_extended\_children, doc= "All the stochastic\texttt{...}}&\\ +\cline{1-2} +\raggedright e\-x\-t\-e\-n\-d\-e\-d\-\_\-p\-a\-r\-e\-n\-t\-s\- & \raggedright \textbf{Value:} +{\tt property(\_get\_extended\_parents, doc= "All the stochastic \texttt{...}}&\\ +\cline{1-2} +\raggedright l\-o\-g\-p\- & \raggedright \textbf{Value:} +{\tt property(fget= get\_logp, fset= set\_logp, doc= "Log-probab\texttt{...}}&\\ +\cline{1-2} +\raggedright m\-a\-r\-k\-o\-v\-\_\-b\-l\-a\-n\-k\-e\-t\- & \raggedright \textbf{Value:} +{\tt property(\_get\_markov\_blanket, doc= "Self's coparents, sel\texttt{...}}&\\ +\cline{1-2} +\raggedright m\-o\-r\-a\-l\-\_\-n\-e\-i\-g\-h\-b\-o\-r\-s\- & \raggedright \textbf{Value:} +{\tt property(\_get\_moral\_neighbors, doc= "Self's neighbors in \texttt{...}}&\\ +\cline{1-2} +\raggedright p\-a\-r\-e\-n\-t\-s\- & \raggedright \textbf{Value:} +{\tt property(\_get\_parents, \_set\_parents, doc= "Self's parents\texttt{...}}&\\ +\cline{1-2} +\raggedright p\-l\-o\-t\- & \raggedright \textbf{Value:} +{\tt property(\_get\_plot, doc= 'A flag indicating whether self \texttt{...}}&\\ +\cline{1-2} +\raggedright v\-a\-l\-u\-e\- & \raggedright \textbf{Value:} +{\tt property(fget= get\_value, fset= set\_value, doc= "Self's c\texttt{...}}&\\ +\cline{1-2} +\end{longtable} + + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.Categorical \textit{(class)}|)} + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +%% Class Description %% +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.Multinomial \textit{(class)}|(} +\subsection{Class Multinomial} + + \label{pymc:distributions:Multinomial} +\begin{tabular}{cccccccccccccc} +% Line for object, linespec=[False, False, False, False, False] 
+\multicolumn{2}{r}{\settowidth{\BCL}{object}\multirow{2}{\BCL}{object}}
+&&
+&&
+&&
+&&
+&&
+ \\\cline{3-3}
+ &&\multicolumn{1}{c|}{}
+&&
+&&
+&&
+&&
+&&
+ \\
+% Line for pymc.Node.Node, linespec=[False, False, False, False]
+\multicolumn{4}{r}{\settowidth{\BCL}{pymc.Node.Node}\multirow{2}{\BCL}{pymc.Node.Node}}
+&&
+&&
+&&
+&&
+ \\\cline{5-5}
+ &&&&\multicolumn{1}{c|}{}
+&&
+&&
+&&
+&&
+ \\
+% Line for pymc.Node.Variable, linespec=[False, False, False]
+\multicolumn{6}{r}{\settowidth{\BCL}{pymc.Node.Variable}\multirow{2}{\BCL}{pymc.Node.Variable}}
+&&
+&&
+&&
+ \\\cline{7-7}
+ &&&&&&\multicolumn{1}{c|}{}
+&&
+&&
+&&
+ \\
+% Line for pymc.Node.StochasticBase, linespec=[False, False]
+\multicolumn{8}{r}{\settowidth{\BCL}{pymc.Node.StochasticBase}\multirow{2}{\BCL}{pymc.Node.StochasticBase}}
+&&
+&&
+ \\\cline{9-9}
+ &&&&&&&&\multicolumn{1}{c|}{}
+&&
+&&
+ \\
+% Line for pymc.PyMCObjects.Stochastic, linespec=[False]
+\multicolumn{10}{r}{\settowidth{\BCL}{pymc.PyMCObjects.Stochastic}\multirow{2}{\BCL}{pymc.PyMCObjects.Stochastic}}
+&&
+ \\\cline{11-11}
+ &&&&&&&&&&\multicolumn{1}{c|}{}
+&&
+ \\
+&&&&&&&&&&\multicolumn{2}{l}{\textbf{pymc.distributions.Multinomial}}
+\end{tabular}
+
+\begin{description}
+\item[{M = Multinomial(name, n, p{[}, trace=True, value=None,}] \leavevmode
+rseed=False, isdata=False, cache{\_}depth=2, plot=True, verbose=0{]})
+
+\end{description}
+
+A multinomial random variable. Parents are n and p.
+
+If parent p is Dirichlet and has length k-1, an implicit k'th
+category is assumed to exist with associated probability 1-sum(p.value).
+
+Otherwise parent p's value should sum to 1.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+%% Methods %%
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+
+ \subsubsection{Methods}
+
+ \vspace{0.5ex}
+
+ \begin{boxedminipage}{\textwidth}
+
+ \raggedright \textbf{\_\_init\_\_}(\textit{self}, \textit{name}, \textit{n}, \textit{p}, \textit{trace}=\texttt{True}, \textit{value}=\texttt{None}, \textit{rseed}=\texttt{False}, \textit{isdata}=\texttt{False}, \textit{cache\_depth}=\texttt{2}, \textit{plot}=\texttt{True}, \textit{verbose}=\texttt{0})
+
+ x.\_\_init\_\_(...)
initializes x; see x.\_\_class\_\_.\_\_doc\_\_ for
+ signature
+
+ \vspace{1ex}
+
+ Overrides: pymc.PyMCObjects.Stochastic.\_\_init\_\_
+
+ \end{boxedminipage}
+
+ \label{object:__delattr__}
+ \index{object.\_\_delattr\_\_ \textit{(function)}}
+
+ \vspace{0.5ex}
+
+ \begin{boxedminipage}{\textwidth}
+
+ \raggedright \textbf{\_\_delattr\_\_}(\textit{...})
+
+ \vspace{-1.5ex}
+
+ \rule{\textwidth}{0.5\fboxrule}
+ x.\_\_delattr\_\_('name') {\textless}=={\textgreater} del x.name
+
+ \vspace{1ex}
+
+ \end{boxedminipage}
+
+ \label{object:__getattribute__}
+ \index{object.\_\_getattribute\_\_ \textit{(function)}}
+
+ \vspace{0.5ex}
+
+ \begin{boxedminipage}{\textwidth}
+
+ \raggedright \textbf{\_\_getattribute\_\_}(\textit{...})
+
+ \vspace{-1.5ex}
+
+ \rule{\textwidth}{0.5\fboxrule}
+ x.\_\_getattribute\_\_('name') {\textless}=={\textgreater} x.name
+
+ \vspace{1ex}
+
+ \end{boxedminipage}
+
+ \label{object:__hash__}
+ \index{object.\_\_hash\_\_ \textit{(function)}}
+
+ \vspace{0.5ex}
+
+ \begin{boxedminipage}{\textwidth}
+
+ \raggedright \textbf{\_\_hash\_\_}(\textit{x})
+
+ \vspace{-1.5ex}
+
+ \rule{\textwidth}{0.5\fboxrule}
+ hash(x)
+
+ \vspace{1ex}
+
+ \end{boxedminipage}
+
+ \label{object:__new__}
+ \index{object.\_\_new\_\_ \textit{(function)}}
+
+ \vspace{0.5ex}
+
+ \begin{boxedminipage}{\textwidth}
+
+ \raggedright \textbf{\_\_new\_\_}(\textit{T}, \textit{S}, \textit{...})
+
+ \textbf{Return Value}
+ \begin{quote}
+\begin{alltt}
+a new object with type S, a subtype of T
+\end{alltt}
+
+ \end{quote}
+
+ \vspace{1ex}
+
+ \end{boxedminipage}
+
+ \label{object:__reduce__}
+ \index{object.\_\_reduce\_\_ \textit{(function)}}
+
+ \vspace{0.5ex}
+
+ \begin{boxedminipage}{\textwidth}
+
+ \raggedright \textbf{\_\_reduce\_\_}(\textit{...})
+
+ \vspace{-1.5ex}
+
+ \rule{\textwidth}{0.5\fboxrule}
+ helper for pickle
+
+ \vspace{1ex}
+
+ \end{boxedminipage}
+
+ \label{object:__reduce_ex__}
+ \index{object.\_\_reduce\_ex\_\_ \textit{(function)}}
+
+ \vspace{0.5ex}
+
+ \begin{boxedminipage}{\textwidth}
+
+ \raggedright \textbf{\_\_reduce\_ex\_\_}(\textit{...})
+
+ \vspace{-1.5ex}
+
+ \rule{\textwidth}{0.5\fboxrule}
+ helper for pickle
+
+ \vspace{1ex}
+
+ \end{boxedminipage}
+
+ \vspace{0.5ex}
+
+ \begin{boxedminipage}{\textwidth}
+
+ \raggedright \textbf{\_\_repr\_\_}(\textit{self})
+
+ repr(x)
+
+ \vspace{1ex}
+
+ Overrides: object.\_\_repr\_\_ \textit{(inherited documentation)}
+
+ \end{boxedminipage}
+
+ \label{object:__setattr__}
+ \index{object.\_\_setattr\_\_ \textit{(function)}}
+
+ \vspace{0.5ex}
+
+ \begin{boxedminipage}{\textwidth}
+
+ \raggedright \textbf{\_\_setattr\_\_}(\textit{...})
+
+ \vspace{-1.5ex}
+
+ \rule{\textwidth}{0.5\fboxrule}
+ x.\_\_setattr\_\_('name', value) {\textless}=={\textgreater} x.name =
+ value
+
+ \vspace{1ex}
+
+ \end{boxedminipage}
+
+ \vspace{0.5ex}
+
+ \begin{boxedminipage}{\textwidth}
+
+ \raggedright \textbf{\_\_str\_\_}(\textit{self})
+
+ str(x)
+
+ \vspace{1ex}
+
+ Overrides: pymc.Node.Node.\_\_str\_\_
+
+ \end{boxedminipage}
+
+ \vspace{0.5ex}
+
+ \begin{boxedminipage}{\textwidth}
+
+ \raggedright \textbf{gen\_lazy\_function}(\textit{self})
+
+ \vspace{-1.5ex}
+
+ \rule{\textwidth}{0.5\fboxrule}
+
+Will be called by Node at instantiation.
+ \vspace{1ex} + + Overrides: pymc.Node.Node.gen\_lazy\_function + + \end{boxedminipage} + + \label{pymc:PyMCObjects:Stochastic:get_logp} + \index{pymc.PyMCObjects.Stochastic.get\_logp \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{get\_logp}(\textit{self}) + + \end{boxedminipage} + + \label{pymc:PyMCObjects:Stochastic:get_value} + \index{pymc.PyMCObjects.Stochastic.get\_value \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{get\_value}(\textit{self}) + + \end{boxedminipage} + + \label{pymc:PyMCObjects:Stochastic:random} + \index{pymc.PyMCObjects.Stochastic.random \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{rand}(\textit{self}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Draws a new value for a stoch conditional on its parents +and returns it. + +Raises an error if no 'random' argument was passed to {\_}{\_}init{\_}{\_}. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:PyMCObjects:Stochastic:random} + \index{pymc.PyMCObjects.Stochastic.random \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{random}(\textit{self}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Draws a new value for a stoch conditional on its parents +and returns it. + +Raises an error if no 'random' argument was passed to {\_}{\_}init{\_}{\_}. + \vspace{1ex} + + \end{boxedminipage} + + \label{pymc:PyMCObjects:Stochastic:set_logp} + \index{pymc.PyMCObjects.Stochastic.set\_logp \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{set\_logp}(\textit{self}) + + \end{boxedminipage} + + \label{pymc:PyMCObjects:Stochastic:set_value} + \index{pymc.PyMCObjects.Stochastic.set\_value \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{set\_value}(\textit{self}, \textit{value}) + + \end{boxedminipage} + + \label{pymc:Node:Variable:stats} + \index{pymc.Node.Variable.stats \textit{(function)}} + + \vspace{0.5ex} + + \begin{boxedminipage}{\textwidth} + + \raggedright \textbf{stats}(\textit{self}, \textit{alpha}=\texttt{0.05}) + + \vspace{-1.5ex} + + \rule{\textwidth}{0.5\fboxrule} + +Generate posterior statistics for node. 
+ \vspace{1ex} + + \end{boxedminipage} + + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +%% Properties %% +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + + \subsubsection{Properties} + +\begin{longtable}{|p{.30\textwidth}|p{.62\textwidth}|l} +\cline{1-2} +\cline{1-2} \centering \textbf{Name} & \centering \textbf{Description}& \\ +\cline{1-2} +\endhead\cline{1-2}\multicolumn{3}{r}{\small\textit{continued on next page}}\\\endfoot\cline{1-2} +\endlastfoot\raggedright \_\-\_\-c\-l\-a\-s\-s\-\_\-\_\- & \raggedright \textbf{Value:} +{\tt {\textless}attribute '\_\_class\_\_' of 'object' objects{\textgreater}}&\\ +\cline{1-2} +\end{longtable} + + +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% +%% Class Variables %% +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + + \subsubsection{Class Variables} + +\begin{longtable}{|p{.30\textwidth}|p{.62\textwidth}|l} +\cline{1-2} +\cline{1-2} \centering \textbf{Name} & \centering \textbf{Description}& \\ +\cline{1-2} +\endhead\cline{1-2}\multicolumn{3}{r}{\small\textit{continued on next page}}\\\endfoot\cline{1-2} +\endlastfoot\raggedright p\-a\-r\-e\-n\-t\-\_\-n\-a\-m\-e\-s\- & \raggedright \textbf{Value:} +{\tt ['n', 'p']}&\\ +\cline{1-2} +\raggedright c\-o\-p\-a\-r\-e\-n\-t\-s\- & \raggedright \textbf{Value:} +{\tt property(\_get\_coparents, doc= "All the variables whose ex\texttt{...}}&\\ +\cline{1-2} +\raggedright e\-x\-t\-e\-n\-d\-e\-d\-\_\-c\-h\-i\-l\-d\-r\-e\-n\- & \raggedright \textbf{Value:} +{\tt property(\_get\_extended\_children, doc= "All the stochastic\texttt{...}}&\\ +\cline{1-2} +\raggedright e\-x\-t\-e\-n\-d\-e\-d\-\_\-p\-a\-r\-e\-n\-t\-s\- & \raggedright \textbf{Value:} +{\tt property(\_get\_extended\_parents, doc= "All the stochastic \texttt{...}}&\\ +\cline{1-2} +\raggedright l\-o\-g\-p\- & \raggedright \textbf{Value:} +{\tt property(fget= get\_logp, fset= set\_logp, doc= "Log-probab\texttt{...}}&\\ +\cline{1-2} +\raggedright m\-a\-r\-k\-o\-v\-\_\-b\-l\-a\-n\-k\-e\-t\- & \raggedright \textbf{Value:} +{\tt property(\_get\_markov\_blanket, doc= "Self's coparents, sel\texttt{...}}&\\ +\cline{1-2} +\raggedright m\-o\-r\-a\-l\-\_\-n\-e\-i\-g\-h\-b\-o\-r\-s\- & \raggedright \textbf{Value:} +{\tt property(\_get\_moral\_neighbors, doc= "Self's neighbors in \texttt{...}}&\\ +\cline{1-2} +\raggedright p\-a\-r\-e\-n\-t\-s\- & \raggedright \textbf{Value:} +{\tt property(\_get\_parents, \_set\_parents, doc= "Self's parents\texttt{...}}&\\ +\cline{1-2} +\raggedright p\-l\-o\-t\- & \raggedright \textbf{Value:} +{\tt property(\_get\_plot, doc= 'A flag indicating whether self \texttt{...}}&\\ +\cline{1-2} +\raggedright v\-a\-l\-u\-e\- & \raggedright \textbf{Value:} +{\tt property(fget= get\_value, fset= set\_value, doc= "Self's c\texttt{...}}&\\ +\cline{1-2} +\end{longtable} + + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}!pymc.distributions.Multinomial \textit{(class)}|)} + \index{pymc \textit{(package)}!pymc.distributions \textit{(module)}|)} diff --git a/docs/theory.tex b/docs/theory.tex index 7ed48f1867b..d161d39fece 100644 --- a/docs/theory.tex +++ b/docs/theory.tex @@ -79,7 +79,7 @@ \subsection*{Rejection Sampling} g(x) \le f(x) \qquad\forall x \in (-\infty,\infty) \] -Having done this, we can now sample ${x_i}$ from $g(x)$ and accept or reject each of these values based upon $f(x_i)$. 
Specifically, for each draw $x_i$, we also draw a uniform random variate $u_i$ and accept $x_i$ if $u_i < f(x_i)/kg(x_i)$ (Figure \ref{fig:unbound}). This approach is made more efficient by choosing an enveloping distribution that is ``close'' to the target distribution, thus maximizing the number of accepted points. Further improvement is gained by using optimized algorithms such as importance sampling (see text) which, as the name implies, samples more frequently from important areas of the distribution. +Having done this, we can now sample ${x_i}$ from $g(x)$ and accept or reject each of these values based upon $f(x_i)$. Specifically, for each draw $x_i$, we also draw a uniform random variate $u_i$ and accept $x_i$ if $u_i < f(x_i)/kg(x_i)$ (Figure \ref{fig:unbound}). This approach is made more efficient by choosing an enveloping distribution that is ``close'' to the target distribution, thus maximizing the number of accepted points. Further improvement is gained by using optimized algorithms such as importance sampling which, as the name implies, samples more frequently from important areas of the distribution. Rejection sampling is usually subject to declining performance as the dimension of the parameter space increases, so it is used less frequently than MCMC for evaluation of posterior distributions \textbf{ref}. @@ -103,7 +103,7 @@ \section*{Markov Chains} \noindent then the stochastic process is known as a Markov chain. This conditioning specifies that the future depends on the current state, but not past states. Thus, the Markov chain wanders about the state space, remembering only where it has just been in the last time step. The collection of transition probabilities is sometimes called a \emph{transition matrix} when dealing with discrete states, or more generally, a \emph{transition kernel}. -\noindent In the context of Markov chain Monte Carlo, useful to think of the Markovian property as ``mild non-independence''\footnote{In general, for Bayesian analyses, statistical independence is less relevant, relative to classical statistical inference. Instead, we substitute the notion of \emph{exchangeability}, which is a weaker concept, but often just as useful. Exchangeability essentially implies that different permutations (orderings) of a sequence of random variables will have the same marginal distribution. A sequence of random quantities may not be considered independent in a Bayesian sense, but are frequently exchangeable.}. MCMC allows us to indirectly generate independent samples from a particular posterior distribution. +\noindent In the context of Markov chain Monte Carlo, it is useful to think of the Markovian property as ``mild non-independence''\footnote{In general, for Bayesian analyses, statistical independence is less relevant, relative to classical statistical inference. Instead, we substitute the notion of \emph{exchangeability}, which is a weaker concept, but often just as useful. Exchangeability essentially implies that different permutations (orderings) of a sequence of random variables will have the same marginal distribution. A sequence of random quantities may not be considered independent in a Bayesian sense, but are frequently exchangeable.}. MCMC allows us to indirectly generate independent samples from a particular posterior distribution. 
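+\noindent To make this concrete, the following is a minimal random-walk Metropolis sketch in Python; the standard normal target and the proposal scale are illustrative assumptions, not part of the text:
+\begin{verbatim}
+import numpy as np
+
+def logp(x):
+    # Unnormalized log-density of the target (standard normal here).
+    return -0.5 * x * x
+
+def metropolis(n, x0=0.0, scale=1.0):
+    # Random-walk Metropolis: the next state depends only on the
+    # current one -- precisely the Markov property described above.
+    chain = np.empty(n)
+    x = x0
+    for i in range(n):
+        y = x + scale * np.random.standard_normal()   # propose a move
+        # Accept with probability min(1, p(y)/p(x)).
+        if np.log(np.random.uniform()) < logp(y) - logp(x):
+            x = y
+        chain[i] = x   # on rejection, the chain stays put
+    return chain
+\end{verbatim}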
%___________________________________________________________________________
diff --git a/docs/tutorial.tex b/docs/tutorial.tex
index d2a322b4e53..9a0b71afe25 100644
--- a/docs/tutorial.tex
+++ b/docs/tutorial.tex
@@ -55,7 +55,7 @@ \section*{Conditionally stochastic and conditionally deterministic variables}
 3, 3, 1, 1, 2, 1, 1, 1, 1, 2, 4, 2, 0, 0, 1, 4, 0, 0, 0, 1, 0,
 0, 0, 0, 0, 1, 0, 0, 1, 0, 1])
 \end{verbatim}
-Now we create the switchpoint variable $s$:
+Next, we create the switchpoint variable $s$:
 \begin{verbatim}
 s = DiscreteUniform('s', lower=0, upper=110)
 \end{verbatim}
@@ -64,7 +64,7 @@ \section*{Conditionally stochastic and conditionally deterministic variables}
 e = Exponential('e', beta=1)
 l = Exponential('l', beta=1)
 \end{verbatim}
-Now the variable $r$, which selects the early rate $e$ for times before $s$ and the late rate $l$ for times after $s$. We will create $r$ using the \texttt{deterministic} decorator, which converts the ordinary Python function $r$ into a \texttt{Deterministic} object.
+Now, we define the variable $r$, which selects the early rate $e$ for times before $s$ and the late rate $l$ for times after $s$. We create $r$ using the \texttt{deterministic} decorator, which converts the ordinary Python function $r$ into a \texttt{Deterministic} object.
 \begin{verbatim}
 @deterministic
 def r(s=s, e=e, l=l):
@@ -73,7 +73,7 @@ \section*{Conditionally stochastic and conditionally deterministic variables}
 out[s:] = l
 return out
 \end{verbatim}
-Now the number of disasters $D$. We have observed the value of $D$, so we provide an initial value for it and set the init argument \texttt{isdata} to \texttt{True}:
+The last step is to define the number of disasters $D$. This is done the same way as for stochastic variables, except that we set the init argument \texttt{isdata} to \texttt{True}. This tells PyMC that this object has a fixed value and does not need to be sampled:
 \begin{verbatim}
 D = Poisson('D', mu=r, value=D_array, isdata=True)
 \end{verbatim}
@@ -190,7 +190,7 @@ \section*{Variables' values and log-probabilities}
 In [12]: l.logp
 Out[12]: -2.6491936762267811
 \end{verbatim}
-Stochastic objects need to call an internal function to compute their \texttt{logp} attributes, as $r$ needed to call an internal function to compute its value. Just as we created $r$ by decorating a function that computes its value, it's possible to create custom \texttt{Stochastic} objects by decorating functions that compute their log-probabilities or densities. See chapter \ref{chap:modelbuilding}.
+Stochastic objects need to call an internal function to compute their \texttt{logp} attributes, much as $r$ needed to call an internal function to compute its value. Just as we created $r$ by decorating a function that computes its value, it's possible to create custom \texttt{Stochastic} objects by decorating functions that compute their log-probabilities or densities (see chapter \ref{chap:modelbuilding}).
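+For concreteness, here is a sketch of what such a decorated \texttt{Stochastic} could look like, assuming the \texttt{stochastic} decorator mirrors \texttt{deterministic}; the hand-written uniform prior on $s$ is purely illustrative:
+\begin{verbatim}
+from numpy import log, inf
+
+@stochastic
+def s(value=50):
+    """Discrete uniform log-probability, written out by hand."""
+    if 0 <= value <= 110:
+        return -log(111.)  # log p(value) for a uniform on 0, ..., 110
+    return -inf            # values outside the range are impossible
+\end{verbatim}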
 
 \subsection*{Using \texttt{Variables} as parents of \texttt{Variables}}
diff --git a/epydoc.conf b/epydoc.conf
index f017fb3a4c2..b5cfb9f5e30 100644
--- a/epydoc.conf
+++ b/epydoc.conf
@@ -4,7 +4,8 @@
 # The list of objects to document.  Objects can be named using
 # dotted names, module filenames, or package directory names.
 # Alases for this option include "objects" and "values".
-modules: PyMC/Model.py, PyMC/distributions.py, PyMC/SamplingMethods.py, PyMC/database
+modules: docs/distributions.py
+# , pymc/Model.py, pymc/distributions.py, pymc/SamplingMethods.py, pymc/database
 
 # output
 # The type of output that should be generated.  Should be one
@@ -75,7 +76,7 @@ parse: yes
 
 # introspect
 # Whether or not introspection should be used to examine objects.
-introspect: no
+introspect: yes
 
 # graph
 # The list of graph types that should be automatically included
diff --git a/pymc/database/README.txt b/pymc/database/README.txt
index 37e097a97a0..28a47ecc219 100644
--- a/pymc/database/README.txt
+++ b/pymc/database/README.txt
@@ -2,35 +2,20 @@
 Database Backends
 -----------------
 
-By default, PyMC keeps the sampled data in memory and keeps no trace of it on the hard drive. To save this data to disk, PyMC provides different strategies, from simple ASCII files to compressed binary formats. These strategies are implemented different *database backends*, behaving identically from the user's perspective. In the following, the interface to these backends is discussed, and a description of the different backends is given.
-
+By default, PyMC keeps the sampled data in memory and keeps no trace of it on the hard drive. To save this data to disk, PyMC provides different storage strategies, which we refer to as *database backends*. All these backends provide the same user interface, making it trivial to switch from one backend to another. In the following, this common interface is presented, along with an individual description of each backend.
 
 Accessing Sampled Data: User Interface
 =======================================
+The database backend is selected by the `db` keyword::
 
-The choice of database backend is made when a sampler is created using the `db` keyword::
-
-    S = MCMC(DisasterModel, db='txt', dirname='test')
+    S = MCMC(DisasterModel, db='ram')
 
-This instructs the sampler to tally samples in txt files stored in a directory named `test`. Other choices for the database are given in the table below, the default being `ram`. When the `sample` method is called, a `chain` is created storing the sampled variables. The data in this chain can be accessed for each variable using its trace object ::
-
-    S.e.trace()
-
-When `S.db.close()` is called, the data is flushed to disk. That is, directories are created for each chain, with samples from each stochastic variable in a separate file. To access this data during a following session, each database provides a `load` function instantiating a `Database` object ::
-
-    DB = Database.txt.load('test')
-
-This object can then be linked to a model definition using ::
-
-    S = Sampler(DisasterSampler, db=DB)
+Here, we instructed the MCMC sampler to keep the trace in the computer's live memory. This means that when the Python session closes, all data will be lost. This is the default backend.
 
-For some databases (`hdf5`, `pickle`), loading an existing database restores the previous state of the sampler. That is, the attribtues of the Sampler, its Stochastic parameters and StepMethods are all set to the value they had at the time `D.db.close()` was called.
+Each time MCMC's `sample` method is called, a `chain` is created storing the sampled variables. The data in this chain can be accessed for each variable using the call method of its trace attribute::
 
-
-The `trace` object has the following signature .. [#]::
-
-    trace(self, burn=0, thin=1, chain=-1, slicing=None)
+    S.e.trace(burn=0, thin=1, chain=-1, slicing=None)
 
 with arguments having the following meaning:
 
 burn : int
   Number of initial samples to skip.
 
 thin : int
-  Number of samples to step.
+  The stride, i.e., the number of samples to step for each returned value.
 
 chain : int or None
   Index of the sampling chain to return. Use `None` to return all chains.
   Note that a new chain is created each time `sample` is called.
 
 slicing : slice
   Slice object used to parse the samples. Overrides burn and thin
   parameters.
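+
+For instance, to retrieve every second sample of `e` while skipping the first 1000 (the numbers here are purely illustrative)::
+
+    e_samples = S.e.trace(burn=1000, thin=2)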
-
-.. [#]: The `trace` attribute of stochastic parameters is in fact an instance of a Trace class, defined for each backend. This class has a method called `gettrace` that returns the trace of the object, and which is called by `trace()` .
+
+
+
+Loading data from a previous session
+====================================
+
+To store a copy of the trace on the hard disk, a number of backends are available: `txt`, `pickle`, `hdf5`, `sqlite` and `mysql`. These all write the data to disk, in such a way that it can be loaded back in a following session and appended to. So for instance, to save data in ASCII format, we would do::
+
+    S = MCMC(DisasterModel, db='txt', dirname='disaster_data')
+    S.sample(10000)
+    S.db.close()
+
+When `S.db.close()` is called, the data is flushed to disk. That is, directories are created for each chain, with samples from each stochastic variable in a separate file. To access this data during a following session, each database provides a `load` function instantiating a `Database` object ::
+
+    DB = Database.txt.load('disaster_data')
+
+This `Database` object can then be linked to a model definition using ::
+
+    S = Sampler(DisasterSampler, db=DB)
+    S.sample(10000)
+
+For some databases (`hdf5`, `pickle`), loading an existing database restores the previous state of the sampler. That is, the attributes of the Sampler, its Stochastic parameters and StepMethods are all set to the value they had at the time `S.db.close()` was called.
@@ -64,48 +69,54 @@ Used by default, this backend simply holds a copy in memory, with no output writ
 txt
 ---
-The `txt` backend is a modified `ram` backend, the only difference being that when the database is closed, the data is written to disk in ascii files. More precisely, the data for each chain is stored in a directory called `Chain_<#>`, the trace for each variable being stored in a file names`.txt`. This backend makes it easy to load the data using another application, but for large datasets, files tend to be embarassingly large and slow to load into memory.
+With the `txt` backend, the data is written to disk in ASCII files when the database's `close()` method is called. More precisely, the data for each chain is stored in a directory called `Chain_<#>`, the trace for each variable being stored in a file named `<variable name>.txt`. This backend makes it easy to load the data using another application, but for large datasets, files tend to be embarrassingly large and slow to load into memory.
 
 pickle
 ------
-As its name implies, the `pickle` database used the `Cpickle` module to save the trace objects. Use of this backend is not suggested since the generated files may become unreadable after a Python update.
+As its name implies, the `pickle` database relies on the `cPickle` module to save the trace objects. Use of this backend is appropriate for small-scale, short-lived projects. For longer term or larger projects, the `pickle` backend should be avoided since generated files might be unreadable across different Python versions.
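+
+Assuming the `load` pattern shown above for the `txt` backend carries over (the filename here is illustrative), a trace pickled in a previous session could be reloaded and extended with::
+
+    DB = Database.pickle.load('disaster_data.pickle')
+    S = MCMC(DisasterModel, db=DB)
+    S.sample(10000)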
+
+hdf5
+----
+
+The hdf5 backend uses `pyTables`_ to save data in binary HDF5 format. The main advantage of this backend is that data is flushed regularly to disk, reducing memory usage and allowing sampling of datasets much larger than the available RAM; data access remains fast. For this backend to work, pyTables must be installed, which in turn requires the hdf5 library.
 
 sqlite
 ------
 
-Chris ...
+The sqlite backend is based on the Python module sqlite3. It is not as mature as the other backends, in the sense that it does not support saving/restoring of state or plug-and-play reloading.
+
 
 mysql
 -----
 
-Chris ...
+The mysql backend is based on the MySQLdb Python module. It, too, is not as mature as the other backends.
 
-hdf5
-----
-The hdf5 backend uses pyTables to save data in binary HDF5 format. The main advantage of this backend is that data is flushed regularly to disk, reducing memory usage and allowing sampling of datasets much larger than the available memory. Data access is also very fast.
 
 ========== ===================================== =========================
   Backend     Description                           External Dependencies
 ========== ===================================== =========================
 no_trace   Do not tally samples at all.
-           Use only for testing purposes.
-
- ram        Store samples in memory.
+ ram        Store samples in live memory.
 txt        Write data to ascii files.
 pickle     Write data to a pickle file.
+ hdf5       Store samples in the HDF5 format.     pytables (>2.0), libhdf5
 sqlite     Store samples in a sqlite database.   sqlite3
 mysql      Store samples in a mysql database.    MySQLdb
- hdf5       Store samples in the HDF5 format.     pytables (>2.0), libhdf5
 ========== ===================================== =========================
 
 For more information about individual backends, refer to the `API`_ documentation.
 
 .. _`database/base.py`:
-    PyMC/database/base.py
+    pymc/database/base.py
 
 .. _`API`:
     docs/API.pdf
+
+.. _`pyTables`:
+    http://www.pytables.org/moin
+
diff --git a/pymc/distributions.py b/pymc/distributions.py
index 1c753a9db7b..fa61ff51b0f 100644
--- a/pymc/distributions.py
+++ b/pymc/distributions.py
@@ -409,7 +409,7 @@ def arlognormal_like(x, a, sigma, rho):
         x_i & = a_i \exp(e_i) \\
         e_i & = \rho e_{i-1} + \epsilon_i
 
-    where :math:'\epsilon_i \sim N(0,\sigma)'.
+    where :math:`\epsilon_i \sim N(0,\sigma)`.
     """
     return flib.arlognormal(x, np.log(a), sigma, rho, beta=1)
@@ -448,16 +448,16 @@ def bernoulli_like(x, p):
         f(x \mid p) = p^{x- 1} (1-p)^{1-x}
 
     :Parameters:
-      - 'x': Series of successes (1) and failures (0). :math:'x=0,1'
-      - 'p': Probability of success. :math:'0 < p < 1'
+      - `x` : Series of successes (1) and failures (0). :math:`x=0,1`
+      - `p` : Probability of success. 
:math:`0 < p < 1`
 
     :Example:
       >>> bernoulli_like([0,1,0,1], .4)
       -2.8542325496673584
 
     :Note:
-      - :math:'E(x)= p'
-      - :math:'Var(x)= p(1-p)'
+      - :math:`E(x)= p`
+      - :math:`Var(x)= p(1-p)`
 
     """
@@ -495,17 +495,20 @@ def beta_like(x, alpha, beta):
         f(x \mid \alpha, \beta) = \frac{\Gamma(\alpha + \beta)}{\Gamma(\alpha) \Gamma(\beta)} x^{\alpha - 1} (1 - x)^{\beta - 1}
 
     :Parameters:
-      - 'x': 0 < x < 1
-      - 'alpha': > 0
-      - 'beta': > 0
+      x : float
+          0 < x < 1
+      alpha : float
+          > 0
+      beta : float
+          > 0
 
     :Example:
       >>> beta_like(.4,1,2)
       0.18232160806655884
 
     :Note:
-      - :math:'E(X)=\frac{\alpha}{\alpha+\beta}'
-      - :math:'Var(X)=\frac{\alpha \beta}{(\alpha+\beta)^2(\alpha+\beta+1)}'
+      - :math:`E(X)=\frac{\alpha}{\alpha+\beta}`
+      - :math:`Var(X)=\frac{\alpha \beta}{(\alpha+\beta)^2(\alpha+\beta+1)}`
 
     """
 # try:
@@ -553,11 +556,11 @@ def binomial_like(x, n, p):
       n : int
         Number of Bernoulli trials, > x.
       p : float
-        Probability of success in each trial, :math:'p \in [0,1]'.
+        Probability of success in each trial, :math:`p \in [0,1]`.
 
     :Note:
-      - :math:'E(X)=np'
-      - :math:'Var(X)=np(1-p)'
+      - :math:`E(X)=np`
+      - :math:`Var(X)=np(1-p)`
 
     """
     return flib.binomial(x,n,p)
@@ -619,8 +622,10 @@ def cauchy_like(x, alpha, beta):
         f(x \mid \alpha, \beta) = \frac{1}{\pi \beta [1 + (\frac{x-\alpha}{\beta})^2]}
 
     :Parameters:
-      - 'alpha' : Location parameter.
-      - 'beta': Scale parameter > 0.
+      alpha : float
+          Location parameter.
+      beta : float
+          Scale parameter > 0.
 
     :Note:
       - Mode and median are at alpha.
@@ -634,7 +639,7 @@ def rchi2(nu, size=1):
     """
     rchi2(nu, size=1)
 
-    Random :math:'\chi^2' variates.
+    Random :math:`\chi^2` variates.
     """
     return np.random.chisquare(nu, size)
@@ -652,20 +657,20 @@ def chi2_like(x, nu):
     r"""
     chi2_like(x, nu)
 
-    Chi-squared :math:'\chi^2' log-likelihood.
+    Chi-squared :math:`\chi^2` log-likelihood.
 
     .. math::
         f(x \mid \nu) = \frac{x^{(\nu-2)/2}e^{-x/2}}{2^{\nu/2}\Gamma(\nu/2)}
 
     :Parameters:
       x : float
-        :math:'\ge 0'
-      :math:'\nu' : int
-        Degrees of freedom > 0
+        :math:`\ge 0`
+      nu : int
+        Degrees of freedom (:math:`\nu > 0`)
 
     :Note:
-      - :math:'E(X)=\nu'
-      - :math:'Var(X)=2\nu'
+      - :math:`E(X)=\nu`
+      - :math:`Var(X)=2\nu`
 
     """
@@ -716,13 +721,13 @@ def dirichlet_like(x, theta):
 
     :Parameters:
       x : (n,k-1) array
-        Where 'n' is the number of samples and 'k' the dimension.
-        :math:'0 < x_i < 1',  :math:'\sum_{i=1}^{k-1} x_i < 1'
+        Where `n` is the number of samples and `k` the dimension.
+        :math:`0 < x_i < 1`,  :math:`\sum_{i=1}^{k-1} x_i < 1`
      theta : (n,k) or (1,k) float
-        :math:'\theta > 0'
+        :math:`\theta > 0`
 
    :Note:
-     There is an `implicit' k'th value of x, equal to :math:'\sum_{i=1}^{k-1} x_i'.
+     There is an implicit k'th value of x, equal to :math:`1-\sum_{i=1}^{k-1} x_i`.
 
    """
 
 # Disabled multiple x's and theta's ... got confused reparametrizing
@@ -769,13 +774,13 @@ def exponential_like(x, beta):
 
     :Parameters:
       x : float
-        :math:'x \ge 0'
+        :math:`x \ge 0`
      beta : float
-        Survival parameter :math:'\beta > 0'
+        Survival parameter :math:`\beta > 0`
 
    :Note:
-     - :math:'E(X) = \beta'
-     - :math:'Var(X) = \beta^2'
+     - :math:`E(X) = \beta`
+     - :math:`Var(X) = \beta^2`
 
    """
    return flib.gamma(x, 1, beta)
@@ -809,11 +814,16 @@ def exponweib_like(x, alpha, k, loc=0, scale=1):
         z & = \frac{x-loc}{scale}
 
     :Parameters:
-      - 'x' : > 0
-      - 'alpha' : Shape parameter
-      - 'k' : > 0
-      - 'loc' : Location parameter
-      - 'scale' : Scale parameter > 0.
+      x : float
+          > 0
+      alpha : float
+          Shape parameter
+      k : float
+          > 0
+      loc : float
+          Location parameter
+      scale : float
+          Scale parameter > 0.
""" return flib.exponweib(x,alpha,k,loc,scale) @@ -851,11 +861,11 @@ def gamma_like(x, alpha, beta): :Parameters: x : float - :math:'x \ge 0' + :math:`x \ge 0` alpha : float - Shape parameter :math:'\alpha > 0'. + Shape parameter :math:`\alpha > 0`. beta : float - Scale parameter :math:'\beta > 0'. + Scale parameter :math:`\beta > 0`. """ @@ -937,11 +947,11 @@ def geometric_like(x, p): x : int Number of trials before first success, > 0. p : float - Probability of success on an individual trial, :math:'p \in [0,1]' + Probability of success on an individual trial, :math:`p \in [0,1]` :Note: - - :math:'E(X)=1/p' - - :math:'Var(X)=\frac{1-p}{p^2}' + - :math:`E(X)=1/p` + - :math:`Var(X)=\frac{1-p}{p^2}` """ @@ -972,16 +982,16 @@ def half_normal_like(x, tau): half_normal_like(x, tau) Half-normal log-likelihood, a normal distribution with mean 0 and limited - to the domain :math:'x \in \[0, \infty\)'. + to the domain :math:`x \in [0, \infty)`. .. math:: - f(x \mid \tau) = \np.sqrt{\frac{2\tau}{\pi}}\exp\left\{ {\frac{-x^2 \tau}{2}}\right\} + f(x \mid \tau) = \sqrt{\frac{2\tau}{\pi}}\exp\left\{ {\frac{-x^2 \tau}{2}}\right\} :Parameters: x : float - :math:'x \ge 0' + :math:`x \ge 0` tau : float - :math:'\tau > 0' + :math:`\tau > 0` """ @@ -1019,12 +1029,12 @@ def hypergeometric_like(x, n, m, N): population without replacement. .. math:: - f(x \mid n, m, N) = &=& \frac{\left(\begin{array}{c}m \\x\end{array}\right) \left(\begin{array}{c}N-m \\n-x\end{array}\right)}{\left(\begin{array}{c}N \\n\end{array}\right)} + f(x \mid n, m, N) = \frac{\binom{m}{x}\binom{N-m}{n-x}}{\binom{N}{n}} :Parameters: x : int Number of successes in a sample drawn from a population. - :math:'\max(0, draws-failures) \leq x \leq \min(draws, success)' + :math:`\max(0, draws-failures) \leq x \leq \min(draws, success)` n : int Size of sample drawn from the population. m : int @@ -1033,7 +1043,7 @@ def hypergeometric_like(x, n, m, N): Total number of units in the population. :Note: - :math:'E(X) = \frac{n n}{N}' + :math:`E(X) = \frac{n n}{N}` """ return flib.hyperg(x, n, m, N) @@ -1064,18 +1074,18 @@ def inverse_gamma_like(x, alpha, beta): Inverse gamma log-likelihood, the reciprocal of the gamma distribution. .. math:: - f(x \mid \alpha, \beta) = \frac{\beta^{\alpha}}{\Gamma(\alpha)} x^{-\alpha - 1} \exp\left(\frac{-\beta}{x}}\right) + f(x \mid \alpha, \beta) = \frac{\beta^{\alpha}}{\Gamma(\alpha)} x^{-\alpha - 1} \exp\left(\frac{-\beta}{x}\right) :Parameters: x : float x > 0 alpha : float - Shape parameter, :math:'\alpha > 0'. + Shape parameter, :math:`\alpha > 0`. beta : float - Scale parameter, :math:'\beta > 0'. + Scale parameter, :math:`\beta > 0`. :Note: - :math:'E(X)=\frac{1}{\beta(\alpha-1)}' for :math:'\alpha > 1'. + :math:`E(X)=\frac{1}{\beta(\alpha-1)}` for :math:`\alpha > 1`. """ return flib.igamma(x, alpha, beta) @@ -1109,7 +1119,7 @@ def lognormal_like(x, mu, tau): small independent factors. .. math:: - f(x \mid \mu, \tau) = \np.sqrt{\frac{\tau}{2\pi}}\frac{ + f(x \mid \mu, \tau) = \sqrt{\frac{\tau}{2\pi}}\frac{ \exp\left\{ -\frac{\tau}{2} (\ln(x)-\mu)^2 \right\}}{x} :Parameters: @@ -1121,7 +1131,7 @@ def lognormal_like(x, mu, tau): Scale parameter, > 0. :Note: - :math:'E(X)=e^{\mu+\frac{1}{2\tau}}' + :math:`E(X)=e^{\mu+\frac{1}{2\tau}}` """ @@ -1172,17 +1182,17 @@ def multinomial_like(x, n, p): :Parameters: x : (ns, k) int Random variable indicating the number of time outcome i is observed, - :math:'\sum_{i=1}^k x_i=n', :math:'x_i \ge 0'. + :math:`\sum_{i=1}^k x_i=n`, :math:`x_i \ge 0`. n : int Number of trials. 
p : (k,) float
        Probability of each one of the different outcomes,
-        :math:'\sum_{i=1}^k p_i = 1)', :math:'p_i \ge 0'.
+        :math:`\sum_{i=1}^k p_i = 1`, :math:`p_i \ge 0`.
 
    :Note:
-      - :math:'E(X_i)=n p_i'
-      - :math:'var(X_i)=n p_i(1-p_i)'
-      - :math:'cov(X_i,X_j) = -n p_i p_j'
+      - :math:`E(X_i)=n p_i`
+      - :math:`var(X_i)=n p_i(1-p_i)`
+      - :math:`cov(X_i,X_j) = -n p_i p_j`
 
    """
@@ -1235,13 +1245,13 @@ def multivariate_hypergeometric_like(x, m):
 
    .. math::
-        \frac{\prod_i \binom{m_i}{c_i}}{\binom{N}{n]}
+        \frac{\prod_i \binom{m_i}{x_i}}{\binom{N}{n}}
 
-    where :math:'N = \sum_i m_i' and :math:'n = \sum_i x_i'.
+    where :math:`N = \sum_i m_i` and :math:`n = \sum_i x_i`.
 
    :Parameters:
      x : int sequence
-        Number of draws from each category, :math:'< m'
+        Number of draws from each category, :math:`< m`
      m : int sequence
        Number of items in each categoy.
    """
@@ -1474,7 +1484,7 @@ def normal_like(x, mu, tau):
    Normal log-likelihood.
 
    .. math::
-        f(x \mid \mu, \tau) = \np.sqrt{\frac{\tau}{2\pi}} \exp\left\{ -\frac{\tau}{2} (x-\mu)^2 \right\}
+        f(x \mid \mu, \tau) = \sqrt{\frac{\tau}{2\pi}} \exp\left\{ -\frac{\tau}{2} (x-\mu)^2 \right\}
 
 
    :Parameters:
@@ -1486,8 +1496,8 @@
        Precision of the distribution, > 0.
 
    :Note:
-      - :math:'E(X) = \mu'
-      - :math:'Var(X) = 1/\tau'
+      - :math:`E(X) = \mu`
+      - :math:`Var(X) = 1/\tau`
 
    """
 
 # try:
@@ -1533,14 +1543,14 @@ def poisson_like(x,mu):
 
    :Parameters:
      x : int
-        :math:'x \in {0,1,2,...}'
+        :math:`x \in {0,1,2,...}`
      mu : float
        Expected number of occurrences that occur during the given interval,
-        :math:'\mu \geq 0'.
+        :math:`\mu \geq 0`.
 
    :Note:
-      - :math:'E(x)=\mu'
-      - :math:'Var(x)=\mu'
+      - :math:`E(x)=\mu`
+      - :math:`Var(x)=\mu`
    """
 # try:
 #    constrain(x, lower=0,allow_equal=True)
@@ -1618,7 +1628,7 @@ def skew_normal_like(x,mu,tau,alpha):
 
    Azzalini's skew-normal log-likelihood
 
-    ..math::
+    .. math::
        f(x \mid \mu, \tau, \alpha) = 2 \Phi((x-\mu)\sqrt{tau}\alpha) \phi(x,\mu,\tau)
 
    :Parameters:
@@ -1675,7 +1685,7 @@ def discrete_uniform_like(x,lower, upper):
 
    :Parameters:
      x : float
-        :math:'lower \geq x \geq upper'
+        :math:`lower \leq x \leq upper`
      lower : float
        Lower limit.
      upper : float
@@ -1714,7 +1724,7 @@ def uniform_like(x,lower, upper):
 
    :Parameters:
      x : float
-        :math:'lower \geq x \geq upper'
+        :math:`lower \leq x \leq upper`
      lower : float
        Lower limit.
      upper : float
@@ -1749,15 +1759,15 @@ def weibull_like(x, alpha, beta):
 
    :Parameters:
      x : float
-        :math:'x \ge 0'
+        :math:`x \ge 0`
      alpha : float
        > 0
      beta : float
        > 0
 
    :Note:
-      - :math:'E(x)=\beta \Gamma(1+\frac{1}{\alpha})'
-      - :math:'Var(x)=\beta^2 \Gamma(1+\frac{2}{\alpha} - \mu^2)'
+      - :math:`E(x)=\beta \Gamma(1+\frac{1}{\alpha})`
+      - :math:`Var(x)=\beta^2 \Gamma(1+\frac{2}{\alpha}) - \mu^2`
    """
 # try:
 #    constrain(alpha, lower=0)
@@ -1774,7 +1784,7 @@ def rwishart(n, Tau):
 
    Return a Wishart random matrix.
 
-    Tau is the inverse of the 'covariance' matrix :math:'C'.
+    Tau is the inverse of the 'covariance' matrix :math:`C`.
    """
 
    p = Tau.shape[0]
@@ -1811,7 +1821,7 @@ def wishart_like(X, n, Tau):
 
    .. math::
        f(X \mid n, T) = {\mid T \mid}^{n/2}{\mid X \mid}^{(n-k-1)/2} \exp\left\{ -\frac{1}{2} Tr(TX) \right\}
 
-    where :math:'k' is the rank of X.
+    where :math:`k` is the rank of X.
 
    :Parameters:
      X : matrix
@@ -1851,7 +1861,7 @@
 
    return n * np.asarray(C)
 
 def wishart_cov_like(X, n, C):
-    r"""
+    r"""PLEASE CHECK THIS DOCSTRING
    wishart_like(X, n, C)
 
    Wishart log-likelihood. The Wishart distribution is the probability
 
    .. 
math:: f(X \mid n, T) = {\mid T \mid}^{n/2}{\mid X \mid}^{(n-k-1)/2} \exp\left\{ -\frac{1}{2} Tr(TX) \right\} - where :math:'k' is the rank of X. + where :math:`k` is the rank of X. :Parameters: X : matrix Symmetric, positive definite. n : int Degrees of freedom, > 0. - Tau : matrix + C : matrix Symmetric and positive definite """ diff --git a/pymc/tests/test_database.py b/pymc/tests/test_database.py index a5441929781..2e6676a41a7 100644 --- a/pymc/tests/test_database.py +++ b/pymc/tests/test_database.py @@ -71,12 +71,7 @@ def check_load(self): assert_array_equal(S.e.trace(chain=None).shape, (300,)) assert_equal(S.e.trace.length(None), 300) S.db.close() -##class test_txt(NumpyTestCase): -## def check(self): -## M = MCMC(DisasterModel, db='txt') -## M.sample(300,100,2) -## assert_equal(M.e.trace().shape, (150,)) -## + ##class test_mysql(NumpyTestCase): ## def check(self): ## M = MCMC(DisasterModel, db='mysql') @@ -106,10 +101,6 @@ def check_load(self): assert_array_equal(S.e.trace(chain=None).shape, (300,)) S.db.close() -##class test_hdf5(NumpyTestCase): -## def check(self): -## M = MCMC(DisasterModel, db='hdf5') -## M.sample(300,100,2) if hasattr(database, 'hdf5'): class test_hdf5(NumpyTestCase): def __init__(*args, **kwds):