diff --git a/pyro/contrib/minipyro.py b/pyro/contrib/minipyro.py
index 9a06dda9e0..fb176c8101 100644
--- a/pyro/contrib/minipyro.py
+++ b/pyro/contrib/minipyro.py
@@ -20,9 +20,9 @@
 # Pyro keeps track of two kinds of global state:
 # i)  The effect handler stack, which enables non-standard interpretations of
 #     Pyro primitives like sample();
-#     See http://docs.pyro.ai/en/0.3.0-release/poutine.html
+#     See http://docs.pyro.ai/en/0.3.1/poutine.html
 # ii) Trainable parameters in the Pyro ParamStore;
-#     See http://docs.pyro.ai/en/0.3.0-release/parameters.html
+#     See http://docs.pyro.ai/en/0.3.1/parameters.html
 
 PYRO_STACK = []
 PARAM_STORE = {}
@@ -201,7 +201,7 @@ def plate(name, size, dim):
 
 # This is a thin wrapper around the `torch.optim.Adam` class that
 # dynamically generates optimizers for dynamically generated parameters.
-# See http://docs.pyro.ai/en/0.3.0-release/optimization.html
+# See http://docs.pyro.ai/en/0.3.1/optimization.html
 class Adam(object):
     def __init__(self, optim_args):
         self.optim_args = optim_args
@@ -226,7 +226,7 @@ def __call__(self, params):
 
 # This is a unified interface for stochastic variational inference in Pyro.
 # The actual construction of the loss is taken care of by `loss`.
-# See http://docs.pyro.ai/en/0.3.0-release/inference_algos.html
+# See http://docs.pyro.ai/en/0.3.1/inference_algos.html
 class SVI(object):
     def __init__(self, model, guide, optim, loss):
         self.model = model
diff --git a/tutorial/source/bo.ipynb b/tutorial/source/bo.ipynb
index 69aca8a4ef..b673f7fbcd 100644
--- a/tutorial/source/bo.ipynb
+++ b/tutorial/source/bo.ipynb
@@ -35,7 +35,7 @@
    "\n",
    "A good acquisition function should make use of the uncertainty encoded in the posterior to encourage a balance between exploration—querying points where we know little about $f$—and exploitation—querying points in regions we have good reason to think $x^*$ may lie. As the iterative procedure progresses our model for $f$ evolves and so does the acquisition function. If our model is good and we've chosen a reasonable acquisition function, we expect that the acquisition function will guide the query points $x_n$ towards $x^*$.\n",
    "\n",
-    "In this tutorial, our model for $f$ will be a Gaussian process. In particular we will see how to use the [Gaussian Process module](http://docs.pyro.ai/en/0.2.0-release/contrib.gp.html) in Pyro to implement a simple Bayesian optimization procedure."
+    "In this tutorial, our model for $f$ will be a Gaussian process. In particular we will see how to use the [Gaussian Process module](http://docs.pyro.ai/en/0.3.1/contrib.gp.html) in Pyro to implement a simple Bayesian optimization procedure."
   ]
  },
 {