Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Update uses of distributions / bijectors to use TensorFlow Probability. #924

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 4 additions & 4 deletions docs/tex/api/model-development.tex
Original file line number Diff line number Diff line change
Expand Up @@ -7,14 +7,14 @@ \subsubsection{Developing Custom Random Variables}
Oftentimes we'd like to implement our own random variables.
To do so, write a class that inherits
the \texttt{RandomVariable} class in \texttt{edward.models} and
the \texttt{Distribution} class in \texttt{tf.contrib.distributions} (in that
the \texttt{Distribution} class in \texttt{tfp.distributions} (in that
order). A template is provided below.

\begin{lstlisting}[language=Python]
from edward.models import RandomVariable
from tensorflow.contrib.distributions import Distribution
import tensorflow_probability as tfp

class CustomRandomVariable(RandomVariable, Distribution):
class CustomRandomVariable(RandomVariable, tfp.distributions.Distribution):
def __init__(self, *args, **kwargs):
super(CustomRandomVariable, self).__init__(*args, **kwargs)

Expand All @@ -39,7 +39,7 @@ \subsubsection{Developing Custom Random Variables}
in the Github repository.
For more details and more methods one can implement, see the API
documentation in TensorFlow's
\href{https://www.tensorflow.org/api_docs/python/tf/contrib/distributions/Distribution}{\texttt{Distribution} class}.
\href{https://www.tensorflow.org/probability/api_docs/python/tfp/distributions}{\texttt{Distribution} class}.

\subsubsection{Advanced settings}

Expand Down
8 changes: 4 additions & 4 deletions docs/tex/tutorials/automated-transformations.tex
Original file line number Diff line number Diff line change
Expand Up @@ -99,18 +99,18 @@ \subsubsection{Automated Transformations in Inference}
and then inverting the transformation:

\begin{lstlisting}[language=Python]
from tensorflow.contrib.distributions import bijectors
import tensorflow_probability as tfp

x_unconstrained = inference.transformations[x] # transformed prior
x_transform = x_unconstrained.bijector # transformed prior's transformation
qx_constrained = ed.transform(qx, bijectors.Invert(x_transform))
qx_constrained = ed.transform(qx, tfp.bijectors.Invert(x_transform))
\end{lstlisting}

The set of transformations is given by
\texttt{inference.transformations}, which is a dictionary with keys
given by any constrained latent variables and values given by their
transformed distribution. We use the
\href{https://www.tensorflow.org/api_docs/python/tf/distributions/bijectors}{\texttt{bijectors}}
\href{https://www.tensorflow.org/probability/api_docs/python/tfp/bijectors}{\texttt{bijectors}}
module in \texttt{tensorflow\_probability} in order to handle invertible
transformations.

Expand Down Expand Up @@ -158,7 +158,7 @@ \subsubsection{Automated Transformations in Inference}
transformation.

\begin{lstlisting}[language=Python]
from tensorflow.contrib.distributions import bijectors
import tensorflow_probability as tfp

x_unconstrained = inference.transformations[x] # transformed prior
x_transform = x_unconstrained.bijector # transformed prior's transformation
Expand Down
13 changes: 12 additions & 1 deletion edward/inferences/inference.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,18 @@
from edward.util import check_data, check_latent_vars, get_session, \
get_variables, Progbar, transform

from tensorflow.contrib.distributions import bijectors
try:
import tensorflow_probability as tfp
bijectors = tfp.bijectors
except Exception as e:
print("{0}. Can not import TensorFlow Probability, "
"defaulting to TensorFlow.".format(e))
try:
from tensorflow.contrib.distributions import bijectors
except Exception as e2:
raise ImportError(
"{0}. Your TensorFlow version is not supported.".format(e2))



@six.add_metaclass(abc.ABCMeta)
Expand Down
17 changes: 13 additions & 4 deletions edward/inferences/klqp.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,9 +11,19 @@

try:
from edward.models import Normal
from tensorflow.contrib.distributions import kl_divergence
import tensorflow_probability as tfp
kl_divergence = tfp.distributions.kl_divergence
FULLY_REPARAMETERIZED = tfp.distributions.FULLY_REPARAMETERIZED
except Exception as e:
raise ImportError("{0}. Your TensorFlow version is not supported.".format(e))
print("{0}. Can not import TensorFlow Probability, "
"defaulting to TensorFlow.".format(e))
try:
from edward.models import Normal
from tensorflow.contrib.distributions import kl_divergence
FULLY_REPARAMETERIZED = tf.contrib.distributions.FULLY_REPARAMETERIZED
except Exception as e2:
raise ImportError(
"{0}. Your TensorFlow version is not supported.".format(e2))


class KLqp(VariationalInference):
Expand Down Expand Up @@ -136,8 +146,7 @@ def build_loss_and_gradients(self, var_list):
Normal.
"""
is_reparameterizable = all([
rv.reparameterization_type ==
tf.contrib.distributions.FULLY_REPARAMETERIZED
rv.reparameterization_type == FULLY_REPARAMETERIZED
for rv in six.itervalues(self.latent_vars)])
is_analytic_kl = all([isinstance(z, Normal) and isinstance(qz, Normal)
for z, qz in six.iteritems(self.latent_vars)])
Expand Down
12 changes: 10 additions & 2 deletions edward/inferences/map.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,9 +10,17 @@
from edward.util import copy, transform

try:
from tensorflow.contrib.distributions import bijectors
import tensorflow_probability as tfp
bijectors = tfp.bijectors
except Exception as e:
raise ImportError("{0}. Your TensorFlow version is not supported.".format(e))
print("{0}. Can not import TensorFlow Probability, "
"defaulting to TensorFlow.".format(e))
try:
from tensorflow.contrib.distributions import bijectors
except Exception as e2:
raise ImportError(
"{0}. Your TensorFlow version is not supported.".format(e2))



class MAP(VariationalInference):
Expand Down
16 changes: 13 additions & 3 deletions edward/models/dirichlet_process.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,23 @@
import tensorflow as tf

from edward.models.random_variable import RandomVariable
from tensorflow.contrib.distributions import Distribution

try:
from edward.models.random_variables import Bernoulli, Beta
from tensorflow.contrib.distributions import NOT_REPARAMETERIZED
import tensorflow_probability as tfp
Distribution = tfp.distributions.Distribution
NOT_REPARAMETERIZED = tfp.distributions.NOT_REPARAMETERIZED
except Exception as e:
raise ImportError("{0}. Your TensorFlow version is not supported.".format(e))
print("{0}. Can not import TensorFlow Probability, "
"defaulting to TensorFlow.".format(e))
try:
from edward.models.random_variables import Bernoulli, Beta
from tensorflow.contrib.distributions import Distribution
from tensorflow.contrib.distributions import NOT_REPARAMETERIZED
except Exception as e2:
raise ImportError(
"{0}. Your TensorFlow version is not supported.".format(e2))



class distributions_DirichletProcess(Distribution):
Expand Down
19 changes: 15 additions & 4 deletions edward/models/empirical.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,12 +5,23 @@
import tensorflow as tf

from edward.models.random_variable import RandomVariable
from tensorflow.contrib.distributions import Distribution

try:
from tensorflow.contrib.distributions import FULLY_REPARAMETERIZED
import tensorflow_probability as tfp
Distribution = tfp.distributions.Distribution
Categorical = tfp.distributions.Categorical
FULLY_REPARAMETERIZED = tfp.distributions.FULLY_REPARAMETERIZED
except Exception as e:
raise ImportError("{0}. Your TensorFlow version is not supported.".format(e))
print("{0}. Can not import TensorFlow Probability, "
"defaulting to TensorFlow.".format(e))
try:
from tensorflow.contrib.distributions import Distribution
Categorical = tf.contrib.distributions.Categorical
FULLY_REPARAMETERIZED = tf.contrib.distributions.FULLY_REPARAMETERIZED
except Exception as e2:
raise ImportError(
"{0}. Your TensorFlow version is not supported.".format(e2))



class distributions_Empirical(Distribution):
Expand Down Expand Up @@ -104,7 +115,7 @@ def _sample_n(self, n, seed=None):
return tf.tile(input_tensor, multiples)
else:
probs = tf.ones([self.n]) / tf.cast(self.n, dtype=tf.float32)
cat = tf.contrib.distributions.Categorical(probs)
cat = Categorical(probs)
indices = cat._sample_n(n, seed)
tensor = tf.gather(input_tensor, indices)
return tensor
Expand Down
13 changes: 11 additions & 2 deletions edward/models/param_mixture.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,12 +6,21 @@
import tensorflow as tf

from edward.models.random_variable import RandomVariable
from tensorflow.contrib.distributions import Distribution

try:
from edward.models.random_variables import Categorical
import tensorflow_probability as tfp
Distribution = tfp.distributions.Distribution
except Exception as e:
raise ImportError("{0}. Your TensorFlow version is not supported.".format(e))
print("{0}. Can not import TensorFlow Probability, "
"defaulting to TensorFlow.".format(e))
try:
from edward.models.random_variables import Categorical
from tensorflow.contrib.distributions import Distribution
except Exception as e2:
raise ImportError(
"{0}. Your TensorFlow version is not supported.".format(e2))



class distributions_ParamMixture(Distribution):
Expand Down
12 changes: 10 additions & 2 deletions edward/models/point_mass.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,9 +8,17 @@
try:
import tensorflow_probability as tfp
Distribution = tfp.distributions.Distribution
FULLY_REPARAMETERIZED = tfp.distributions.FULLY_REPARAMETERIZED
except Exception as e:
print("{0}. Can not import TensorFlow Probability, "
"defaulting to TensorFlow.".format(e))
try:
from tensorflow.contrib.distributions import Distribution
FULLY_REPARAMETERIZED = tf.contrib.distributions.FULLY_REPARAMETERIZED
except Exception as e2:
raise ImportError(
"{0}. Your TensorFlow version is not supported.".format(e2))



class distributions_PointMass(Distribution):
Expand Down
2 changes: 1 addition & 1 deletion edward/models/random_variable.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ class RandomVariable(object):

`RandomVariable` assumes use in a multiple inheritance setting. The
child class must first inherit `RandomVariable`, then second inherit a
class in `tf.contrib.distributions`. With Python's method resolution
class in `tfp.distributions`. With Python's method resolution
order, this implies the following during initialization (using
`distributions.Bernoulli` as an example):

Expand Down
13 changes: 12 additions & 1 deletion edward/util/random_variables.py
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,18 @@
from tensorflow.python.framework.ops import set_shapes_for_outputs
from tensorflow.python.util import compat

tfb = tf.contrib.distributions.bijectors
try:
import tensorflow_probability as tfp
tfb = tfp.bijectors
except Exception as e:
print("{0}. Can not import TensorFlow Probability, "
"defaulting to TensorFlow.".format(e))
try:
tfb = tf.contrib.distributions.bijectors
except Exception as e2:
raise ImportError(
"{0}. Your TensorFlow version is not supported.".format(e2))



def check_data(data):
Expand Down
3 changes: 2 additions & 1 deletion examples/deep_exponential_family.py
Original file line number Diff line number Diff line change
Expand Up @@ -71,6 +71,7 @@ class objects visual
import numpy as np
import os
import tensorflow as tf
import tensorflow_probability as tfp

from datetime import datetime
from edward.models import Gamma, Poisson, Normal, PointMass, \
Expand Down Expand Up @@ -126,7 +127,7 @@ def lognormal_q(shape, name=None):
"scale", shape, initializer=tf.random_normal_initializer(stddev=0.1))
rv = TransformedDistribution(
distribution=Normal(loc, tf.maximum(tf.nn.softplus(scale), min_scale)),
bijector=tf.contrib.distributions.bijectors.Exp())
bijector=tfp.bijectors.Exp())
return rv


Expand Down
5 changes: 3 additions & 2 deletions notebooks/automated_transformations.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,8 @@
"import tensorflow as tf\n",
"\n",
"from edward.models import Empirical, Gamma, Normal\n",
"from tensorflow.contrib.distributions import bijectors"
"import tensorflow_probability as tfp\n",
"bijectors = tfp.bijectors",
]
},
{
Expand Down Expand Up @@ -172,7 +173,7 @@
"`inference.transformations`, which is a dictionary with keys\n",
"given by any constrained latent variables and values given by their\n",
"transformed distribution. We use the\n",
"[`bijectors`](https://www.tensorflow.org/versions/master/api_docs/python/tf/distributions/bijectors)\n",
"[`bijectors`](https://www.tensorflow.org/probability/api_docs/python/tfp/bijectors)\n",
"module in `tf.distributions` in order to handle invertible\n",
"transformations.\n",
"`qx_unconstrained` is a random variable distributed\n",
Expand Down
8 changes: 4 additions & 4 deletions tests/inferences/ar_process_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,16 +5,15 @@
import edward as ed
import numpy as np
import tensorflow as tf
import tensorflow_probability as tfp

from edward.models import Normal, PointMass
from scipy.optimize import minimize

from edward.models import RandomVariable
from tensorflow.contrib.distributions import Distribution
from tensorflow.contrib.distributions import FULLY_REPARAMETERIZED


class AutoRegressive(RandomVariable, Distribution):
class AutoRegressive(RandomVariable, tfp.distributions.Distribution):
# a 1-D AR(1) process
# a[t + 1] = a[t] + eps with eps ~ N(0, sig**2)
def __init__(self, T, a, sig, *args, **kwargs):
Expand All @@ -29,7 +28,8 @@ def __init__(self, T, a, sig, *args, **kwargs):
if 'allow_nan_stats' not in kwargs:
kwargs['allow_nan_stats'] = False
if 'reparameterization_type' not in kwargs:
kwargs['reparameterization_type'] = FULLY_REPARAMETERIZED
kwargs['reparameterization_type'] = (
tfp.distributions.FULLY_REPARAMETERIZED)
if 'validate_args' not in kwargs:
kwargs['validate_args'] = False
if 'name' not in kwargs:
Expand Down
13 changes: 7 additions & 6 deletions tests/inferences/inference_auto_transform_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,11 @@
import edward as ed
import numpy as np
import tensorflow as tf
import tensorflow_probability as tfp

from edward.models import (Empirical, Gamma, Normal, PointMass,
TransformedDistribution, Beta, Bernoulli)
from edward.util import transform
from tensorflow.contrib.distributions import bijectors


class test_inference_auto_transform_class(tf.test.TestCase):
Expand All @@ -20,7 +20,7 @@ def test_auto_transform_true(self):
# automated transformation on latter (assuming it is softplus).
x = TransformedDistribution(
distribution=Normal(0.0, 0.5),
bijector=tf.contrib.distributions.bijectors.Softplus())
bijector=tfp.bijectors.Softplus())
x.support = 'nonnegative'
qx = Normal(loc=tf.Variable(tf.random_normal([])),
scale=tf.nn.softplus(tf.Variable(tf.random_normal([]))))
Expand All @@ -36,7 +36,8 @@ def test_auto_transform_true(self):
n_samples = 10000
x_mean, x_var = tf.nn.moments(x.sample(n_samples), 0)
x_unconstrained = inference.transformations[x]
qx_constrained = transform(qx, bijectors.Invert(x_unconstrained.bijector))
qx_constrained = transform(
qx, tfp.bijectors.Invert(x_unconstrained.bijector))
qx_mean, qx_var = tf.nn.moments(qx_constrained.sample(n_samples), 0)
stats = sess.run([x_mean, qx_mean, x_var, qx_var])
self.assertAllClose(info_dict['loss'], 0.0, rtol=0.2, atol=0.2)
Expand All @@ -49,7 +50,7 @@ def test_auto_transform_false(self):
# automated transformation; it should fail.
x = TransformedDistribution(
distribution=Normal(0.0, 0.5),
bijector=tf.contrib.distributions.bijectors.Softplus())
bijector=tfp.bijectors.Softplus())
x.support = 'nonnegative'
qx = Normal(loc=tf.Variable(tf.random_normal([])),
scale=tf.nn.softplus(tf.Variable(tf.random_normal([]))))
Expand Down Expand Up @@ -115,7 +116,7 @@ def test_hmc_custom(self):
with self.test_session() as sess:
x = TransformedDistribution(
distribution=Normal(1.0, 1.0),
bijector=tf.contrib.distributions.bijectors.Softplus())
bijector=tfp.bijectors.Softplus())
x.support = 'nonnegative'
qx = Empirical(tf.Variable(tf.random_normal([1000])))

Expand All @@ -140,7 +141,7 @@ def test_hmc_default(self):
with self.test_session() as sess:
x = TransformedDistribution(
distribution=Normal(1.0, 1.0),
bijector=tf.contrib.distributions.bijectors.Softplus())
bijector=tfp.bijectors.Softplus())
x.support = 'nonnegative'

inference = ed.HMC([x])
Expand Down
Loading