changed keras #453

Open · wants to merge 1 commit into base: main
Upgrading_From_Edward_To_Edward2.md (6 changes: 3 additions & 3 deletions)
@@ -185,7 +185,7 @@ approximation—another Edward2 program—and apply tracers to write the
evidence lower bound (Hinton & Camp, 1993; Jordan, Ghahramani, Jaakkola, & Saul,
1999; Waterhouse, MacKay, & Robinson, 1996). Note we use factory functions
(functions which build other functions) for simplicity, but you can also use
-`tf.keras.Models` as stateful classes which automatically manage the variables.
+`tf.python.keras.Models` as stateful classes which automatically manage the variables.

```python
def build_trainable_positive_pointmass(shape, name=None):
@@ -259,7 +259,7 @@ def train_step(bag_of_words, step):
with writer.default():
tf.summary.scalar("elbo", elbo, step=step)
loss = -elbo
-optimizer = tf.keras.optimizers.Adam(1e-3)
+optimizer = tf.python.keras.optimizers.Adam(1e-3)
gradients = tape.gradient(loss, trainable_variables)
optimizer.apply_gradients(zip(gradients, trainable_variables))
return loss
@@ -375,7 +375,7 @@ observed_statistics, replicated_statistics = ed.ppc(
```

__Edward2__. Build the metric manually or use TensorFlow
-abstractions such as `tf.keras.metrics`.
+abstractions such as `tf.python.keras.metrics`.

```python
# See posterior_predictive built in Variational Inference section.
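For readers skimming the hunk above: a metric can be built by hand or with the stateful Keras metric objects the prose references. A minimal sketch, using the public `tf.keras` alias and a placeholder `log_likelihoods` tensor (both are assumptions for illustration, not part of this PR):

```python
# Sketch: manual reduction vs. a stateful Keras metric. The two agree on a
# single batch; the metric object additionally accumulates across batches.
import tensorflow as tf

log_likelihoods = tf.random.normal([64])       # placeholder statistics

manual_mean = tf.reduce_mean(log_likelihoods)  # build the metric manually

metric = tf.keras.metrics.Mean(name="predictive_ll")
metric.update_state(log_likelihoods)           # Keras abstraction
assert abs(float(metric.result()) - float(manual_mean)) < 1e-5
```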
edward2/tensorflow/constraints.py (24 changes: 12 additions & 12 deletions)
@@ -15,7 +15,7 @@

"""Constraints.

-One subtlety is how Bayesian Layers uses `tf.keras.constraints`. Typically,
+One subtlety is how Bayesian Layers uses `tf.python.keras.constraints`. Typically,
Keras constraints are used with projected gradient descent, where one performs
unconstrained optimization and then applies a projection (the constraint) after
each gradient update. To stay in line with probabilistic literature, trainable
@@ -27,10 +27,10 @@
import tensorflow as tf


-class Exp(tf.keras.constraints.Constraint):
+class Exp(tf.python.keras.constraints.Constraint):
"""Exp constraint."""

-def __init__(self, epsilon=tf.keras.backend.epsilon()):
+def __init__(self, epsilon=tf.python.keras.backend.epsilon()):
self.epsilon = epsilon

def __call__(self, w):
@@ -40,10 +40,10 @@ def get_config(self):
return {'epsilon': self.epsilon}


-class Positive(tf.keras.constraints.Constraint):
+class Positive(tf.python.keras.constraints.Constraint):
"""Positive constraint."""

-def __init__(self, epsilon=tf.keras.backend.epsilon()):
+def __init__(self, epsilon=tf.python.keras.backend.epsilon()):
self.epsilon = epsilon

def __call__(self, w):
@@ -53,10 +53,10 @@ def get_config(self):
return {'epsilon': self.epsilon}


-class Softplus(tf.keras.constraints.Constraint):
+class Softplus(tf.python.keras.constraints.Constraint):
"""Softplus constraint."""

-def __init__(self, epsilon=tf.keras.backend.epsilon()):
+def __init__(self, epsilon=tf.python.keras.backend.epsilon()):
self.epsilon = epsilon

def __call__(self, w):
@@ -66,23 +66,23 @@ def get_config(self):
return {'epsilon': self.epsilon}


-# Compatibility aliases, following tf.keras
+# Compatibility aliases, following tf.python.keras

# pylint: disable=invalid-name
exp = Exp
positive = Positive
softplus = Softplus
# pylint: enable=invalid-name

-# Utility functions, following tf.keras
+# Utility functions, following tf.python.keras


def serialize(initializer):
-return tf.keras.utils.serialize_keras_object(initializer)
+return tf.python.keras.utils.serialize_keras_object(initializer)


def deserialize(config, custom_objects=None):
-return tf.keras.utils.deserialize_keras_object(
+return tf.python.keras.utils.deserialize_keras_object(
config,
module_objects=globals(),
custom_objects=custom_objects,
@@ -108,4 +108,4 @@ def get(identifier, value=None):
pass
elif callable(identifier):
return identifier
-return tf.keras.constraints.get(value)
+return tf.python.keras.constraints.get(value)
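To make the docstring's subtlety concrete: standard Keras constraints are projections applied after each gradient update, while this module reads parameters through the constraint, so the underlying variable stays unconstrained. A toy sketch of the read-time transform, written against the public `tf.keras` path and using `tf.nn.softplus` (an assumption mirroring the class names above, not this module's exact math):

```python
# Sketch: a constraint used as a read-time transform. The tf.Variable may
# wander anywhere in R during optimization; every read maps it into (0, inf).
import tensorflow as tf

class SoftplusTransform(tf.keras.constraints.Constraint):
  """Maps an unconstrained weight to a strictly positive tensor."""

  def __init__(self, epsilon=tf.keras.backend.epsilon()):
    self.epsilon = epsilon

  def __call__(self, w):
    return tf.nn.softplus(w) + self.epsilon

unconstrained = tf.Variable(-4.0)              # free to be negative
positive_view = SoftplusTransform()(unconstrained)
print(float(positive_view))                    # always > 0
```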
edward2/tensorflow/constraints_test.py (2 changes: 1 addition & 1 deletion)
@@ -37,7 +37,7 @@ def testConstraintsGet(self):
self.assertIsInstance(ed.constraints.get('positive'),
ed.constraints.Positive)
self.assertIsInstance(ed.constraints.get('non_neg'),
-tf.keras.constraints.NonNeg)
+tf.python.keras.constraints.NonNeg)


if __name__ == '__main__':
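The test above pins down the dispatch behavior of `get` at the bottom of constraints.py: Edward2 names resolve to Edward2 classes, and anything unrecognized falls through to Keras. A short usage sketch (assuming `edward2` is installed and imported as `ed`):

```python
# Sketch: ed.constraints.get resolves Edward2's own constraint names first,
# then falls back to the Keras registry for names it does not know.
import edward2 as ed

positive = ed.constraints.get('positive')  # ed.constraints.Positive instance
non_neg = ed.constraints.get('non_neg')    # falls through to Keras NonNeg
```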
edward2/tensorflow/initializers.py (60 changes: 30 additions & 30 deletions)
@@ -15,13 +15,13 @@

"""Initializers.

-This module extends `tf.keras.initializers` with the notion of "trainable
-initializers", where initializers to weights and biases in `tf.keras.layers` may
+This module extends `tf.python.keras.initializers` with the notion of "trainable
+initializers", where initializers to weights and biases in `tf.python.keras.layers` may
themselves carry parameters. For example, consider a weight initializer which
returns a variational distribution: this is reified as an `ed.RandomVariable`
parameterized by `tf.Variables`.

-One subtlety is how `tf.keras.constraints` are used on the parameters of
+One subtlety is how `tf.python.keras.constraints` are used on the parameters of
trainable initializers. Typically, Keras constraints are used with projected
gradient descent, where one performs unconstrained optimization and then applies
a projection (the constraint) after each gradient update. To stay in line with
@@ -110,7 +110,7 @@ def _compute_fans(shape):
return fan_in, fan_out


-class ScaledNormalStdDev(tf.keras.initializers.VarianceScaling):
+class ScaledNormalStdDev(tf.python.keras.initializers.VarianceScaling):
"""Initializer capable of adapting its scale to the shape of weights tensors.

This initializes the standard deviation parameter of a Trainable Normal
@@ -173,7 +173,7 @@ def __call__(self, shape, dtype=None):
dtype=dtype, seed=self.seed)


-class TrainableDeterministic(tf.keras.layers.Layer):
+class TrainableDeterministic(tf.python.keras.layers.Layer):
"""Deterministic point-wise initializer with trainable location."""

def __init__(self,
@@ -225,13 +225,13 @@ def get_config(self):
}


-class TrainableHalfCauchy(tf.keras.layers.Layer):
+class TrainableHalfCauchy(tf.python.keras.layers.Layer):
"""Half-Cauchy distribution initializer with trainable parameters."""

def __init__(self,
-loc_initializer=tf.keras.initializers.TruncatedNormal(
+loc_initializer=tf.python.keras.initializers.TruncatedNormal(
stddev=1e-5),
-scale_initializer=tf.keras.initializers.TruncatedNormal(
+scale_initializer=tf.python.keras.initializers.TruncatedNormal(
mean=-3., stddev=0.1),
loc_regularizer=None,
scale_regularizer=None,
@@ -303,13 +303,13 @@ def get_config(self):
}


-class TrainableCauchy(tf.keras.layers.Layer):
+class TrainableCauchy(tf.python.keras.layers.Layer):
"""Cauchy distribution initializer with trainable parameters."""

def __init__(
self,
-loc_initializer=tf.keras.initializers.TruncatedNormal(stddev=1e-5),
-scale_initializer=tf.keras.initializers.TruncatedNormal(
+loc_initializer=tf.python.keras.initializers.TruncatedNormal(stddev=1e-5),
+scale_initializer=tf.python.keras.initializers.TruncatedNormal(
mean=-3., stddev=0.1),
loc_regularizer=None,
scale_regularizer=None,
@@ -374,13 +374,13 @@ def get_config(self):
}


-class TrainableLogNormal(tf.keras.layers.Layer):
+class TrainableLogNormal(tf.python.keras.layers.Layer):
"""Random log normal op as an initializer with trainable loc and scale."""

def __init__(self,
-loc_initializer=tf.keras.initializers.TruncatedNormal(
+loc_initializer=tf.python.keras.initializers.TruncatedNormal(
stddev=1e-5),
-scale_initializer=tf.keras.initializers.TruncatedNormal(
+scale_initializer=tf.python.keras.initializers.TruncatedNormal(
mean=-3., stddev=0.1),
loc_regularizer=None,
scale_regularizer=None,
@@ -451,13 +451,13 @@ def get_config(self):
}


-class TrainableNormal(tf.keras.layers.Layer):
+class TrainableNormal(tf.python.keras.layers.Layer):
"""Random normal op as an initializer with trainable mean and stddev."""

def __init__(self,
-mean_initializer=tf.keras.initializers.TruncatedNormal(
+mean_initializer=tf.python.keras.initializers.TruncatedNormal(
stddev=1e-5),
-stddev_initializer=tf.keras.initializers.TruncatedNormal(
+stddev_initializer=tf.python.keras.initializers.TruncatedNormal(
mean=-3., stddev=0.1),
mean_regularizer=None,
stddev_regularizer=None,
@@ -544,7 +544,7 @@ class TrainableHeNormal(TrainableNormal):

def __init__(self, seed=None, **kwargs):
super(TrainableHeNormal, self).__init__(
-mean_initializer=tf.keras.initializers.he_normal(seed),
+mean_initializer=tf.python.keras.initializers.he_normal(seed),
seed=seed,
**kwargs)

@@ -570,7 +570,7 @@ class TrainableGlorotNormal(TrainableNormal):

def __init__(self, seed=None, **kwargs):
super(TrainableGlorotNormal, self).__init__(
-mean_initializer=tf.keras.initializers.GlorotNormal(seed),
+mean_initializer=tf.python.keras.initializers.GlorotNormal(seed),
seed=seed,
**kwargs)

@@ -611,12 +611,12 @@ def build(self, shape, dtype=None):
self.built = True


-class TrainableNormalFixedStddev(tf.keras.layers.Layer):
+class TrainableNormalFixedStddev(tf.python.keras.layers.Layer):
"""Random normal op as an initializer with trainable mean and fixed stddev."""

def __init__(self,
stddev=1.,
-mean_initializer=tf.keras.initializers.TruncatedNormal(
+mean_initializer=tf.python.keras.initializers.TruncatedNormal(
stddev=1e-5),
mean_regularizer=None,
mean_constraint=None,
@@ -664,7 +664,7 @@ def get_config(self):
}


-class RandomSign(tf.keras.initializers.Initializer):
+class RandomSign(tf.python.keras.initializers.Initializer):
"""Initializer that generates tensors initialized to +/- 1.

Attributes:
@@ -694,12 +694,12 @@ def get_config(self):
}


-class TrainableMixtureOfDeltas(tf.keras.layers.Layer):
+class TrainableMixtureOfDeltas(tf.python.keras.layers.Layer):
"""Mixture of deltas as an initializer with trainable locations."""

def __init__(self,
num_components=5,
-loc_initializer=tf.keras.initializers.he_normal(),
+loc_initializer=tf.python.keras.initializers.he_normal(),
loc_regularizer=None,
loc_constraint=None,
seed=None,
@@ -756,7 +756,7 @@ def get_config(self):
}


-class OrthogonalRandomFeatures(tf.keras.initializers.Orthogonal):
+class OrthogonalRandomFeatures(tf.python.keras.initializers.Orthogonal):
"""Generates a orthogonal Gaussian matrix for a random feature Dense layer.

Generates a 2D matrix of form W = stddev * Q @ S [1], where Q is a random
@@ -829,7 +829,7 @@ def get_config(self):
config.update(new_config)
return config

-# Compatibility aliases, following tf.keras
+# Compatibility aliases, following tf.python.keras

# pylint: disable=invalid-name
scaled_normal_std_dev = ScaledNormalStdDev
@@ -847,15 +847,15 @@ def get_config(self):
orthogonal_random_features = OrthogonalRandomFeatures
# pylint: enable=invalid-name

-# Utility functions, following tf.keras
+# Utility functions, following tf.python.keras


def serialize(initializer):
-return tf.keras.utils.serialize_keras_object(initializer)
+return tf.python.keras.utils.serialize_keras_object(initializer)


def deserialize(config, custom_objects=None):
-return tf.keras.utils.deserialize_keras_object(
+return tf.python.keras.utils.deserialize_keras_object(
config,
module_objects=globals(),
custom_objects=custom_objects,
@@ -881,4 +881,4 @@ def get(identifier, value=None):
pass
elif callable(identifier):
return identifier
-return tf.keras.initializers.get(value)
+return tf.python.keras.initializers.get(value)
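As context for the module above: a trainable initializer is itself a Keras layer whose call returns an `ed.RandomVariable`, so the initializer's own `tf.Variable`s (for example a mean and a stddev) receive gradients during training. A hedged sketch, assuming the usual Keras initializer calling convention `initializer(shape)` and `edward2` imported as `ed`:

```python
# Sketch: drawing a weight from a trainable-normal initializer. Each call
# returns a random variable whose distribution parameters are tf.Variables.
import edward2 as ed

initializer = ed.initializers.get('trainable_normal')  # TrainableNormal
weight = initializer((3, 2))   # an ed.RandomVariable of shape (3, 2)
print(weight)
```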
edward2/tensorflow/initializers_test.py (6 changes: 3 additions & 3 deletions)
@@ -136,11 +136,11 @@ def testInitializersGet(self):
self.assertIsInstance(ed.initializers.get('trainable_normal'),
ed.initializers.TrainableNormal)
# This is working correctly, but the test won't pass currently because TF
-# isn't consistent (yet). Specifically, tf.keras.initializers.get('zeros')
-# returns a certain class while tf.keras.initializers.zeros (or Zeros)
+# isn't consistent (yet). Specifically, tf.python.keras.initializers.get('zeros')
+# returns a certain class while tf.python.keras.initializers.zeros (or Zeros)
# currently returns v2 of that class.
# self.assertIsInstance(ed.initializers.get('zeros'),
-# tf.keras.initializers.Zeros().__class__)
+# tf.python.keras.initializers.Zeros().__class__)


if __name__ == '__main__':
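Finally, the `serialize`/`deserialize` helpers that both modules define delegate to the standard Keras object-config round trip: an object is reduced to a small config (class name plus constructor kwargs) and rebuilt from it. A hedged round-trip sketch (assuming `edward2` is imported as `ed`):

```python
# Sketch: serialize an Edward2 constraint to its Keras-style config and
# rebuild it; the restored object is a fresh instance of the same class.
import edward2 as ed

constraint = ed.constraints.Positive()
config = ed.constraints.serialize(constraint)
restored = ed.constraints.deserialize(config)
assert isinstance(restored, ed.constraints.Positive)
```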