Skip to content

Commit

Permalink
CI: Unpin tf-nightly and stay on Keras 2 (#6684)
Browse files Browse the repository at this point in the history
`tf-nightly` was pinned at #6655 due to Keras 3 release.

`Keras 2 <> Keras 3` incompatibilities (keras-team/keras#18467) are impacting the Graph plugin and the Hparams plugin. Since the two plugins are not being actively developed and updating their code to be Keras 3 compatible is non-trivial, enforce using Keras 2 for the time being.

Note that `tf-nightly` is not compatible with `tf-keras` and needs to be used with `tf-keras-nightly` instead.

Googlers, see b/309503942 for more context.

Tested internally: cl/582334596

#oncall
  • Loading branch information
yatbear committed Nov 16, 2023
1 parent 7439f46 commit cf175ea
Show file tree
Hide file tree
Showing 6 changed files with 107 additions and 70 deletions.
9 changes: 8 additions & 1 deletion .github/workflows/ci.yml
Expand Up @@ -30,7 +30,8 @@ env:
BUILDTOOLS_VERSION: '3.0.0'
BUILDIFIER_SHA256SUM: 'e92a6793c7134c5431c58fbc34700664f101e5c9b1c1fcd93b97978e8b7f88db'
BUILDOZER_SHA256SUM: '3d58a0b6972e4535718cdd6c12778170ea7382de7c75bc3728f5719437ffb84d'
TENSORFLOW_VERSION: 'tf-nightly==2.16.0.dev20231018'
TENSORFLOW_VERSION: 'tf-nightly'
TF_KERAS_VERSION: 'tf-keras-nightly' # Keras 2

jobs:
build:
Expand Down Expand Up @@ -78,6 +79,12 @@ jobs:
python -m pip install -U pip
pip install "${TENSORFLOW_VERSION}"
if: matrix.tf_version_id != 'notf'
# Replace `tf-keras` with `tf-keras-nightly` in requirements.txt
# to avoid incompatibilities when using alongside `tf-nightly`.
# TODO: Remove this after migrating to Keras 3.
- name: 'Make sure to use tf-keras-nightly with tf-nightly'
run: |
sed -i "s/^tf-keras.*/${TF_KERAS_VERSION}/g" ./tensorboard/pip_package/requirements.txt
- name: 'Install Python dependencies'
run: |
python -m pip install -U pip
Expand Down
5 changes: 4 additions & 1 deletion tensorboard/pip_package/requirements.txt
Expand Up @@ -34,4 +34,7 @@ setuptools >= 41.0.0 # Note: provides pkg_resources as well as setuptools
# requirement, and likely this will not disrupt existing users of the package.
six > 1.9
tensorboard-data-server >= 0.7.0, < 0.8.0
werkzeug >= 1.0.1
# Stay on Keras 2 for now: https://github.com/keras-team/keras/issues/18467.
# TODO: Remove this after migrating to Keras 3.
tf-keras >= 2.15.0
werkzeug >= 1.0.1
15 changes: 11 additions & 4 deletions tensorboard/plugins/graph/graphs_plugin_v2_test.py
Expand Up @@ -24,6 +24,13 @@
from tensorboard.compat.proto import graph_pb2
from tensorboard.plugins.graph import graphs_plugin_test

# Stay on Keras 2 for now: https://github.com/keras-team/keras/issues/18467.
# tf.keras gained a `version()` accessor only in the Keras 3 era, so its
# absence means we are already on Keras 2.
version_fn = getattr(tf.keras, "version", None)
if version_fn is None or not version_fn().startswith("3."):
    keras = tf.keras  # Keras 2
else:
    import tf_keras as keras  # Keras 2 compatibility package


class GraphsPluginV2Test(
graphs_plugin_test.GraphsPluginBaseTest, tf.test.TestCase
Expand All @@ -34,10 +41,10 @@ def generate_run(
x, y = np.ones((10, 10)), np.ones((10, 1))
val_x, val_y = np.ones((4, 10)), np.ones((4, 1))

model = tf.keras.Sequential(
model = keras.Sequential(
[
tf.keras.layers.Dense(10, activation="relu"),
tf.keras.layers.Dense(1, activation="sigmoid"),
keras.layers.Dense(10, activation="relu"),
keras.layers.Dense(1, activation="sigmoid"),
]
)
model.compile("rmsprop", "binary_crossentropy")
Expand All @@ -49,7 +56,7 @@ def generate_run(
batch_size=2,
epochs=1,
callbacks=[
tf.compat.v2.keras.callbacks.TensorBoard(
keras.callbacks.TensorBoard(
log_dir=os.path.join(logdir, run_name),
write_graph=include_graph,
)
Expand Down
125 changes: 66 additions & 59 deletions tensorboard/plugins/graph/keras_util_test.py
Expand Up @@ -21,6 +21,13 @@

from tensorboard.plugins.graph import keras_util

# Stay on Keras 2 for now: https://github.com/keras-team/keras/issues/18467.
# When tf.keras reports a 3.x version, fall back to the tf_keras package,
# which ships the Keras 2 implementation; otherwise tf.keras is Keras 2.
version_fn = getattr(tf.keras, "version", None)
if version_fn is None or not version_fn().startswith("3."):
    keras = tf.keras  # Keras 2
else:
    import tf_keras as keras  # Keras 2 compatibility package


class KerasUtilTest(tf.test.TestCase):
def assertGraphDefToModel(self, expected_proto, model):
Expand Down Expand Up @@ -112,12 +119,12 @@ def DISABLED_test_keras_model_to_graph_def_sequential_model(self):
}
}
"""
model = tf.keras.models.Sequential(
model = keras.models.Sequential(
[
tf.keras.layers.Dense(32, input_shape=(784,)),
tf.keras.layers.Activation("relu", name="my_relu"),
tf.keras.layers.Dense(10),
tf.keras.layers.Activation("softmax"),
keras.layers.Dense(32, input_shape=(784,)),
keras.layers.Activation("relu", name="my_relu"),
keras.layers.Dense(10),
keras.layers.Activation("softmax"),
]
)
self.assertGraphDefToModel(expected_proto, model)
Expand Down Expand Up @@ -188,12 +195,12 @@ def test_keras_model_to_graph_def_functional_model(self):
}
}
"""
inputs = tf.keras.layers.Input(shape=(784,), name="functional_input")
d0 = tf.keras.layers.Dense(64, activation="relu")
d1 = tf.keras.layers.Dense(64, activation="relu")
d2 = tf.keras.layers.Dense(64, activation="relu")
inputs = keras.layers.Input(shape=(784,), name="functional_input")
d0 = keras.layers.Dense(64, activation="relu")
d1 = keras.layers.Dense(64, activation="relu")
d2 = keras.layers.Dense(64, activation="relu")

model = tf.keras.models.Model(
model = keras.models.Model(
inputs=inputs, outputs=d2(d1(d0(inputs))), name="model"
)
self.assertGraphDefToModel(expected_proto, model)
Expand Down Expand Up @@ -265,12 +272,12 @@ def test_keras_model_to_graph_def_functional_model_with_cycle(self):
}
}
"""
inputs = tf.keras.layers.Input(shape=(784,), name="cycle_input")
d0 = tf.keras.layers.Dense(64, activation="relu")
d1 = tf.keras.layers.Dense(64, activation="relu")
d2 = tf.keras.layers.Dense(64, activation="relu")
inputs = keras.layers.Input(shape=(784,), name="cycle_input")
d0 = keras.layers.Dense(64, activation="relu")
d1 = keras.layers.Dense(64, activation="relu")
d2 = keras.layers.Dense(64, activation="relu")

model = tf.keras.models.Model(
model = keras.models.Model(
inputs=inputs, outputs=d1(d2(d1(d0(inputs)))), name="model"
)
self.assertGraphDefToModel(expected_proto, model)
Expand Down Expand Up @@ -309,10 +316,10 @@ def test_keras_model_to_graph_def_lstm_model(self):
}
}
"""
inputs = tf.keras.layers.Input(shape=(None, 5), name="lstm_input")
encoder = tf.keras.layers.SimpleRNN(256)
inputs = keras.layers.Input(shape=(None, 5), name="lstm_input")
encoder = keras.layers.SimpleRNN(256)

model = tf.keras.models.Model(
model = keras.models.Model(
inputs=inputs, outputs=encoder(inputs), name="model"
)
self.assertGraphDefToModel(expected_proto, model)
Expand Down Expand Up @@ -447,25 +454,25 @@ def DISABLED_test_keras_model_to_graph_def_nested_sequential_model(self):
}
}
"""
sub_sub_model = tf.keras.models.Sequential(
sub_sub_model = keras.models.Sequential(
[
tf.keras.layers.Dense(32, input_shape=(784,)),
tf.keras.layers.Activation("relu"),
keras.layers.Dense(32, input_shape=(784,)),
keras.layers.Activation("relu"),
]
)

sub_model = tf.keras.models.Sequential(
sub_model = keras.models.Sequential(
[
sub_sub_model,
tf.keras.layers.Activation("relu", name="my_relu"),
keras.layers.Activation("relu", name="my_relu"),
]
)

model = tf.keras.models.Sequential(
model = keras.models.Sequential(
[
sub_model,
tf.keras.layers.Dense(10),
tf.keras.layers.Activation("softmax"),
keras.layers.Dense(10),
keras.layers.Activation("softmax"),
]
)

Expand Down Expand Up @@ -601,27 +608,27 @@ def test_keras_model_to_graph_def_functional_multi_inputs(self):
}
}
"""
main_input = tf.keras.layers.Input(
main_input = keras.layers.Input(
shape=(100,), dtype="int32", name="main_input"
)
x = tf.keras.layers.Embedding(
x = keras.layers.Embedding(
output_dim=512, input_dim=10000, input_length=100
)(main_input)
rnn_out = tf.keras.layers.SimpleRNN(32)(x)
rnn_out = keras.layers.SimpleRNN(32)(x)

auxiliary_output = tf.keras.layers.Dense(
auxiliary_output = keras.layers.Dense(
1, activation="sigmoid", name="aux_output"
)(rnn_out)
auxiliary_input = tf.keras.layers.Input(shape=(5,), name="aux_input")
auxiliary_input = keras.layers.Input(shape=(5,), name="aux_input")

x = tf.keras.layers.concatenate([rnn_out, auxiliary_input])
x = tf.keras.layers.Dense(64, activation="relu")(x)
x = keras.layers.concatenate([rnn_out, auxiliary_input])
x = keras.layers.Dense(64, activation="relu")(x)

main_output = tf.keras.layers.Dense(
main_output = keras.layers.Dense(
1, activation="sigmoid", name="main_output"
)(x)

model = tf.keras.models.Model(
model = keras.models.Model(
inputs=[main_input, auxiliary_input],
outputs=[main_output, auxiliary_output],
name="model",
Expand Down Expand Up @@ -757,22 +764,22 @@ def test_keras_model_to_graph_def_functional_model_as_layer(self):
}
}
"""
inputs1 = tf.keras.layers.Input(shape=(784,), name="sub_func_input_1")
inputs2 = tf.keras.layers.Input(shape=(784,), name="sub_func_input_2")
d0 = tf.keras.layers.Dense(64, activation="relu")
d1 = tf.keras.layers.Dense(64, activation="relu")
d2 = tf.keras.layers.Dense(64, activation="relu")
inputs1 = keras.layers.Input(shape=(784,), name="sub_func_input_1")
inputs2 = keras.layers.Input(shape=(784,), name="sub_func_input_2")
d0 = keras.layers.Dense(64, activation="relu")
d1 = keras.layers.Dense(64, activation="relu")
d2 = keras.layers.Dense(64, activation="relu")

sub_model = tf.keras.models.Model(
sub_model = keras.models.Model(
inputs=[inputs2, inputs1],
outputs=[d0(inputs1), d1(inputs2)],
name="model",
)

main_outputs = d2(
tf.keras.layers.concatenate(sub_model([inputs2, inputs1]))
keras.layers.concatenate(sub_model([inputs2, inputs1]))
)
model = tf.keras.models.Model(
model = keras.models.Model(
inputs=[inputs2, inputs1],
outputs=main_outputs,
name="model_1",
Expand Down Expand Up @@ -864,16 +871,16 @@ def DISABLED_test_keras_model_to_graph_def_functional_sequential_model(
}
}
"""
inputs = tf.keras.layers.Input(shape=(784,), name="func_seq_input")
sub_model = tf.keras.models.Sequential(
inputs = keras.layers.Input(shape=(784,), name="func_seq_input")
sub_model = keras.models.Sequential(
[
tf.keras.layers.Dense(32, input_shape=(784,)),
tf.keras.layers.Activation("relu", name="my_relu"),
keras.layers.Dense(32, input_shape=(784,)),
keras.layers.Activation("relu", name="my_relu"),
]
)
dense = tf.keras.layers.Dense(64, activation="relu")
dense = keras.layers.Dense(64, activation="relu")

model = tf.keras.models.Model(
model = keras.models.Model(
inputs=inputs, outputs=dense(sub_model(inputs))
)

Expand Down Expand Up @@ -962,15 +969,15 @@ def DISABLED_test_keras_model_to_graph_def_sequential_functional_model(
}
}
"""
inputs = tf.keras.layers.Input(shape=(784,), name="func_seq_input")
dense = tf.keras.layers.Dense(64, activation="relu")
inputs = keras.layers.Input(shape=(784,), name="func_seq_input")
dense = keras.layers.Dense(64, activation="relu")

sub_model = tf.keras.models.Model(inputs=inputs, outputs=dense(inputs))
model = tf.keras.models.Sequential(
sub_model = keras.models.Model(inputs=inputs, outputs=dense(inputs))
model = keras.models.Sequential(
[
sub_model,
tf.keras.layers.Dense(32, input_shape=(784,)),
tf.keras.layers.Activation("relu", name="my_relu"),
keras.layers.Dense(32, input_shape=(784,)),
keras.layers.Activation("relu", name="my_relu"),
]
)

Expand Down Expand Up @@ -1029,16 +1036,16 @@ def test_keras_model_to_graph_def_functional_multiple_inbound_nodes_from_same_no
}
}
"""
inputs = tf.keras.Input(shape=(2,))
inputs = keras.Input(shape=(2,))
doubling_layer = _DoublingLayer()
reducing_layer = tf.keras.layers.Add()
reducing_layer = keras.layers.Add()
outputs = reducing_layer(doubling_layer(inputs))
model = tf.keras.Model(inputs=[inputs], outputs=outputs)
model = keras.Model(inputs=[inputs], outputs=outputs)

self.assertGraphDefToModel(expected_proto, model)


class _DoublingLayer(tf.keras.layers.Layer):
class _DoublingLayer(keras.layers.Layer):
def call(self, inputs):
return inputs, inputs

Expand Down
11 changes: 9 additions & 2 deletions tensorboard/plugins/hparams/_keras.py
Expand Up @@ -24,8 +24,15 @@
from tensorboard.plugins.hparams import summary
from tensorboard.plugins.hparams import summary_v2

# Stay on Keras 2 for now: https://github.com/keras-team/keras/issues/18467.
# When tf.keras reports a 3.x version, fall back to the tf_keras package,
# which ships the Keras 2 implementation; otherwise tf.keras is Keras 2.
version_fn = getattr(tf.keras, "version", None)
if version_fn is None or not version_fn().startswith("3."):
    keras = tf.keras  # Keras 2
else:
    import tf_keras as keras  # Keras 2 compatibility package

class Callback(tf.keras.callbacks.Callback):

class Callback(keras.callbacks.Callback):
"""Callback for logging hyperparameters to TensorBoard.
NOTE: This callback only works in TensorFlow eager mode.
Expand All @@ -34,7 +41,7 @@ class Callback(tf.keras.callbacks.Callback):
def __init__(self, writer, hparams, trial_id=None):
"""Create a callback for logging hyperparameters to TensorBoard.
As with the standard `tf.keras.callbacks.TensorBoard` class, each
As with the standard `keras.callbacks.TensorBoard` class, each
callback object is valid for only one call to `model.fit`.
Args:
Expand Down
12 changes: 9 additions & 3 deletions tensorboard/plugins/hparams/_keras_test.py
Expand Up @@ -25,6 +25,12 @@
from tensorboard.plugins.hparams import plugin_data_pb2
from tensorboard.plugins.hparams import summary_v2 as hp

# Stay on Keras 2 for now: https://github.com/keras-team/keras/issues/18467.
# When tf.keras reports a 3.x version, fall back to the tf_keras package,
# which ships the Keras 2 implementation; otherwise tf.keras is Keras 2.
version_fn = getattr(tf.keras, "version", None)
if version_fn is None or not version_fn().startswith("3."):
    keras = tf.keras  # Keras 2
else:
    import tf_keras as keras  # Keras 2 compatibility package

tf.compat.v1.enable_eager_execution()

Expand All @@ -40,12 +46,12 @@ def _initialize_model(self, writer):
"optimizer": "adam",
HP_DENSE_NEURONS: 8,
}
self.model = tf.keras.models.Sequential(
self.model = keras.models.Sequential(
[
tf.keras.layers.Dense(
keras.layers.Dense(
self.hparams[HP_DENSE_NEURONS], input_shape=(1,)
),
tf.keras.layers.Dense(1, activation="sigmoid"),
keras.layers.Dense(1, activation="sigmoid"),
]
)
self.model.compile(loss="mse", optimizer=self.hparams["optimizer"])
Expand Down

0 comments on commit cf175ea

Please sign in to comment.