Training and Evaluation
This guide covers training, evaluation, and prediction (inference) when using built-in APIs for training and validation, such as fit() and evaluate().
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
API overview: a first end-to-end example

When passing data to the built-in training loops of a model, you should either use NumPy arrays (if your data is small and fits in memory) or tf.data.Dataset objects. In the next few paragraphs, we'll use the MNIST dataset as NumPy arrays, in order to demonstrate how to use optimizers, losses, and metrics. Let's consider the following model (here, we build with the functional API, but it could be a Sequential model or a subclassed model as well):

inputs = keras.Input(shape=(784,), name="digits")
x = layers.Dense(64, activation="relu", name="dense_1")(inputs)
x = layers.Dense(64, activation="relu", name="dense_2")(x)
outputs = layers.Dense(10, activation="softmax", name="predictions")(x)
model = keras.Model(inputs=inputs, outputs=outputs)
# Load a toy dataset for the sake of this example
(x_train, y_train), (x_test, y_test) = keras.datasets.mnist.load_data()

# Preprocess the data (these are NumPy arrays)
x_train = x_train.reshape(60000, 784).astype("float32") / 255
x_test = x_test.reshape(10000, 784).astype("float32") / 255

y_train = y_train.astype("float32")
y_test = y_test.astype("float32")

# Reserve 10,000 samples for validation
x_val = x_train[-10000:]
y_val = y_train[-10000:]
x_train = x_train[:-10000]
y_train = y_train[:-10000]

We specify the training configuration (optimizer, loss, metrics):
model.compile(
    optimizer=keras.optimizers.RMSprop(),  # Optimizer
    # Loss function to minimize
    loss=keras.losses.SparseCategoricalCrossentropy(),
    # List of metrics to monitor
    metrics=[keras.metrics.SparseCategoricalAccuracy()],
)
We call fit(), which will train the model by slicing the data into batches of size batch_size, and repeatedly iterating over the entire dataset for a given number of epochs. The returned history object holds a record of the loss values and metric values during training, accessible via history.history. We then evaluate the model on the test data via evaluate().
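Putting these pieces together, a minimal sketch of the end-to-end workflow (batch size and epoch count are illustrative):

history = model.fit(
    x_train,
    y_train,
    batch_size=64,
    epochs=2,
    # Monitor validation loss and metrics at the end of each epoch
    validation_data=(x_val, y_val),
)
print(history.history)

# Evaluate the model on the test data using `evaluate()`
results = model.evaluate(x_test, y_test, batch_size=128)
print("test loss, test acc:", results)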
The compile() method: specifying a loss, metrics, and an optimizer

To train a model with fit(), you need to specify a loss function, an optimizer, and, optionally, some metrics to monitor. You pass these to the model as arguments to the compile() method:
model.compile(
    optimizer=keras.optimizers.RMSprop(learning_rate=1e-3),
    loss=keras.losses.SparseCategoricalCrossentropy(),
    metrics=[keras.metrics.SparseCategoricalAccuracy()],
)
The metrics argument should be a list -- your model can have any number of metrics. If your model has multiple outputs, you can specify different losses and metrics for each output, and you can modulate the contribution of each output to the total loss of the model. Note that in many cases, the optimizer, loss, and metrics can be specified via string identifiers as a shortcut:
model.compile(
    optimizer="rmsprop",
    loss="sparse_categorical_crossentropy",
    metrics=["sparse_categorical_accuracy"],
)
For later reuse, let's put our model definition and compile step in functions; we will call them several times across different examples in this guide:

def get_uncompiled_model():
    inputs = keras.Input(shape=(784,), name="digits")
    x = layers.Dense(64, activation="relu", name="dense_1")(inputs)
    x = layers.Dense(64, activation="relu", name="dense_2")(x)
    outputs = layers.Dense(10, activation="softmax", name="predictions")(x)
    model = keras.Model(inputs=inputs, outputs=outputs)
    return model


def get_compiled_model():
    model = get_uncompiled_model()
    model.compile(
        optimizer="rmsprop",
        loss="sparse_categorical_crossentropy",
        metrics=["sparse_categorical_accuracy"],
    )
    return model
Many built-in optimizers, losses, and metrics are available: in general, you won't have to create your own from scratch, because they are part of the Keras API.

Optimizers: SGD() (with or without momentum), RMSprop(), Adam(), etc.
Losses: MeanSquaredError(), KLDivergence(), CosineSimilarity(), etc.
Metrics: AUC(), Precision(), Recall(), etc.
Custom losses

If you need to create a custom loss, Keras provides two ways to do so. The first method involves creating a function that accepts inputs y_true and y_pred. The following example shows a loss function that computes the mean squared error between the real data and the predictions:

def custom_mean_squared_error(y_true, y_pred):
    return tf.math.reduce_mean(tf.square(y_true - y_pred), axis=-1)
model = get_uncompiled_model()
model.compile(optimizer=keras.optimizers.Adam(), loss=custom_mean_squared_error)

# We need to one-hot encode the labels to use MSE
# (cast to int first, since one-hot indices must be integers).
y_train_one_hot = tf.one_hot(y_train.astype("int64"), depth=10)
model.fit(x_train, y_train_one_hot, batch_size=64, epochs=1)
<keras.callbacks.History at 0x159159fd0>
If you need a loss function that takes in parameters besides y_true and y_pred, you can subclass the tf.keras.losses.Loss class and implement the following two methods:

__init__(self): accept parameters to pass during the call of your loss function
call(self, y_true, y_pred): use the targets (y_true) and the model predictions (y_pred) to compute the model's loss

Let's say you want to use mean squared error, but with an added term that de-incentivizes prediction values far from 0.5 (we assume that the categorical targets are one-hot encoded and take values between 0 and 1). Here's how you would do it:

class CustomMSE(keras.losses.Loss):
    def __init__(self, regularization_factor=0.1, name="custom_mse"):
        super().__init__(name=name)
        self.regularization_factor = regularization_factor

    def call(self, y_true, y_pred):
        mse = tf.math.reduce_mean(tf.square(y_true - y_pred))
        reg = tf.math.reduce_mean(tf.square(0.5 - y_pred))
        return mse + reg * self.regularization_factor
model = get_uncompiled_model()
model.compile(optimizer=keras.optimizers.Adam(), loss=CustomMSE())

model.fit(x_train, y_train_one_hot, batch_size=64, epochs=1)
<keras.callbacks.History at 0x1599fd650>
Custom metrics

If you need a metric that isn't part of the API, you can easily create custom metrics by subclassing the tf.keras.metrics.Metric class. You will need to implement four methods:

__init__(self), in which you will create state variables for your metric.
update_state(self, y_true, y_pred, sample_weight=None), which uses the targets y_true and the model predictions y_pred to update the state variables.
result(self), which uses the state variables to compute the final results.
reset_state(self), which reinitializes the state of the metric.

State update and results computation are kept separate (in update_state() and result(), respectively) because in some cases, the results computation might be very expensive and would only be done periodically. Here's a simple example showing how to implement a CategoricalTruePositives metric that counts how many samples were correctly classified as belonging to a given class:
class CategoricalTruePositives(keras.metrics.Metric):
    def __init__(self, name="categorical_true_positives", **kwargs):
        super(CategoricalTruePositives, self).__init__(name=name, **kwargs)
        self.true_positives = self.add_weight(name="ctp", initializer="zeros")

    def update_state(self, y_true, y_pred, sample_weight=None):
        y_pred = tf.reshape(tf.argmax(y_pred, axis=1), shape=(-1, 1))
        values = tf.cast(y_true, "int32") == tf.cast(y_pred, "int32")
        values = tf.cast(values, "float32")
        if sample_weight is not None:
            sample_weight = tf.cast(sample_weight, "float32")
            values = tf.multiply(values, sample_weight)
        self.true_positives.assign_add(tf.reduce_sum(values))

    def result(self):
        return self.true_positives

    def reset_state(self):
        # The state of the metric will be reset at the start of each epoch.
        self.true_positives.assign(0.0)
model = get_uncompiled_model()
model.compile(
    optimizer=keras.optimizers.RMSprop(learning_rate=1e-3),
    loss=keras.losses.SparseCategoricalCrossentropy(),
    metrics=[CategoricalTruePositives()],
)
model.fit(x_train, y_train, batch_size=64, epochs=3)
Epoch 1/3
782/782 [==============================] - 2s 2ms/step - loss: 0.3414 - categorical_true_positives: 45121.0000
Epoch 2/3
782/782 [==============================] - 2s 2ms/step - loss: 0.1533 - categorical_true_positives: 47725.0000
Epoch 3/3
782/782 [==============================] - 1s 2ms/step - loss: 0.1120 - categorical_true_positives: 48333.0000
<keras.callbacks.History at 0x159b4b250>
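As a quick standalone sanity check (not from the original page, values illustrative), you can drive the metric by hand through its update_state()/result() cycle:

# Hypothetical hand-driven check of the metric outside of fit().
m = CategoricalTruePositives()
m.update_state(
    y_true=tf.constant([[0], [1], [2]]),
    y_pred=tf.constant([[0.9, 0.05, 0.05],
                        [0.1, 0.8, 0.1],
                        [0.2, 0.3, 0.5]]),
)
print(float(m.result()))  # 3.0 -- all three samples classified correctly
m.reset_state()  # called automatically at the start of each epoch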
Handling losses and metrics that don't fit the standard signature

The overwhelming majority of losses and metrics can be computed from y_true and y_pred, where y_pred is an output of your model -- but not all of them. For instance, a regularization loss may only require the activation of a layer (there are no targets in this case), and this activation may not be a model output. In such cases, you can call self.add_loss(loss_value) from inside the call method of a custom layer. Losses added in this way get added to the "main" loss during training (the one passed to compile()). Here's a simple example that adds activity regularization:
class ActivityRegularizationLayer(layers.Layer):
    def call(self, inputs):
        self.add_loss(tf.reduce_sum(inputs) * 0.1)
        return inputs  # Pass-through layer.
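To use the layer, it can be inserted anywhere in a model; a minimal sketch in the spirit of the MNIST model above (hyperparameters illustrative):

inputs = keras.Input(shape=(784,), name="digits")
x = layers.Dense(64, activation="relu", name="dense_1")(inputs)
# Insert activity regularization as a layer
x = ActivityRegularizationLayer()(x)
x = layers.Dense(64, activation="relu", name="dense_2")(x)
outputs = layers.Dense(10, name="predictions")(x)

model = keras.Model(inputs=inputs, outputs=outputs)
model.compile(
    optimizer=keras.optimizers.RMSprop(learning_rate=1e-3),
    loss=keras.losses.SparseCategoricalCrossentropy(from_logits=True),
)

# The displayed loss will be much higher than before,
# due to the regularization component.
model.fit(x_train, y_train, batch_size=64, epochs=1)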
<keras.callbacks.History at 0x159cb87d0>
You can do the same for logging metric values, using add_metric():
class MetricLoggingLayer(layers.Layer):
    def call(self, inputs):
        # The `aggregation` argument defines
        # how to aggregate the per-batch values
        # over each epoch:
        # in this case we simply average them.
        self.add_metric(
            keras.backend.std(inputs), name="std_of_activation", aggregation="mean"
        )
        return inputs  # Pass-through layer.
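The layer can be wired into a model the same way; a minimal sketch (again with illustrative hyperparameters):

inputs = keras.Input(shape=(784,), name="digits")
x = layers.Dense(64, activation="relu", name="dense_1")(inputs)
# Insert std logging as a layer.
x = MetricLoggingLayer()(x)
x = layers.Dense(64, activation="relu", name="dense_2")(x)
outputs = layers.Dense(10, name="predictions")(x)

model = keras.Model(inputs=inputs, outputs=outputs)
model.compile(
    optimizer=keras.optimizers.RMSprop(learning_rate=1e-3),
    loss=keras.losses.SparseCategoricalCrossentropy(from_logits=True),
)
model.fit(x_train, y_train, batch_size=64, epochs=1)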
<keras.callbacks.History at 0x159e1dbd0>
In the Functional API, you can also call model.add_loss(loss_tensor) or model.add_metric(metric_tensor, name, aggregation) on the model itself. Here's a simple example:
inputs = keras.Input(shape=(784,), name="digits")
x1 = layers.Dense(64, activation="relu", name="dense_1")(inputs)
x2 = layers.Dense(64, activation="relu", name="dense_2")(x1)
outputs = layers.Dense(10, name="predictions")(x2)
model = keras.Model(inputs=inputs, outputs=outputs)

model.add_loss(tf.reduce_sum(x1) * 0.1)

model.add_metric(keras.backend.std(x1), name="std_of_activation", aggregation="mean")

model.compile(
    optimizer=keras.optimizers.RMSprop(1e-3),
    loss=keras.losses.SparseCategoricalCrossentropy(from_logits=True),
)
model.fit(x_train, y_train, batch_size=64, epochs=1)
<keras.callbacks.History at 0x159f9e690>
Note that when you pass losses via add_loss(), it becomes possible to call compile() without a loss function, since the model already has a loss to minimize. Consider the following LogisticEndpoint layer: it takes as inputs targets and logits, and it tracks a crossentropy loss via add_loss(). It also tracks classification accuracy via add_metric().
class LogisticEndpoint(keras.layers.Layer):
    def __init__(self, name=None):
        super(LogisticEndpoint, self).__init__(name=name)
        self.loss_fn = keras.losses.BinaryCrossentropy(from_logits=True)
        self.accuracy_fn = keras.metrics.BinaryAccuracy()

    def call(self, targets, logits, sample_weights=None):
        # Compute the training-time loss value and add it
        # to the layer using `self.add_loss()`.
        loss = self.loss_fn(targets, logits, sample_weights)
        self.add_loss(loss)

        # Log accuracy as a metric and add it
        # to the layer using `self.add_metric()`.
        acc = self.accuracy_fn(targets, logits, sample_weights)
        self.add_metric(acc, name="accuracy")

        # Return the inference-time prediction tensor (for `.predict()`).
        return tf.nn.softmax(logits)
You can use it in a model with two inputs (input data and targets), compiled without a loss argument, like this:

import numpy as np

inputs = keras.Input(shape=(3,), name="inputs")
targets = keras.Input(shape=(10,), name="targets")
logits = keras.layers.Dense(10)(inputs)
predictions = LogisticEndpoint(name="predictions")(targets, logits)

model = keras.Model(inputs=[inputs, targets], outputs=predictions)
model.compile(optimizer="adam")  # No loss argument!

data = {
    "inputs": np.random.random((3, 3)),
    "targets": np.random.random((3, 10)),
}
model.fit(data)
<keras.callbacks.History at 0x15a15fa90>
Automatically setting apart a validation holdout set

In the first end-to-end example you saw, we used the validation_data argument to pass a tuple of NumPy arrays (x_val, y_val) to the model for evaluating a validation loss and validation metrics at the end of each epoch. Here's another option: the argument validation_split allows you to automatically reserve part of your training data for validation. The argument value represents the fraction of the data to be reserved for validation, so it should be set to a number higher than 0 and lower than 1. For instance, validation_split=0.2 means "use 20% of the data for validation", and validation_split=0.6 means "use 60% of the data for validation". The way the validation is computed is by taking the last x% of samples from the arrays received by the fit() call, before any shuffling. Note that you can only use validation_split when training with NumPy data.
model = get_compiled_model()
model.fit(x_train, y_train, batch_size=64, validation_split=0.2, epochs=1)
<keras.callbacks.History at 0x15a223bd0>
Training and evaluation from tf.data Datasets

In the past few paragraphs, you've seen how to handle losses, metrics, and optimizers, and you've seen how to use the validation_data and validation_split arguments in fit(), when your data is passed as NumPy arrays. Let's now take a look at the case where your data comes in the form of a tf.data.Dataset object. The tf.data API is a set of utilities in TensorFlow 2.0 for loading and preprocessing data in a way that's fast and scalable. You can pass a Dataset instance directly to the methods fit(), evaluate(), and predict():

model = get_compiled_model()

# First, let's create a training Dataset instance.
# For the sake of our example, we'll use the same MNIST data as before.
train_dataset = tf.data.Dataset.from_tensor_slices((x_train, y_train))
# Shuffle and slice the dataset.
train_dataset = train_dataset.shuffle(buffer_size=1024).batch(64)

# Now we get a test dataset.
test_dataset = tf.data.Dataset.from_tensor_slices((x_test, y_test))
test_dataset = test_dataset.batch(64)

# Since the dataset already takes care of batching,
# we don't pass a `batch_size` argument.
model.fit(train_dataset, epochs=3)

# You can also evaluate or predict on a dataset.
print("Evaluate")
result = model.evaluate(test_dataset)
dict(zip(model.metrics_names, result))
Epoch 1/3
782/782 [==============================] - 2s 2ms/step - loss: 0.3358 - sparse_categorical_accuracy: 0.9046
Epoch 2/3
782/782 [==============================] - 2s 2ms/step - loss: 0.1540 - sparse_categorical_accuracy: 0.9544
Epoch 3/3
782/782 [==============================] - 1s 2ms/step - loss: 0.1109 - sparse_categorical_accuracy: 0.9663
Evaluate
157/157 [==============================] - 0s 1ms/step - loss: 0.1118 - sparse_categorical_accuracy: 0.9659
{'loss': 0.11180760711431503,
 'sparse_categorical_accuracy': 0.9659000039100647}
Note that the Dataset is reset at the end of each epoch, so it can be reused for the next epoch. If you want to run training only on a specific number of batches from this Dataset, you can pass the steps_per_epoch argument, which specifies how many training steps the model should run using this Dataset before moving on to the next epoch. If you do this, the Dataset is not reset at the end of each epoch; instead, we just keep drawing the next batches. The Dataset will eventually run out of data (unless it is an infinitely-looping dataset).
model = get_compiled_model()

# Prepare the training dataset
train_dataset = tf.data.Dataset.from_tensor_slices((x_train, y_train))
train_dataset = train_dataset.shuffle(buffer_size=1024).batch(64)

# Only use 100 batches per epoch (that's 64 * 100 samples)
model.fit(train_dataset, epochs=3, steps_per_epoch=100)
Epoch 1/3
100/100 [==============================] - 1s 2ms/step - loss: 0.7515 - sparse_categorical_accuracy: 0.8031
Epoch 2/3
100/100 [==============================] - 0s 2ms/step - loss: 0.3731 - sparse_categorical_accuracy: 0.8919
Epoch 3/3
100/100 [==============================] - 0s 2ms/step - loss: 0.3165 - sparse_categorical_accuracy: 0.9084
<keras.callbacks.History at 0x15a405e90>
Using a validation dataset: you can pass a Dataset instance as the validation_data argument in fit(). At the end of each epoch, the model will iterate over the validation dataset and compute the validation loss and validation metrics:

model = get_compiled_model()

# Prepare the training dataset
train_dataset = tf.data.Dataset.from_tensor_slices((x_train, y_train))
train_dataset = train_dataset.shuffle(buffer_size=1024).batch(64)

# Prepare the validation dataset
val_dataset = tf.data.Dataset.from_tensor_slices((x_val, y_val))
val_dataset = val_dataset.batch(64)

model.fit(train_dataset, epochs=1, validation_data=val_dataset)
<keras.callbacks.History at 0x15a530510>
If you want to run validation only on a specific number of batches from this dataset, you can pass the validation_steps argument, which specifies how many validation steps the model should run with the validation dataset before interrupting validation and moving on to the next epoch:
model = get_compiled_model()
model.fit(
    train_dataset,
    epochs=1,
    # Only run validation using the first 10 batches of the dataset
    # using the `validation_steps` argument
    validation_data=val_dataset,
    validation_steps=10,
)
<keras.callbacks.History at 0x1663870d0>
Note that the validation dataset will be reset after each use (so that you will always be evaluating on the same samples from epoch to epoch). The argument validation_split (generating a holdout set from the training data) is not supported when training from Dataset objects, since this feature requires the ability to index the samples of the dataset, which is not possible in general with the Dataset API.

Other input formats supported

Besides NumPy arrays, eager tensors, and TensorFlow Datasets, it's possible to train a Keras model using Pandas dataframes, or from Python generators that yield batches of data and labels. In particular, the keras.utils.Sequence class offers a simple interface to build Python data generators that are multiprocessing-aware and can be shuffled. In general, we recommend that you use:

NumPy input data if your data is small and fits in memory
Dataset objects if you have large datasets and you need to do distributed training
Sequence objects if you have large datasets and you need to do a lot of custom Python-side processing that cannot be done in TensorFlow (e.g. if you rely on external libraries for data loading or preprocessing)
Using a keras.utils.Sequence object as input

keras.utils.Sequence is a utility that you can subclass to obtain a Python generator with two important properties: it works well with multiprocessing, and it can be shuffled (e.g. when passing shuffle=True in fit()). A Sequence must implement two methods: __getitem__ and __len__. The method __getitem__ should return a complete batch. If you want to modify your dataset between epochs, you may implement on_epoch_end. Here's a quick example:
from skimage.io import imread
from skimage.transform import resize
import numpy as np
from tensorflow.keras.utils import Sequence

# Here, `filenames` is a list of paths to image files
# and `labels` are the associated labels.


class CIFAR10Sequence(Sequence):
    def __init__(self, filenames, labels, batch_size):
        self.filenames, self.labels = filenames, labels
        self.batch_size = batch_size

    def __len__(self):
        return int(np.ceil(len(self.filenames) / float(self.batch_size)))

    def __getitem__(self, idx):
        batch_x = self.filenames[idx * self.batch_size : (idx + 1) * self.batch_size]
        batch_y = self.labels[idx * self.batch_size : (idx + 1) * self.batch_size]
        return (
            np.array([resize(imread(fname), (200, 200)) for fname in batch_x]),
            np.array(batch_y),
        )
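A usage sketch, where filenames and labels are hypothetical lists of image paths and their associated labels:

sequence = CIFAR10Sequence(filenames, labels, batch_size=64)
model.fit(sequence, epochs=10)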
Using sample weighting and class weighting

With the default settings, the weight of a sample is decided by its frequency in the dataset. There are two methods to weight the data, independent of sample frequency: class weights and sample weights.

Class weights: this is set by passing a dictionary to the class_weight argument of Model.fit(). This dictionary maps class indices to the weight that should be used for samples belonging to this class. This can be used to balance classes without resampling, or to train a model that gives more importance to a particular class. For instance, if class "0" is half as represented as class "1" in your data, you could use Model.fit(..., class_weight={0: 1., 1: 0.5}). Here's a NumPy example where we use class weights to give more importance to the correct classification of class #5 (which is the digit "5" in the MNIST dataset):
import numpy as np

class_weight = {
    0: 1.0,
    1: 1.0,
    2: 1.0,
    3: 1.0,
    4: 1.0,
    # Set weight "2" for class "5",
    # making this class 2x more important
    5: 2.0,
    6: 1.0,
    7: 1.0,
    8: 1.0,
    9: 1.0,
}

print("Fit with class weight")
model = get_compiled_model()
model.fit(x_train, y_train, class_weight=class_weight, batch_size=64, epochs=1)
<keras.callbacks.History at 0x1664ff2d0>
Sample weights: for fine-grained control, or if you are not building a classifier, you can use sample weights. When training from NumPy data, pass the sample_weight argument to Model.fit(); when training from tf.data or any other sort of iterator, yield (input_batch, label_batch, sample_weight_batch) tuples. A "sample weights" array is an array of numbers that specify how much weight each sample in a batch should have in computing the total loss. It is commonly used in imbalanced classification problems, the idea being to give more weight to rarely-seen classes. Here's the same example using sample weights instead:
sample_weight = np.ones(shape=(len(y_train),))
sample_weight[y_train == 5] = 2.0

print("Fit with sample weight")
model = get_compiled_model()
model.fit(x_train, y_train, sample_weight=sample_weight, batch_size=64, epochs=1)
<keras.callbacks.History at 0x166650090>
Here's a matching Dataset example:
sample_weight = np.ones(shape=(len(y_train),))
sample_weight[y_train == 5] = 2.0

# Create a Dataset that includes sample weights
# (3rd element in the return tuple).
train_dataset = tf.data.Dataset.from_tensor_slices((x_train, y_train, sample_weight))

# Shuffle and slice the dataset.
train_dataset = train_dataset.shuffle(buffer_size=1024).batch(64)

model = get_compiled_model()
model.fit(train_dataset, epochs=1)
<keras.callbacks.History at 0x1667b0e10>
Passing data to multi-input, multi-output models

In the previous examples, we were considering a model with a single input (a tensor of shape (784,)) and a single output (a softmax probability tensor of shape (10,)). But what about models that have multiple inputs or outputs? Consider the following model, which has an image input of shape (32, 32, 3) (that's (height, width, channels)) and a time series input of shape (None, 10) (that's (timesteps, features)). Our model will have two outputs computed from the combination of these inputs: a "score" (of shape (1,)) and a probability distribution over five classes (of shape (5,)).
image_input = keras.Input(shape=(32, 32, 3), name="img_input")
timeseries_input = keras.Input(shape=(None, 10), name="ts_input")

x1 = layers.Conv2D(3, 3)(image_input)
x1 = layers.GlobalMaxPooling2D()(x1)

x2 = layers.Conv1D(3, 3)(timeseries_input)
x2 = layers.GlobalMaxPooling1D()(x2)

x = layers.concatenate([x1, x2])

score_output = layers.Dense(1, name="score_output")(x)
class_output = layers.Dense(5, name="class_output")(x)

model = keras.Model(
    inputs=[image_input, timeseries_input], outputs=[score_output, class_output]
)

At compilation time, we can specify different losses for different outputs, by passing the loss functions as a list:
model.compile(
    optimizer=keras.optimizers.RMSprop(1e-3),
    loss=[keras.losses.MeanSquaredError(), keras.losses.CategoricalCrossentropy()],
)
If we only passed a single loss function to the model, the same loss function would be applied to every output (which is not appropriate here). Likewise for metrics:

model.compile(
    optimizer=keras.optimizers.RMSprop(1e-3),
    loss=[keras.losses.MeanSquaredError(), keras.losses.CategoricalCrossentropy()],
    metrics=[
        [
            keras.metrics.MeanAbsolutePercentageError(),
            keras.metrics.MeanAbsoluteError(),
        ],
        [keras.metrics.CategoricalAccuracy()],
    ],
)
Since we gave names to our output layers, we could also specify per-output losses and metrics via a dict:

model.compile(
    optimizer=keras.optimizers.RMSprop(1e-3),
    loss={
        "score_output": keras.losses.MeanSquaredError(),
        "class_output": keras.losses.CategoricalCrossentropy(),
    },
    metrics={
        "score_output": [
            keras.metrics.MeanAbsolutePercentageError(),
            keras.metrics.MeanAbsoluteError(),
        ],
        "class_output": [keras.metrics.CategoricalAccuracy()],
    },
)

We recommend the use of explicit names and dicts if you have more than two outputs.
It's possible to give different weights to different output-specific losses (for instance, one might wish to privilege the "score" loss in our example, by giving it twice the importance of the class loss), using the loss_weights argument:
model.compile(
    optimizer=keras.optimizers.RMSprop(1e-3),
    loss={
        "score_output": keras.losses.MeanSquaredError(),
        "class_output": keras.losses.CategoricalCrossentropy(),
    },
    metrics={
        "score_output": [
            keras.metrics.MeanAbsolutePercentageError(),
            keras.metrics.MeanAbsoluteError(),
        ],
        "class_output": [keras.metrics.CategoricalAccuracy()],
    },
    loss_weights={"score_output": 2.0, "class_output": 1.0},
)
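You could also choose not to compute a loss for certain outputs, if these outputs are meant for prediction but not for training. A sketch of both the list and dict variants (an output that receives no loss simply doesn't contribute to training):

# List loss version: pass None for the output you don't want to train on.
model.compile(
    optimizer=keras.optimizers.RMSprop(1e-3),
    loss=[None, keras.losses.CategoricalCrossentropy()],
)

# Or dict loss version: simply omit that output from the dict.
model.compile(
    optimizer=keras.optimizers.RMSprop(1e-3),
    loss={"class_output": keras.losses.CategoricalCrossentropy()},
)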
Passing data to a multi-input or multi-output model in fit() works in a similar way as specifying a loss function in compile(): you can pass lists of NumPy arrays (with 1:1 mapping to the outputs that received a loss function) or dicts mapping output names to NumPy arrays.
model.compile(
    optimizer=keras.optimizers.RMSprop(1e-3),
    loss=[keras.losses.MeanSquaredError(), keras.losses.CategoricalCrossentropy()],
)

# Generate dummy NumPy data
img_data = np.random.random_sample(size=(100, 32, 32, 3))
ts_data = np.random.random_sample(size=(100, 20, 10))
score_targets = np.random.random_sample(size=(100, 1))
class_targets = np.random.random_sample(size=(100, 5))

# Fit on lists
model.fit([img_data, ts_data], [score_targets, class_targets], batch_size=32, epochs=1)
<keras.callbacks.History at 0x166bb7490>
Here's the Dataset use case: similarly to what we did for NumPy arrays, the Dataset should return a tuple of dicts.
train_dataset = tf.data.Dataset.from_tensor_slices(
    (
        {"img_input": img_data, "ts_input": ts_data},
        {"score_output": score_targets, "class_output": class_targets},
    )
)
train_dataset = train_dataset.shuffle(buffer_size=1024).batch(64)
model.fit(train_dataset, epochs=1)
<keras.callbacks.History at 0x1669ce250>
Using callbacks

Callbacks in Keras are objects that are called at different points during training (at the start of an epoch, at the end of a batch, at the end of an epoch, etc.). They can be used to implement behaviors such as doing validation at different points during training (beyond the built-in per-epoch validation), checkpointing the model at regular intervals or when it exceeds a certain accuracy threshold, or changing the learning rate of the model when training seems to be plateauing. Callbacks can be passed as a list to your call to fit():
model = get_compiled_model()

callbacks = [
    keras.callbacks.EarlyStopping(
        # Stop training when `val_loss` is no longer improving
        monitor="val_loss",
        # "no longer improving" being defined as "no better than 1e-2 less"
        min_delta=1e-2,
        # "no longer improving" being further defined as "for at least 2 epochs"
        patience=2,
        verbose=1,
    )
]
model.fit(
    x_train,
    y_train,
    epochs=20,
    batch_size=64,
    callbacks=callbacks,
    validation_split=0.2,
)
Epoch 1/20
625/625 [==============================] - 2s 2ms/step - loss: 0.3692 - sparse_categorical_accuracy: 0.8946 - val_loss: 0.2295 - val_sparse_categorical_accuracy: 0.9287
Epoch 2/20
625/625 [==============================] - 1s 2ms/step - loss: 0.1683 - sparse_categorical_accuracy: 0.9498 - val_loss: 0.1777 - val_sparse_categorical_accuracy: 0.9473
Epoch 3/20
625/625 [==============================] - 1s 2ms/step - loss: 0.1225 - sparse_categorical_accuracy: 0.9633 - val_loss: 0.1517 - val_sparse_categorical_accuracy: 0.9546
Epoch 4/20
625/625 [==============================] - 1s 2ms/step - loss: 0.0968 - sparse_categorical_accuracy: 0.9701 - val_loss: 0.1403 - val_sparse_categorical_accuracy: 0.9597
Epoch 5/20
625/625 [==============================] - 1s 2ms/step - loss: 0.0811 - sparse_categorical_accuracy: 0.9754 - val_loss: 0.1394 - val_sparse_categorical_accuracy: 0.9579
Epoch 6/20
625/625 [==============================] - 1s 2ms/step - loss: 0.0674 - sparse_categorical_accuracy: 0.9802 - val_loss: 0.1564 - val_sparse_categorical_accuracy: 0.9574
Epoch 6: early stopping
<keras.callbacks.History at 0x166c3fe50>
Many built-in callbacks are available:

ModelCheckpoint: periodically save the model.
EarlyStopping: stop training when training is no longer improving the validation metrics.
TensorBoard: periodically write model logs that can be visualized in TensorBoard.
CSVLogger: stream loss and metrics data to a CSV file.

See the callbacks documentation for the complete list.
Writing your own callback

You can create a custom callback by extending the base class keras.callbacks.Callback. A callback has access to its associated model through the class property self.model. Here's a simple example saving a list of per-batch loss values during training:

class LossHistory(keras.callbacks.Callback):
    def on_train_begin(self, logs):
        self.per_batch_losses = []

    def on_batch_end(self, batch, logs):
        self.per_batch_losses.append(logs.get("loss"))
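A minimal usage sketch (not from the original page): instantiate the callback, pass it to fit(), then inspect the recorded losses:

loss_history = LossHistory()
model = get_compiled_model()
model.fit(x_train, y_train, batch_size=64, epochs=1, callbacks=[loss_history])
print(loss_history.per_batch_losses[:5])  # first five per-batch loss values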
Checkpointing models

When you're training a model on relatively large datasets, it's crucial to save checkpoints of your model at frequent intervals. The easiest way to achieve this is with the ModelCheckpoint callback:
model = get_compiled_model()

callbacks = [
    keras.callbacks.ModelCheckpoint(
        # Path where to save the model
        # The two parameters below mean that we will overwrite
        # the current checkpoint if and only if
        # the `val_loss` score has improved.
        # The saved model name will include the current epoch.
        filepath="mymodel_{epoch}",
        save_best_only=True,  # Only save a model if `val_loss` has improved.
        monitor="val_loss",
        verbose=1,
    )
]
model.fit(
    x_train, y_train, epochs=2, batch_size=64, callbacks=callbacks, validation_split=0.2
)
Epoch 1/2
617/625 [============================>.] - ETA: 0s - loss: 0.3668 - sparse_categorical_accuracy: 0.8954
Epoch 1: val_loss improved from inf to 0.22688, saving model to mymodel_1
INFO:tensorflow:Assets written to: mymodel_1/assets
625/625 [==============================] - 2s 3ms/step - loss: 0.3645 - sparse_categorical_accuracy: 0.8960 - val_loss: 0.2269 - val_sparse_categorical_accuracy: 0.9332
Epoch 2/2
622/625 [============================>.] - ETA: 0s - loss: 0.1748 - sparse_categorical_accuracy: 0.9480
Epoch 2: val_loss improved from 0.22688 to 0.17561, saving model to mymodel_2
INFO:tensorflow:Assets written to: mymodel_2/assets
625/625 [==============================] - 2s 2ms/step - loss: 0.1750 - sparse_categorical_accuracy: 0.9480 - val_loss: 0.1756 - val_sparse_categorical_accuracy: 0.9477
<keras.callbacks.History at 0x15a2f1910>
The ModelCheckpoint callback can be used to implement fault-tolerance: the ability to restart training from the last saved state of the model in case training gets randomly interrupted. Here's a basic example:
import os

# Prepare a directory to store all the checkpoints.
checkpoint_dir = "./ckpt"
if not os.path.exists(checkpoint_dir):
    os.makedirs(checkpoint_dir)


def make_or_restore_model():
    # Either restore the latest model, or create a fresh one
    # if there is no checkpoint available.
    checkpoints = [checkpoint_dir + "/" + name for name in os.listdir(checkpoint_dir)]
    if checkpoints:
        latest_checkpoint = max(checkpoints, key=os.path.getctime)
        print("Restoring from", latest_checkpoint)
        return keras.models.load_model(latest_checkpoint)
    print("Creating a new model")
    return get_compiled_model()
model = make_or_restore_model()
callbacks = [
    # This callback saves a SavedModel every 100 batches.
    # We include the training loss in the saved model name.
    keras.callbacks.ModelCheckpoint(
        filepath=checkpoint_dir + "/ckpt-loss={loss:.2f}", save_freq=100
    )
]
model.fit(x_train, y_train, epochs=1, callbacks=callbacks)
<keras.callbacks.History at 0x167035e50>
Using learning rate schedules

A common pattern when training deep learning models is to gradually reduce the learning rate as training progresses; this is generally known as "learning rate decay". The learning rate decay schedule can be static (fixed in advance, as a function of the current epoch or the current batch index), or dynamic (responding to the current behavior of the model, in particular the validation loss). You can use a static learning rate decay schedule by passing a schedule object as the learning_rate argument of your optimizer:
initial_learning_rate = 0.1
lr_schedule = keras.optimizers.schedules.ExponentialDecay(
    initial_learning_rate, decay_steps=100000, decay_rate=0.96, staircase=True
)

optimizer = keras.optimizers.RMSprop(learning_rate=lr_schedule)
Several built-in schedules are available: ExponentialDecay, PiecewiseConstantDecay, PolynomialDecay, and InverseTimeDecay. A dynamic learning rate schedule (for instance, decreasing the learning rate when the validation loss is no longer improving) cannot be achieved with these schedule objects, since the optimizer does not have access to validation metrics. However, callbacks do have access to all metrics, including validation metrics. You can thus achieve this pattern by using a callback that modifies the current learning rate on the optimizer; in fact, this is even built-in as the ReduceLROnPlateau callback.
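A minimal sketch of the dynamic approach (hyperparameter values illustrative, not from the original page):

callbacks = [
    keras.callbacks.ReduceLROnPlateau(
        monitor="val_loss",  # watch the validation loss
        factor=0.5,          # multiply the learning rate by 0.5 on plateau
        patience=3,          # wait 3 stagnant epochs before reducing
        min_lr=1e-6,         # never go below this learning rate
    )
]
model = get_compiled_model()
model.fit(
    x_train, y_train, epochs=20, batch_size=64,
    validation_split=0.2, callbacks=callbacks,
)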
Visualizing loss and metrics during training

The best way to keep an eye on your model during training is to use TensorBoard, a browser-based application that you can run locally and that provides you with live plots of the loss and metrics for training and evaluation, (optionally) visualizations of the histograms of your layer activations, and (optionally) 3D visualizations of the embedding spaces learned by your Embedding layers. If you have installed TensorFlow with pip, you should be able to launch TensorBoard from the command line:
tensorboard --logdir=/full_path_to_your_logs
The easiest way to use TensorBoard with a Keras model and the fit() method is the TensorBoard callback. In the simplest case, just specify where you want the callback to write logs, and you're good to go:
keras.callbacks.TensorBoard(
    log_dir="/full_path_to_your_logs",
    histogram_freq=0,  # How often to log histogram visualizations
    embeddings_freq=0,  # How often to log embedding visualizations
    update_freq="epoch",  # How often to write logs (default: once per epoch)
)
<keras.callbacks.TensorBoard at 0x12fa767d0>…
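From there, a minimal sketch of wiring the callback into training (log directory illustrative):

tb_callback = keras.callbacks.TensorBoard(log_dir="./logs")
model = get_compiled_model()
model.fit(
    x_train, y_train, epochs=2, batch_size=64,
    validation_split=0.2, callbacks=[tb_callback],
)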