outputs = layers.Dense(10, name="predictions")(x)

model = keras.Model(inputs=inputs, outputs=outputs)
model.compile(
    optimizer=keras.optimizers.RMSprop(learning_rate=1e-3),
    loss=keras.losses.SparseCategoricalCrossentropy(from_logits=True),
)

# The displayed loss will be much higher than before
# due to the regularization component.
model.fit(x_train, y_train, batch_size=64, epochs=1)

782/782 [==============================] - 1s 828us/step - loss: 3.5361

<tensorflow.python.keras.callbacks.History at 0x14d6de210>
You can do the same for logging metric values, using add_metric():
class MetricLoggingLayer(layers.Layer):
    def call(self, inputs):
        # The `aggregation` argument defines
        # how to aggregate the per-batch values
        # over each epoch:
        # in this case we simply average them.
        self.add_metric(
            keras.backend.std(inputs), name="std_of_activation", aggregation="mean"
        )
        return inputs  # Pass-through layer.
inputs = keras.Input(shape=(784,), name="digits")
x = layers.Dense(64, activation="relu", name="dense_1")(inputs)

# Insert std logging as a layer.
x = MetricLoggingLayer()(x)

x = layers.Dense(64, activation="relu", name="dense_2")(x)
outputs = layers.Dense(10, name="predictions")(x)

model = keras.Model(inputs=inputs, outputs=outputs)
model.compile(
    optimizer=keras.optimizers.RMSprop(learning_rate=1e-3),
    loss=keras.losses.SparseCategoricalCrossentropy(from_logits=True),
)
model.fit(x_train, y_train, batch_size=64, epochs=1)

782/782 [==============================] - 1s 859us/step - loss: 0.5469 - std_of_activation: 0.9414

<tensorflow.python.keras.callbacks.History at 0x14d827ed0>
In the Functional API, you can also call model.add_loss(loss_tensor), or model.add_metric(metric_tensor, name, aggregation).
Here's a simple example:
inputs = keras.Input(shape=(784,), name="digits")
x1 = layers.Dense(64, activation="relu", name="dense_1")(inputs)
x2 = layers.Dense(64, activation="relu", name="dense_2")(x1)
outputs = layers.Dense(10, name="predictions")(x2)
model = keras.Model(inputs=inputs, outputs=outputs)

model.add_loss(tf.reduce_sum(x1) * 0.1)
model.add_metric(keras.backend.std(x1), name="std_of_activation", aggregation="mean")

model.compile(
    optimizer=keras.optimizers.RMSprop(1e-3),
    loss=keras.losses.SparseCategoricalCrossentropy(from_logits=True),
)
model.fit(x_train, y_train, batch_size=64, epochs=1)

782/782 [==============================] - 1s 875us/step - loss: 3.4905 - std_of_activation: 0.0019

<tensorflow.python.keras.callbacks.History at 0x14d944790>
Note that when you pass losses via add_loss(), it becomes possible to call compile() without a loss function, since the model already has a loss to minimize.
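For instance, the Functional model above could be re-compiled with only an optimizer and fit on the inputs alone. A minimal sketch (not part of the original example; the add_loss() term is then the only quantity being minimized):

# `model` already tracks `tf.reduce_sum(x1) * 0.1` via `add_loss()`,
# so `compile()` needs no `loss` argument and `fit()` needs no targets.
model.compile(optimizer=keras.optimizers.RMSprop(1e-3))
model.fit(x_train, batch_size=64, epochs=1)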
Consider the following LogisticEndpoint layer: it takes targets & logits as inputs, tracks a crossentropy loss via add_loss(), and tracks classification accuracy via add_metric().
class LogisticEndpoint(keras.layers.Layer):
    def __init__(self, name=None):
        super(LogisticEndpoint, self).__init__(name=name)
        self.loss_fn = keras.losses.BinaryCrossentropy(from_logits=True)
        self.accuracy_fn = keras.metrics.BinaryAccuracy()

    def call(self, targets, logits, sample_weights=None):
        # Compute the training-time loss value and add it
        # to the layer using `self.add_loss()`.
        loss = self.loss_fn(targets, logits, sample_weights)
        self.add_loss(loss)

        # Log accuracy as a metric and add it
        # to the layer using `self.add_metric()`.
        acc = self.accuracy_fn(targets, logits, sample_weights)
        self.add_metric(acc, name="accuracy")

        # Return the inference-time prediction tensor (for `.predict()`).
        return tf.nn.softmax(logits)
You can use it in a model with two inputs (input data & targets), compiled without a loss argument, like this:
import numpy as np

inputs = keras.Input(shape=(3,), name="inputs")
targets = keras.Input(shape=(10,), name="targets")
logits = keras.layers.Dense(10)(inputs)
# Note the argument order matches the layer's `call(targets, logits, ...)`.
predictions = LogisticEndpoint(name="predictions")(targets, logits)

model = keras.Model(inputs=[inputs, targets], outputs=predictions)
model.compile(optimizer="adam")
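To train it, you feed both the data and the targets through the inputs dict; a minimal sketch with random placeholder arrays (not a real dataset):

data = {
    "inputs": np.random.random((3, 3)),
    "targets": np.random.random((3, 10)),
}
model.fit(data)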