TensorFlow 2.x:
Logging the learning rate set by a LearningRateScheduler to TensorBoard can be done as follows:
import tensorflow as tf
from tensorflow.keras.callbacks import LearningRateScheduler, TensorBoard
# Define your scheduling function
def scheduler(epoch):
  return 0.001 * 0.95 ** epoch
# Wrap the function in a LearningRateScheduler callback
lr_scheduler = LearningRateScheduler(scheduler)
# Alternatively, use an anonymous function
# lr_scheduler = LearningRateScheduler(lambda epoch: 0.001 * 0.95 ** epoch)
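# Note: in TF 2.x the schedule function may also take the current
# learning rate as a second argument, e.g.:
# def scheduler(epoch, lr):
#   return lr * 0.95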
# Define TensorBoard callback child class
class LRTensorBoard(TensorBoard):
  def __init__(self, log_dir, **kwargs):
    super().__init__(log_dir=log_dir, **kwargs)
    # Separate writer so the learning rate shows up as its own run
    self.lr_writer = tf.summary.create_file_writer(self.log_dir + '/learning')
  def on_epoch_end(self, epoch, logs=None):
    # Read the current learning rate from the optimizer and log it
    lr = getattr(self.model.optimizer, 'lr', None)
    if lr is not None:
      with self.lr_writer.as_default():
        tf.summary.scalar('learning_rate', lr, step=epoch)
    super().on_epoch_end(epoch, logs)
  def on_train_end(self, logs=None):
    super().on_train_end(logs)
    self.lr_writer.close()
# Create callback object
tensorboard_callback = LRTensorBoard(log_dir='./logs', histogram_freq=1)
# Compile the model
model.compile(optimizer='adam',
              loss='categorical_crossentropy',
              metrics=['accuracy'])
# Train the model
r = model.fit(X_train, y_train,
              validation_data=(X_val, y_val),
              epochs=25, batch_size=200,
              callbacks=[tensorboard_callback, lr_scheduler])
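As a lighter-weight alternative (a minimal sketch, assuming the same model and log directory as above), the scalar can also be written from a LambdaCallback instead of subclassing TensorBoard:
from tensorflow.keras.callbacks import LambdaCallback
# Writer pointing at the same subdirectory used above
lr_writer = tf.summary.create_file_writer('./logs/learning')
def log_lr(epoch, logs):
  # model.optimizer.lr is the variable the scheduler updates each epoch
  with lr_writer.as_default():
    tf.summary.scalar('learning_rate', model.optimizer.lr, step=epoch)
lr_logger = LambdaCallback(on_epoch_end=log_lr)
# Then pass callbacks=[TensorBoard(log_dir='./logs'), lr_scheduler, lr_logger] to model.fit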
The learning rate can then be viewed in TensorBoard (here from within a notebook) via:
# Load the TensorBoard notebook extension
%load_ext tensorboard
# Start TensorBoard
%tensorboard --logdir ./logs
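Outside a notebook, the same logs can be served from a shell with the TensorBoard CLI:
tensorboard --logdir ./logs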
