# import libraries
import matplotlib.pyplot as plt
import numpy as np
from tensorflow.keras.datasets import reuters
from tensorflow.keras.utils import to_categorical
from tensorflow.keras import models, layers, optimizers, backend
# 6 Reuters
# load dataset: keep only the `num_words` most frequent tokens; rarer words
# are dropped by the loader.
num_words = 10000
(train_data, train_labels), (test_data, test_labels) = reuters.load_data(num_words=num_words)
# notebook-style shape sanity check
train_data.shape, train_labels.shape, test_data.shape, test_labels.shape
# Fix every document to the same length so the data forms a dense int matrix.
seq_len = 300  # the avg is 145.54

# Truncate to seq_len, then left-pad with zeros up to seq_len.
X_train = [seq[:seq_len] for seq in train_data]
X_train = [np.append([0] * (seq_len - len(seq)), seq) for seq in X_train]
X_train = np.array(X_train).astype(int)
# One-hot encode the topic labels (46 classes in the Reuters dataset).
y_train = to_categorical(train_labels)

# Same truncate/pad/encode pipeline for the test split.
X_test = [seq[:seq_len] for seq in test_data]
X_test = [np.append([0] * (seq_len - len(seq)), seq) for seq in X_test]
X_test = np.array(X_test).astype(int)
y_test = to_categorical(test_labels)

# notebook-style shape sanity check
X_train.shape, y_train.shape, X_test.shape, y_test.shape
# Hold out everything after the first 4500 samples for validation.
partial_X_train = X_train[:4500]
partial_y_train = y_train[:4500]
X_val = X_train[4500:]
y_val = y_train[4500:]
def explore(X_train,
            y_train,
            X_val,
            y_val,
            embedding_dim,
            learning_rate,
            momentum):
    """Train one embedding classifier and return its validation metrics.

    Parameters
    ----------
    X_train, y_train : padded integer sequences and one-hot topic labels.
    X_val, y_val : validation split evaluated after training.
    embedding_dim : width of the learned word-embedding vectors.
    learning_rate, momentum : RMSprop hyperparameters under search.

    Returns
    -------
    (val_loss, val_acc) : loss and accuracy on the validation set.
    """
    # define ann architecture
    model = models.Sequential()
    model.add(layers.Embedding(num_words, embedding_dim, input_length=seq_len))
    # Flatten the (seq_len, embedding_dim) embedding output so the Dense
    # layers see one vector per sample; without this the head's output
    # shape cannot match the (batch, 46) one-hot targets.
    model.add(layers.Flatten())
    model.add(layers.Dense(64, activation="relu"))
    # 46 mutually exclusive topics -> softmax (not sigmoid) to pair
    # correctly with categorical_crossentropy.
    model.add(layers.Dense(46, activation="softmax"))
    # define optimizer, loss function, and metrics
    optimizer = optimizers.RMSprop(learning_rate=learning_rate, momentum=momentum)
    # train ann model
    model.compile(optimizer=optimizer, loss="categorical_crossentropy", metrics=["accuracy"])
    model.fit(X_train, y_train, epochs=20, batch_size=64, verbose=0)
    # evaluate ann model
    val_loss, val_acc = model.evaluate(X_val, y_val, verbose=0)
    return val_loss, val_acc
# set hyperparameters
learning_rate_list = np.logspace(-2, -4, 5)
momentum_list = np.linspace(0.1, 0.9, 5)
embedding_dim_list = 2 ** np.arange(3, 7)

# Build the full Cartesian grid: 5 * 5 * 4 = 100 combinations.
param_list = []
for learning_rate in learning_rate_list:
    for momentum in momentum_list:
        for embedding_dim in embedding_dim_list:
            param_list.append({"learning_rate": learning_rate,
                               "momentum": momentum,
                               "embedding_dim": embedding_dim
                               })
# Evaluate every hyperparameter combination on the held-out validation split.
results = []
for params in param_list:
    val_loss, val_acc = explore(
        partial_X_train,
        partial_y_train,
        X_val,
        y_val,
        embedding_dim=params["embedding_dim"],
        learning_rate=params["learning_rate"],
        momentum=params["momentum"],
    )
    results.append({"val_loss": val_loss,
                    "val_acc": val_acc,
                    "params": params})
    # Release the graph between runs so memory does not grow over 100 fits.
    backend.clear_session()
# get optimal parameters: the combination with the best validation accuracy
val_accuracies = [result["val_acc"] for result in results]
opt_params = results[np.argmax(val_accuracies)]["params"]
# notebook-style display of the winner
opt_params
# define ann architecture: rebuild the winning configuration from the search.
# NOTE(review): the original indexed opt_params["n_layers"]/"n_units"/
# "activation" — keys the grid search never produced (KeyError at runtime) —
# and used a 1-unit sigmoid head with binary_crossentropy plus
# build(input_shape=(10000,)), none of which matches the (N, 300) inputs and
# 46-class one-hot targets. Rebuild the same architecture `explore` searched.
model = models.Sequential()
model.add(layers.Embedding(num_words, opt_params["embedding_dim"], input_length=seq_len))
model.add(layers.Flatten())
model.add(layers.Dense(64, activation="relu"))
model.add(layers.Dense(46, activation="softmax"))

# define optimizer, loss function, and metrics
optimizer = optimizers.RMSprop(learning_rate=opt_params["learning_rate"],
                               momentum=opt_params["momentum"])

# train ann model on the full training set
model.compile(optimizer=optimizer, loss="categorical_crossentropy", metrics=["accuracy"])
history = model.fit(X_train, y_train, epochs=20, batch_size=64, verbose=0)
# Training-loss curve. model.fit returns a History object; the per-epoch
# metric lists live in its `history` attribute (indexing the History object
# itself, as the original did, raises TypeError).
loss = history.history['loss']
epochs = range(1, len(loss) + 1)

blue_dots = 'bo'
solid_blue_line = 'b'

plt.plot(epochs, loss, solid_blue_line, label='Training loss')
plt.title('Training loss')
plt.xlabel('Epochs')
plt.ylabel('Loss')
plt.legend()
plt.show()
# Training-accuracy curve (same History-object fix as the loss plot).
accuracy = history.history['accuracy']
epochs = range(1, len(accuracy) + 1)

blue_dots = 'bo'
solid_blue_line = 'b'

plt.plot(epochs, accuracy, solid_blue_line, label='Training accuracy')
plt.title('Training accuracy')
plt.xlabel('Epochs')
plt.ylabel('accuracy')
plt.legend()
plt.show()

# Final generalization check on the untouched test split.
model.evaluate(X_test, y_test)