ckiias/lec4-5-nlp-lstm.ipynb


In [1]:
import os

# The backend must be selected before Keras is first imported.
os.environ["KERAS_BACKEND"] = "jax"
import keras

print(keras.__version__)
3.9.2
In [2]:
from keras.datasets import imdb
import os

unique_words = 10000  # keep only the 10,000 most frequent words
max_length = 100      # pad/truncate every review to 100 tokens

output_dir = "tmp"
os.makedirs(output_dir, exist_ok=True)

(X_train, y_train), (X_valid, y_valid) = imdb.load_data(num_words=unique_words)
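Each review arrives as a list of word indices rather than text. As a sanity check, the dataset's built-in word index can map those integers back to words; indices 0–2 are reserved for padding, start-of-sequence, and out-of-vocabulary markers, so the lookup is offset by 3. A minimal sketch:

word_index = imdb.get_word_index()
index_word = {i + 3: w for w, i in word_index.items()}
index_word.update({0: "<pad>", 1: "<start>", 2: "<unk>"})

# Print the first 20 tokens of the first training review as words.
print(" ".join(index_word.get(i, "<unk>") for i in X_train[0][:20]))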
In [3]:
from keras.preprocessing.sequence import pad_sequences

# Pad short reviews with zeros at the front; truncate long ones, also from the front.
X_train = pad_sequences(X_train, maxlen=max_length, padding="pre", truncating="pre", value=0)
X_valid = pad_sequences(X_valid, maxlen=max_length, padding="pre", truncating="pre", value=0)
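To make the effect concrete, here is what pad_sequences does to a toy batch (a quick sketch, not part of the original run):

demo = pad_sequences([[7, 8, 9]], maxlen=5, padding="pre", value=0)
print(demo)  # [[0 0 7 8 9]] — zeros prepended until the row is 5 tokens long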
In [4]:
from keras.models import Sequential
from keras.layers import InputLayer, Embedding, SpatialDropout1D, LSTM, Dense

lstm_model = Sequential()
lstm_model.add(InputLayer(shape=(max_length,), dtype="float32"))
lstm_model.add(Embedding(unique_words, 64))     # map each word index to a 64-d vector
lstm_model.add(SpatialDropout1D(0.2))           # drop whole embedding channels, not single units
lstm_model.add(LSTM(256, dropout=0.2))          # returns only the final hidden state
lstm_model.add(Dense(1, activation="sigmoid"))  # binary sentiment probability

lstm_model.summary()
Model: "sequential"
┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓
┃ Layer (type)                    ┃ Output Shape           ┃       Param # ┃
┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩
│ embedding (Embedding)           │ (None, 100, 64)        │       640,000 │
├─────────────────────────────────┼────────────────────────┼───────────────┤
│ spatial_dropout1d               │ (None, 100, 64)        │             0 │
│ (SpatialDropout1D)              │                        │               │
├─────────────────────────────────┼────────────────────────┼───────────────┤
│ lstm (LSTM)                     │ (None, 256)            │       328,704 │
├─────────────────────────────────┼────────────────────────┼───────────────┤
│ dense (Dense)                   │ (None, 1)              │           257 │
└─────────────────────────────────┴────────────────────────┴───────────────┘
 Total params: 968,961 (3.70 MB)
 Trainable params: 968,961 (3.70 MB)
 Non-trainable params: 0 (0.00 B)
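The LSTM's 328,704 parameters follow directly from its four gates: each gate has a weight matrix over the 64-d input, a recurrent matrix over the 256-d state, and a bias, giving 4 × (64·256 + 256·256 + 256) = 328,704. Likewise the embedding contributes 10,000 × 64 = 640,000. A quick check:

n_lstm = 4 * (64 * 256 + 256 * 256 + 256)
n_embed = unique_words * 64
print(n_lstm, n_embed)  # 328704 640000, matching the summary above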
In [5]:
from keras.callbacks import ModelCheckpoint

lstm_model.compile(
    loss="binary_crossentropy",
    optimizer="adam",
    metrics=["accuracy"],
)

lstm_model.fit(
    X_train,
    y_train,
    batch_size=128,
    epochs=4,
    validation_data=(X_valid, y_valid),
    # Save the full model after every epoch so the best one can be reloaded later.
    callbacks=[ModelCheckpoint(filepath=output_dir + "/lstm_weights.{epoch:02d}.keras")],
)
Epoch 1/4
196/196 ━━━━━━━━━━━━━━━━━━━━ 42s 214ms/step - accuracy: 0.6435 - loss: 0.6105 - val_accuracy: 0.8497 - val_loss: 0.3466
Epoch 2/4
196/196 ━━━━━━━━━━━━━━━━━━━━ 46s 231ms/step - accuracy: 0.8819 - loss: 0.2947 - val_accuracy: 0.8527 - val_loss: 0.3380
Epoch 3/4
196/196 ━━━━━━━━━━━━━━━━━━━━ 53s 273ms/step - accuracy: 0.9121 - loss: 0.2282 - val_accuracy: 0.8472 - val_loss: 0.3587
Epoch 4/4
196/196 ━━━━━━━━━━━━━━━━━━━━ 64s 325ms/step - accuracy: 0.9299 - loss: 0.1847 - val_accuracy: 0.8332 - val_loss: 0.3998
Out[5]:
<keras.src.callbacks.history.History at 0x3464a2bd0>
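Validation loss bottoms out at epoch 2 (0.3380) and climbs afterwards while training accuracy keeps rising, a classic sign of overfitting. Rather than picking the checkpoint by hand, the run could stop itself; a sketch using Keras's EarlyStopping callback (not part of the original run):

from keras.callbacks import EarlyStopping

early_stop = EarlyStopping(
    monitor="val_loss",
    patience=1,                 # stop one epoch after val_loss stops improving
    restore_best_weights=True,  # roll back to the best epoch automatically
)
# lstm_model.fit(..., callbacks=[early_stop])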
In [6]:
# Reload the epoch-2 checkpoint, which had the lowest validation loss.
lstm_model.load_weights(output_dir + "/lstm_weights.02.keras")
lstm_model.evaluate(X_valid, y_valid)
782/782 ━━━━━━━━━━━━━━━━━━━━ 39s 50ms/step - accuracy: 0.8509 - loss: 0.3421
Out[6]:
[0.33803924918174744, 0.8527200222015381]
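Accuracy depends on the 0.5 decision threshold; a threshold-free view of the same predictions is ROC AUC. A sketch, assuming scikit-learn is available in the environment:

from sklearn.metrics import roc_auc_score

y_hat = lstm_model.predict(X_valid)
print(roc_auc_score(y_valid, y_hat))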
In [7]:
import matplotlib.pyplot as plt

# Histogram of predicted probabilities; the orange line marks the 0.5 decision threshold.
plt.hist(lstm_model.predict(X_valid))
_ = plt.axvline(x=0.5, color="orange")
782/782 ━━━━━━━━━━━━━━━━━━━━ 36s 47ms/step
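To turn these probabilities into hard labels, threshold at the same 0.5 line (a sketch; the resulting accuracy should match evaluate() above):

import numpy as np

y_hat = lstm_model.predict(X_valid)
y_pred = (y_hat.flatten() >= 0.5).astype(int)
print((y_pred == y_valid).mean())  # fraction of validation reviews classified correctly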