{ "cells": [ { "cell_type": "code", "id": "initial_id", "metadata": { "collapsed": true, "ExecuteTime": { "end_time": "2025-06-13T18:30:29.866142Z", "start_time": "2025-06-13T18:30:25.905632Z" } }, "source": [ "import pandas as pd\n", "from pathlib import Path\n", "import tensorflow as tf\n", "import keras" ], "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "2025-06-13 21:30:27.024673: I tensorflow/core/util/port.cc:153] oneDNN custom operations are on. You may see slightly different numerical results due to floating-point round-off errors from different computation orders. To turn them off, set the environment variable `TF_ENABLE_ONEDNN_OPTS=0`.\n", "2025-06-13 21:30:27.030485: I external/local_xla/xla/tsl/cuda/cudart_stub.cc:32] Could not find cuda drivers on your machine, GPU will not be used.\n", "2025-06-13 21:30:27.098384: I external/local_xla/xla/tsl/cuda/cudart_stub.cc:32] Could not find cuda drivers on your machine, GPU will not be used.\n", "2025-06-13 21:30:27.126152: E external/local_xla/xla/stream_executor/cuda/cuda_fft.cc:467] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\n", "WARNING: All log messages before absl::InitializeLog() is called are written to STDERR\n", "E0000 00:00:1749839427.186777 173645 cuda_dnn.cc:8579] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\n", "E0000 00:00:1749839427.198620 173645 cuda_blas.cc:1407] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\n", "W0000 00:00:1749839427.299296 173645 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\n", "W0000 00:00:1749839427.299363 173645 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\n", "W0000 00:00:1749839427.299370 173645 computation_placer.cc:177] computation placer already registered. Please check linkage and avoid linking the same target more than once.\n", "W0000 00:00:1749839427.299374 173645 computation_placer.cc:177] computation placer already registered. 
Please check linkage and avoid linking the same target more than once.\n",
    "2025-06-13 21:30:27.308420: I tensorflow/core/platform/cpu_feature_guard.cc:210] This TensorFlow binary is optimized to use available CPU instructions in performance-critical operations.\n",
    "To enable the following instructions: AVX2 AVX_VNNI FMA, in other operations, rebuild TensorFlow with the appropriate compiler flags.\n"
   ]
  }
 ],
 "execution_count": 1
},
{
 "metadata": {
  "ExecuteTime": {
   "end_time": "2025-06-13T18:30:30.549387Z",
   "start_time": "2025-06-13T18:30:30.017053Z"
  }
 },
 "cell_type": "code",
 "source": [
  "data_path = Path(\"Data/\")\n",
  "\n",
  "train_files = sorted(data_path.glob(\"train/chorale_*.csv\"))\n",
  "valid_files = sorted(data_path.glob(\"valid/chorale_*.csv\"))\n",
  "test_files = sorted(data_path.glob(\"test/chorale_*.csv\"))\n",
  "\n",
  "\n",
  "def load_chorales(filepaths):\n",
  "    return [pd.read_csv(filepath).values.tolist() for filepath in filepaths]\n",
  "\n",
  "train_chorales = load_chorales(train_files)\n",
  "valid_chorales = load_chorales(valid_files)\n",
  "test_chorales = load_chorales(test_files)"
 ],
 "id": "c11c09065ed24a79",
 "outputs": [],
 "execution_count": 2
},
{
 "metadata": {
  "ExecuteTime": {
   "end_time": "2025-06-13T18:30:30.614479Z",
   "start_time": "2025-06-13T18:30:30.573871Z"
  }
 },
 "cell_type": "code",
 "source": [
  "notes = set()\n",
  "for chorales in (train_chorales, valid_chorales, test_chorales):\n",
  "    for chorale in chorales:\n",
  "        for chord in chorale:\n",
  "            notes |= set(chord)\n",
  "\n",
  "n_notes = len(notes)\n",
  "min_note = min(notes - {0})\n",
  "max_note = max(notes)\n",
  "\n",
  "assert min_note == 36\n",
  "assert max_note == 81"
 ],
 "id": "bd3a64ccd37d231e",
 "outputs": [],
 "execution_count": 3
},
{
 "metadata": {
  "ExecuteTime": {
   "end_time": "2025-06-13T18:30:30.739118Z",
   "start_time": "2025-06-13T18:30:30.728629Z"
  }
 },
 "cell_type": "code",
 "source": [
  "def create_target(batch):\n",
  "    X = batch[:, :-1]\n",
  "    Y = batch[:, 1:]  # predict the next note in each arpeggio, at each step\n",
  "    return X, Y\n",
  "\n",
  "def preprocess(window):\n",
  "    window = tf.where(window == 0, window, window - min_note + 1)  # shift values\n",
  "    return tf.reshape(window, [-1])  # convert the window of chords to an arpeggio\n",
  "\n",
  "def bach_dataset(chorales, batch_size=32, shuffle_buffer_size=None,\n",
  "                 window_size=32, window_shift=16, cache=True):\n",
  "    def batch_window(window):\n",
  "        return window.batch(window_size + 1)\n",
  "\n",
  "    def to_windows(chorale):\n",
  "        # overlapping windows of window_size + 1 chords, so every input note has a next-note target\n",
  "        dataset = tf.data.Dataset.from_tensor_slices(chorale)\n",
  "        dataset = dataset.window(window_size + 1, window_shift, drop_remainder=True)\n",
  "        return dataset.flat_map(batch_window)\n",
  "\n",
  "    chorales = tf.ragged.constant(chorales, ragged_rank=1)\n",
  "    dataset = tf.data.Dataset.from_tensor_slices(chorales)\n",
  "    dataset = dataset.flat_map(to_windows).map(preprocess)\n",
  "    if cache:\n",
  "        dataset = dataset.cache()\n",
  "    if shuffle_buffer_size:\n",
  "        dataset = dataset.shuffle(shuffle_buffer_size)\n",
  "    dataset = dataset.batch(batch_size)\n",
  "    dataset = dataset.map(create_target)\n",
  "    return dataset.prefetch(1)"
 ],
 "id": "10ec7c14c413e81",
 "outputs": [],
 "execution_count": 4
},
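{
 "metadata": {},
 "cell_type": "markdown",
 "source": [
  "Each chorale is a list of chords, and each chord holds four MIDI note values (0 means the voice is silent). `bach_dataset` slices every chorale into overlapping windows of `window_size + 1 = 33` chords (shifted by 16 chords), `preprocess` remaps the values so that 0 stays 0 and MIDI notes 36-81 become IDs 1-46, and each window is flattened into a single arpeggio of note IDs. `create_target` then pairs every arpeggio with itself shifted by one step, so the model is trained to predict the next note at each position.\n",
  "\n",
  "A minimal sketch of what `preprocess` does to one tiny window (the chord values are illustrative, not taken from the dataset, and assume `min_note == 36` as asserted above):\n",
  "\n",
  "```python\n",
  "import tensorflow as tf\n",
  "\n",
  "toy_window = tf.constant([[74, 70, 65, 58],   # one chord = 4 MIDI notes\n",
  "                          [75, 70, 67,  0]])  # 0 = silent voice\n",
  "ids = tf.where(toy_window == 0, toy_window, toy_window - 36 + 1)\n",
  "print(tf.reshape(ids, [-1]).numpy())  # [39 35 30 23 40 35 32  0]\n",
  "```"
 ],
 "id": "dataset-pipeline-note"
},
{
 "metadata": {
  "ExecuteTime": {
   "end_time": "2025-06-13T18:30:32.291591Z",
   "start_time": "2025-06-13T18:30:30.842156Z"
  }
 },
 "cell_type": "code",
 "source": [
  "train_set = bach_dataset(train_chorales, shuffle_buffer_size=1000)\n",
  "valid_set = bach_dataset(valid_chorales)\n",
  "test_set = bach_dataset(test_chorales)"
 ],
 "id": "b0324d31a18d79b1",
 "outputs": [
  {
   "name": "stderr",
   "output_type": "stream",
   "text": [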
"2025-06-13 21:30:31.566353: E external/local_xla/xla/stream_executor/cuda/cuda_platform.cc:51] failed call to cuInit: INTERNAL: CUDA error: Failed call to cuInit: UNKNOWN ERROR (303)\n" ] } ], "execution_count": 5 }, { "metadata": { "ExecuteTime": { "end_time": "2025-06-13T18:30:32.571773Z", "start_time": "2025-06-13T18:30:32.311130Z" } }, "cell_type": "code", "source": [ "model = keras.Sequential(name=\"Music_RNN\", layers=[\n", " keras.layers.Embedding(input_dim=n_notes, output_dim=5, input_shape=[None]),\n", " keras.layers.Conv1D(32, kernel_size=2, padding=\"causal\", activation=\"relu\"),\n", " keras.layers.BatchNormalization(),\n", " keras.layers.Conv1D(48, kernel_size=2, padding=\"causal\", activation=\"relu\", dilation_rate=2),\n", " keras.layers.BatchNormalization(),\n", " keras.layers.Conv1D(64, kernel_size=2, padding=\"causal\", activation=\"relu\", dilation_rate=4),\n", " keras.layers.BatchNormalization(),\n", " keras.layers.Conv1D(96, kernel_size=2, padding=\"causal\", activation=\"relu\", dilation_rate=8),\n", " keras.layers.BatchNormalization(),\n", " keras.layers.Conv1D(128, kernel_size=2, padding=\"causal\", activation=\"relu\", dilation_rate=16),\n", " keras.layers.BatchNormalization(),\n", " keras.layers.LSTM(256, return_sequences=True),\n", " keras.layers.Dense(n_notes, activation=\"softmax\")\n", "])\n", "\n", "model.summary()" ], "id": "f2e6ee5949d5c394", "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "/home/andrei0016/miniconda3/lib/python3.12/site-packages/keras/src/layers/core/embedding.py:93: UserWarning: Do not pass an `input_shape`/`input_dim` argument to a layer. When using Sequential models, prefer using an `Input(shape)` object as the first layer in the model instead.\n", " super().__init__(**kwargs)\n" ] }, { "data": { "text/plain": [ "\u001B[1mModel: \"Music_RNN\"\u001B[0m\n" ], "text/html": [ "
Model: \"Music_RNN\"\n",
"\n"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": [
"┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓\n",
"┃\u001B[1m \u001B[0m\u001B[1mLayer (type) \u001B[0m\u001B[1m \u001B[0m┃\u001B[1m \u001B[0m\u001B[1mOutput Shape \u001B[0m\u001B[1m \u001B[0m┃\u001B[1m \u001B[0m\u001B[1m Param #\u001B[0m\u001B[1m \u001B[0m┃\n",
"┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩\n",
"│ embedding (\u001B[38;5;33mEmbedding\u001B[0m) │ (\u001B[38;5;45mNone\u001B[0m, \u001B[38;5;45mNone\u001B[0m, \u001B[38;5;34m5\u001B[0m) │ \u001B[38;5;34m235\u001B[0m │\n",
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
"│ conv1d (\u001B[38;5;33mConv1D\u001B[0m) │ (\u001B[38;5;45mNone\u001B[0m, \u001B[38;5;45mNone\u001B[0m, \u001B[38;5;34m32\u001B[0m) │ \u001B[38;5;34m352\u001B[0m │\n",
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
"│ batch_normalization │ (\u001B[38;5;45mNone\u001B[0m, \u001B[38;5;45mNone\u001B[0m, \u001B[38;5;34m32\u001B[0m) │ \u001B[38;5;34m128\u001B[0m │\n",
"│ (\u001B[38;5;33mBatchNormalization\u001B[0m) │ │ │\n",
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
"│ conv1d_1 (\u001B[38;5;33mConv1D\u001B[0m) │ (\u001B[38;5;45mNone\u001B[0m, \u001B[38;5;45mNone\u001B[0m, \u001B[38;5;34m48\u001B[0m) │ \u001B[38;5;34m3,120\u001B[0m │\n",
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
"│ batch_normalization_1 │ (\u001B[38;5;45mNone\u001B[0m, \u001B[38;5;45mNone\u001B[0m, \u001B[38;5;34m48\u001B[0m) │ \u001B[38;5;34m192\u001B[0m │\n",
"│ (\u001B[38;5;33mBatchNormalization\u001B[0m) │ │ │\n",
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
"│ conv1d_2 (\u001B[38;5;33mConv1D\u001B[0m) │ (\u001B[38;5;45mNone\u001B[0m, \u001B[38;5;45mNone\u001B[0m, \u001B[38;5;34m64\u001B[0m) │ \u001B[38;5;34m6,208\u001B[0m │\n",
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
"│ batch_normalization_2 │ (\u001B[38;5;45mNone\u001B[0m, \u001B[38;5;45mNone\u001B[0m, \u001B[38;5;34m64\u001B[0m) │ \u001B[38;5;34m256\u001B[0m │\n",
"│ (\u001B[38;5;33mBatchNormalization\u001B[0m) │ │ │\n",
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
"│ conv1d_3 (\u001B[38;5;33mConv1D\u001B[0m) │ (\u001B[38;5;45mNone\u001B[0m, \u001B[38;5;45mNone\u001B[0m, \u001B[38;5;34m96\u001B[0m) │ \u001B[38;5;34m12,384\u001B[0m │\n",
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
"│ batch_normalization_3 │ (\u001B[38;5;45mNone\u001B[0m, \u001B[38;5;45mNone\u001B[0m, \u001B[38;5;34m96\u001B[0m) │ \u001B[38;5;34m384\u001B[0m │\n",
"│ (\u001B[38;5;33mBatchNormalization\u001B[0m) │ │ │\n",
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
"│ conv1d_4 (\u001B[38;5;33mConv1D\u001B[0m) │ (\u001B[38;5;45mNone\u001B[0m, \u001B[38;5;45mNone\u001B[0m, \u001B[38;5;34m128\u001B[0m) │ \u001B[38;5;34m24,704\u001B[0m │\n",
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
"│ batch_normalization_4 │ (\u001B[38;5;45mNone\u001B[0m, \u001B[38;5;45mNone\u001B[0m, \u001B[38;5;34m128\u001B[0m) │ \u001B[38;5;34m512\u001B[0m │\n",
"│ (\u001B[38;5;33mBatchNormalization\u001B[0m) │ │ │\n",
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
"│ lstm (\u001B[38;5;33mLSTM\u001B[0m) │ (\u001B[38;5;45mNone\u001B[0m, \u001B[38;5;45mNone\u001B[0m, \u001B[38;5;34m256\u001B[0m) │ \u001B[38;5;34m394,240\u001B[0m │\n",
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
"│ dense (\u001B[38;5;33mDense\u001B[0m) │ (\u001B[38;5;45mNone\u001B[0m, \u001B[38;5;45mNone\u001B[0m, \u001B[38;5;34m47\u001B[0m) │ \u001B[38;5;34m12,079\u001B[0m │\n",
"└─────────────────────────────────┴────────────────────────┴───────────────┘\n"
],
"text/html": [
"┏━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━━━━━━━━━━┳━━━━━━━━━━━━━━━┓\n",
"┃ Layer (type) ┃ Output Shape ┃ Param # ┃\n",
"┡━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━━━━━━━━━━╇━━━━━━━━━━━━━━━┩\n",
"│ embedding (Embedding) │ (None, None, 5) │ 235 │\n",
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
"│ conv1d (Conv1D) │ (None, None, 32) │ 352 │\n",
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
"│ batch_normalization │ (None, None, 32) │ 128 │\n",
"│ (BatchNormalization) │ │ │\n",
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
"│ conv1d_1 (Conv1D) │ (None, None, 48) │ 3,120 │\n",
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
"│ batch_normalization_1 │ (None, None, 48) │ 192 │\n",
"│ (BatchNormalization) │ │ │\n",
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
"│ conv1d_2 (Conv1D) │ (None, None, 64) │ 6,208 │\n",
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
"│ batch_normalization_2 │ (None, None, 64) │ 256 │\n",
"│ (BatchNormalization) │ │ │\n",
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
"│ conv1d_3 (Conv1D) │ (None, None, 96) │ 12,384 │\n",
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
"│ batch_normalization_3 │ (None, None, 96) │ 384 │\n",
"│ (BatchNormalization) │ │ │\n",
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
"│ conv1d_4 (Conv1D) │ (None, None, 128) │ 24,704 │\n",
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
"│ batch_normalization_4 │ (None, None, 128) │ 512 │\n",
"│ (BatchNormalization) │ │ │\n",
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
"│ lstm (LSTM) │ (None, None, 256) │ 394,240 │\n",
"├─────────────────────────────────┼────────────────────────┼───────────────┤\n",
"│ dense (Dense) │ (None, None, 47) │ 12,079 │\n",
"└─────────────────────────────────┴────────────────────────┴───────────────┘\n",
"\n"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"text/plain": [
"\u001B[1m Total params: \u001B[0m\u001B[38;5;34m454,794\u001B[0m (1.73 MB)\n"
],
"text/html": [
"Total params: 454,794 (1.73 MB)\n", "\n" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "text/plain": [ "\u001B[1m Trainable params: \u001B[0m\u001B[38;5;34m454,058\u001B[0m (1.73 MB)\n" ], "text/html": [ "
Trainable params: 454,058 (1.73 MB)\n", "\n" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "text/plain": [ "\u001B[1m Non-trainable params: \u001B[0m\u001B[38;5;34m736\u001B[0m (2.88 KB)\n" ], "text/html": [ "
Non-trainable params: 736 (2.88 KB)\n", "\n" ] }, "metadata": {}, "output_type": "display_data" } ], "execution_count": 6 }, { "metadata": { "ExecuteTime": { "end_time": "2025-06-13T18:39:05.590442Z", "start_time": "2025-06-13T18:30:32.654092Z" } }, "cell_type": "code", "source": [ "optimizer = tf.keras.optimizers.Nadam(learning_rate=1e-3)\n", "model.compile(loss=\"sparse_categorical_crossentropy\", optimizer=optimizer, metrics=[\"accuracy\"])\n", "model.fit(train_set, epochs=20, validation_data=valid_set)" ], "id": "f92ec5b749cda8d6", "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1/20\n", " 98/Unknown \u001B[1m28s\u001B[0m 223ms/step - accuracy: 0.3137 - loss: 2.6649" ] }, { "name": "stderr", "output_type": "stream", "text": [ "2025-06-13 21:31:00.707129: I tensorflow/core/framework/local_rendezvous.cc:407] Local rendezvous is aborting with status: OUT_OF_RANGE: End of sequence\n", "\t [[{{node IteratorGetNext}}]]\n", "/home/andrei0016/miniconda3/lib/python3.12/contextlib.py:158: UserWarning: Your input ran out of data; interrupting training. Make sure that your dataset or generator can generate at least `steps_per_epoch * epochs` batches. You may need to use the `.repeat()` function when building your dataset.\n", " self.gen.throw(value)\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "\u001B[1m98/98\u001B[0m \u001B[32m━━━━━━━━━━━━━━━━━━━━\u001B[0m\u001B[37m\u001B[0m \u001B[1m32s\u001B[0m 260ms/step - accuracy: 0.3156 - loss: 2.6569 - val_accuracy: 0.0786 - val_loss: 3.7352\n", "Epoch 2/20\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "2025-06-13 21:31:04.276111: I tensorflow/core/framework/local_rendezvous.cc:407] Local rendezvous is aborting with status: OUT_OF_RANGE: End of sequence\n", "\t [[{{node IteratorGetNext}}]]\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "\u001B[1m98/98\u001B[0m \u001B[32m━━━━━━━━━━━━━━━━━━━━\u001B[0m\u001B[37m\u001B[0m \u001B[1m25s\u001B[0m 254ms/step - accuracy: 0.7535 - loss: 0.9438 - val_accuracy: 0.1163 - val_loss: 3.4637\n", "Epoch 3/20\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "2025-06-13 21:31:29.153759: I tensorflow/core/framework/local_rendezvous.cc:407] Local rendezvous is aborting with status: OUT_OF_RANGE: End of sequence\n", "\t [[{{node IteratorGetNext}}]]\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "\u001B[1m98/98\u001B[0m \u001B[32m━━━━━━━━━━━━━━━━━━━━\u001B[0m\u001B[37m\u001B[0m \u001B[1m25s\u001B[0m 254ms/step - accuracy: 0.7917 - loss: 0.7477 - val_accuracy: 0.1727 - val_loss: 3.1308\n", "Epoch 4/20\n", "\u001B[1m98/98\u001B[0m \u001B[32m━━━━━━━━━━━━━━━━━━━━\u001B[0m\u001B[37m\u001B[0m \u001B[1m26s\u001B[0m 264ms/step - accuracy: 0.8093 - loss: 0.6566 - val_accuracy: 0.2591 - val_loss: 2.5800\n", "Epoch 5/20\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "2025-06-13 21:32:19.866931: I tensorflow/core/framework/local_rendezvous.cc:407] Local rendezvous is aborting with status: OUT_OF_RANGE: End of sequence\n", "\t [[{{node IteratorGetNext}}]]\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "\u001B[1m98/98\u001B[0m \u001B[32m━━━━━━━━━━━━━━━━━━━━\u001B[0m\u001B[37m\u001B[0m \u001B[1m25s\u001B[0m 254ms/step - accuracy: 0.8241 - loss: 0.5948 - val_accuracy: 0.5689 - val_loss: 1.4124\n", "Epoch 6/20\n", "\u001B[1m98/98\u001B[0m \u001B[32m━━━━━━━━━━━━━━━━━━━━\u001B[0m\u001B[37m\u001B[0m \u001B[1m25s\u001B[0m 258ms/step - accuracy: 0.8339 - loss: 0.5554 - val_accuracy: 0.7331 - val_loss: 0.8900\n", 
"Epoch 7/20\n", "\u001B[1m98/98\u001B[0m \u001B[32m━━━━━━━━━━━━━━━━━━━━\u001B[0m\u001B[37m\u001B[0m \u001B[1m25s\u001B[0m 260ms/step - accuracy: 0.8438 - loss: 0.5161 - val_accuracy: 0.7983 - val_loss: 0.6799\n", "Epoch 8/20\n", "\u001B[1m98/98\u001B[0m \u001B[32m━━━━━━━━━━━━━━━━━━━━\u001B[0m\u001B[37m\u001B[0m \u001B[1m26s\u001B[0m 262ms/step - accuracy: 0.8522 - loss: 0.4868 - val_accuracy: 0.8114 - val_loss: 0.6398\n", "Epoch 9/20\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "2025-06-13 21:34:01.066670: I tensorflow/core/framework/local_rendezvous.cc:407] Local rendezvous is aborting with status: OUT_OF_RANGE: End of sequence\n", "\t [[{{node IteratorGetNext}}]]\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "\u001B[1m98/98\u001B[0m \u001B[32m━━━━━━━━━━━━━━━━━━━━\u001B[0m\u001B[37m\u001B[0m \u001B[1m25s\u001B[0m 259ms/step - accuracy: 0.8592 - loss: 0.4599 - val_accuracy: 0.8212 - val_loss: 0.6050\n", "Epoch 10/20\n", "\u001B[1m98/98\u001B[0m \u001B[32m━━━━━━━━━━━━━━━━━━━━\u001B[0m\u001B[37m\u001B[0m \u001B[1m25s\u001B[0m 254ms/step - accuracy: 0.8688 - loss: 0.4258 - val_accuracy: 0.8226 - val_loss: 0.6068\n", "Epoch 11/20\n", "\u001B[1m98/98\u001B[0m \u001B[32m━━━━━━━━━━━━━━━━━━━━\u001B[0m\u001B[37m\u001B[0m \u001B[1m25s\u001B[0m 260ms/step - accuracy: 0.8758 - loss: 0.4037 - val_accuracy: 0.8244 - val_loss: 0.6088\n", "Epoch 12/20\n", "\u001B[1m98/98\u001B[0m \u001B[32m━━━━━━━━━━━━━━━━━━━━\u001B[0m\u001B[37m\u001B[0m \u001B[1m26s\u001B[0m 261ms/step - accuracy: 0.8813 - loss: 0.3820 - val_accuracy: 0.8236 - val_loss: 0.6096\n", "Epoch 13/20\n", "\u001B[1m98/98\u001B[0m \u001B[32m━━━━━━━━━━━━━━━━━━━━\u001B[0m\u001B[37m\u001B[0m \u001B[1m26s\u001B[0m 261ms/step - accuracy: 0.8891 - loss: 0.3574 - val_accuracy: 0.8212 - val_loss: 0.6176\n", "Epoch 14/20\n", "\u001B[1m98/98\u001B[0m \u001B[32m━━━━━━━━━━━━━━━━━━━━\u001B[0m\u001B[37m\u001B[0m \u001B[1m26s\u001B[0m 261ms/step - accuracy: 0.8954 - loss: 0.3381 - val_accuracy: 0.8231 - val_loss: 0.6219\n", "Epoch 15/20\n", "\u001B[1m98/98\u001B[0m \u001B[32m━━━━━━━━━━━━━━━━━━━━\u001B[0m\u001B[37m\u001B[0m \u001B[1m26s\u001B[0m 262ms/step - accuracy: 0.9038 - loss: 0.3131 - val_accuracy: 0.8211 - val_loss: 0.6362\n", "Epoch 16/20\n", "\u001B[1m98/98\u001B[0m \u001B[32m━━━━━━━━━━━━━━━━━━━━\u001B[0m\u001B[37m\u001B[0m \u001B[1m26s\u001B[0m 262ms/step - accuracy: 0.9090 - loss: 0.2961 - val_accuracy: 0.8198 - val_loss: 0.6408\n", "Epoch 17/20\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "2025-06-13 21:37:24.919507: I tensorflow/core/framework/local_rendezvous.cc:407] Local rendezvous is aborting with status: OUT_OF_RANGE: End of sequence\n", "\t [[{{node IteratorGetNext}}]]\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "\u001B[1m98/98\u001B[0m \u001B[32m━━━━━━━━━━━━━━━━━━━━\u001B[0m\u001B[37m\u001B[0m \u001B[1m25s\u001B[0m 254ms/step - accuracy: 0.9140 - loss: 0.2782 - val_accuracy: 0.8175 - val_loss: 0.6539\n", "Epoch 18/20\n", "\u001B[1m98/98\u001B[0m \u001B[32m━━━━━━━━━━━━━━━━━━━━\u001B[0m\u001B[37m\u001B[0m \u001B[1m25s\u001B[0m 256ms/step - accuracy: 0.9198 - loss: 0.2611 - val_accuracy: 0.8178 - val_loss: 0.6626\n", "Epoch 19/20\n", "\u001B[1m98/98\u001B[0m \u001B[32m━━━━━━━━━━━━━━━━━━━━\u001B[0m\u001B[37m\u001B[0m \u001B[1m25s\u001B[0m 258ms/step - accuracy: 0.9250 - loss: 0.2443 - val_accuracy: 0.8115 - val_loss: 0.6911\n", "Epoch 20/20\n", "\u001B[1m98/98\u001B[0m \u001B[32m━━━━━━━━━━━━━━━━━━━━\u001B[0m\u001B[37m\u001B[0m \u001B[1m25s\u001B[0m 259ms/step - accuracy: 
0.9269 - loss: 0.2380 - val_accuracy: 0.8130 - val_loss: 0.6963\n" ] }, { "data": { "text/plain": [ "