{ "cells": [ { "cell_type": "markdown", "metadata": {}, "source": [ "# Summary" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "This notebook shows how to train Denoising Difussion Models.\n", "\n", "The code has been adapted and curated from [this tutorial by Andras Beres](https://colab.research.google.com/github/keras-team/keras-io/blob/master/examples/generative/ipynb/ddim.ipynb#scrollTo=XtxiDuXZGcAu). " ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Hyperparams" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "import numpy as np\n", "import math\n", "import tensorflow as tf\n", "import tensorflow_datasets as tfds\n", "import tensorflow_addons as tfa\n", "import matplotlib.pyplot as plt\n", "import os\n", "\n", "# data\n", "diffusion_steps = 20\n", "image_size = 32\n", "\n", "# sampling\n", "min_signal_rate = 0.02\n", "max_signal_rate = 0.95\n", "\n", "# optimization\n", "batch_size = 64\n", "num_epochs = 10\n", "learning_rate = 1e-3\n", "weight_decay = 1e-4\n", "ema = 0.999" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Dataset" ] }, { "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "def preprocess_image(data):\n", " # center crop image\n", " height = tf.shape(data[\"image\"])[0]\n", " width = tf.shape(data[\"image\"])[1]\n", " crop_size = tf.minimum(height, width)\n", " image = tf.image.crop_to_bounding_box(\n", " data[\"image\"],\n", " (height - crop_size) // 2,\n", " (width - crop_size) // 2,\n", " crop_size,\n", " crop_size,\n", " )\n", " # resize and clip\n", " # for image downsampling it is important to turn on antialiasing\n", " image = tf.image.resize(image, size=[image_size, image_size], antialias=True)\n", " return tf.clip_by_value(image / 255.0, 0.0, 1.0)\n", "\n", "def prepare_dataset(split):\n", " return (\n", " tfds.load('mnist', split=split, shuffle_files=True)\n", " .map(preprocess_image, num_parallel_calls=tf.data.AUTOTUNE)\n", " .cache()\n", " .repeat(1)\n", " .shuffle(10000)\n", " .batch(batch_size, drop_remainder=True)\n", " .prefetch(buffer_size=tf.data.AUTOTUNE)\n", " )\n", "\n", "# load dataset\n", "train_dataset = prepare_dataset(\"train\")\n", "val_dataset = prepare_dataset(\"test\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Denoising Network\n", "\n", "We will use the [Residual U-Net](https://www.sciencedirect.com/science/article/abs/pii/S0924271620300149) model." ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## TODO: can we use something simpler?" ] }, { "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [], "source": [ "embedding_max_frequency = 1000.0\n", "embedding_dims = 64\n", "\n", "def sinusoidal_embedding(x):\n", " embedding_min_frequency = 1.0\n", " frequencies = tf.exp(\n", " tf.linspace(\n", " tf.math.log(embedding_min_frequency),\n", " tf.math.log(embedding_max_frequency),\n", " embedding_dims // 2,\n", " )\n", " )\n", " angular_speeds = 2.0 * math.pi * frequencies\n", " embeddings = tf.concat(\n", " [tf.sin(angular_speeds * x), tf.cos(angular_speeds * x)], axis=3\n", " )\n", " return embeddings" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Custom Residual Network" ] }, { "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [], "source": [ "def get_network_custom(image_size, block_depth=17, output_channels=1):\n", " # use the correct number of channels\n", " noisy_images = tf.keras.Input(shape=(image_size, image_size, output_channels))\n", " noise_variances = tf.keras.Input(shape=(1, 1, 1))\n", "\n", " e = tf.keras.layers.Lambda(sinusoidal_embedding)(noise_variances)\n", " e = tf.keras.layers.UpSampling2D(size=image_size, interpolation=\"nearest\")(e)\n", " x = tf.keras.layers.Conv2D(32, kernel_size=1)(noisy_images)\n", " x = tf.keras.layers.Concatenate()([x, e])\n", "\n", " x = tf.keras.layers.Conv2D(64, 3, padding='same', activation=tf.nn.relu)(x)\n", " for layers in range(2, block_depth+1):\n", " x = tf.keras.layers.BatchNormalization(center=False, scale=False)(x)\n", " x = tf.keras.layers.Conv2D(\n", " 64, 3,\n", " padding='same', name='conv%d' % layers,\n", " activation=tf.keras.activations.swish,\n", " use_bias=False\n", " )(x)\n", "\n", " x = tf.keras.layers.Conv2D(output_channels, kernel_size=1, kernel_initializer=\"zeros\")(x)\n", " return tf.keras.Model([noisy_images, noise_variances], x, name=\"simple-residual-net\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Residual U-Net" ] }, { "cell_type": "code", "execution_count": 7, "metadata": {}, "outputs": [], "source": [ "widths = [32, 64, 96, 128]\n", "block_depth = 2\n", "\n", "def ResidualBlock(width):\n", " def apply(x):\n", " input_width = x.shape[3]\n", " if input_width == width:\n", " residual = x\n", " else:\n", " residual = tf.keras.layers.Conv2D(width, kernel_size=1)(x)\n", " x = tf.keras.layers.BatchNormalization(center=False, scale=False)(x)\n", " x = tf.keras.layers.Conv2D(\n", " width, kernel_size=3, padding=\"same\", activation=tf.keras.activations.swish\n", " )(x)\n", " x = tf.keras.layers.Conv2D(width, kernel_size=3, padding=\"same\")(x)\n", " x = tf.keras.layers.Add()([x, residual])\n", " return x\n", "\n", " return apply\n", "\n", "\n", "def DownBlock(width, block_depth):\n", " def apply(x):\n", " x, skips = x\n", " for _ in range(block_depth):\n", " x = ResidualBlock(width)(x)\n", " skips.append(x)\n", " x = tf.keras.layers.AveragePooling2D(pool_size=2)(x)\n", " return x\n", "\n", " return apply\n", "\n", "\n", "def UpBlock(width, block_depth):\n", " def apply(x):\n", " x, skips = x\n", " x = tf.keras.layers.UpSampling2D(size=2, interpolation=\"bilinear\")(x)\n", " for _ in range(block_depth):\n", " x = tf.keras.layers.Concatenate()([x, skips.pop()])\n", " x = ResidualBlock(width)(x)\n", " return x\n", "\n", " return apply\n", "\n", "\n", "def get_network(image_size, widths, block_depth):\n", " # use the correct number of channels\n", " noisy_images = tf.keras.Input(shape=(image_size, image_size, 1))\n", " noise_variances = tf.keras.Input(shape=(1, 1, 1))\n", "\n", " e = tf.keras.layers.Lambda(sinusoidal_embedding)(noise_variances)\n", " e = tf.keras.layers.UpSampling2D(size=image_size, interpolation=\"nearest\")(e)\n", "\n", " x = tf.keras.layers.Conv2D(widths[0], kernel_size=1)(noisy_images)\n", " x = tf.keras.layers.Concatenate()([x, e])\n", "\n", " skips = []\n", " for width in widths[:-1]:\n", " x = DownBlock(width, block_depth)([x, skips])\n", "\n", " for _ in range(block_depth):\n", " x = ResidualBlock(widths[-1])(x)\n", "\n", " for width in reversed(widths[:-1]):\n", " x = UpBlock(width, block_depth)([x, skips])\n", "\n", " x = tf.keras.layers.Conv2D(1, kernel_size=1, kernel_initializer=\"zeros\")(x)\n", "\n", " return tf.keras.Model([noisy_images, noise_variances], x, name=\"residual_unet\")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Difussion Model" ] }, { "cell_type": "code", "execution_count": 8, "metadata": {}, "outputs": [], "source": [ "class DiffusionModel(tf.keras.Model):\n", " def __init__(self, network):\n", " super().__init__()\n", " self.normalizer = tf.keras.layers.Normalization()\n", " self.network = network\n", " self.ema_network = tf.keras.models.clone_model(self.network)\n", "\n", " def compile(self, **kwargs):\n", " super().compile(**kwargs)\n", " self.noise_loss_tracker = tf.keras.metrics.Mean(name=\"n_loss\")\n", " self.image_loss_tracker = tf.keras.metrics.Mean(name=\"i_loss\")\n", "\n", " @property\n", " def metrics(self):\n", " return [self.noise_loss_tracker, self.image_loss_tracker]\n", "\n", " def denormalize(self, images):\n", " images = self.normalizer.mean + images * self.normalizer.variance**0.5\n", " return tf.clip_by_value(images, 0.0, 1.0)\n", "\n", " def diffusion_schedule(self, diffusion_times):\n", " # diffusion times -> angles\n", " start_angle = tf.acos(max_signal_rate)\n", " end_angle = tf.acos(min_signal_rate)\n", " diffusion_angles = start_angle + diffusion_times * (end_angle - start_angle)\n", " # angles -> signal and noise rates\n", " signal_rates = tf.cos(diffusion_angles)\n", " noise_rates = tf.sin(diffusion_angles)\n", " # note that their squared sum is always: sin^2(x) + cos^2(x) = 1\n", " return noise_rates, signal_rates\n", "\n", " def denoise(self, noisy_images, noise_rates, signal_rates, training):\n", " # the exponential moving average weights are used at evaluation\n", " if training:\n", " network = self.network\n", " else:\n", " network = self.ema_network\n", " # predict noise component and calculate the image component using it\n", " pred_noises = network([noisy_images, noise_rates**2], training=training)\n", " pred_images = (noisy_images - noise_rates * pred_noises) / signal_rates\n", " return pred_noises, pred_images\n", "\n", " def reverse_diffusion(self, initial_noise, steps):\n", " # reverse diffusion = sampling\n", " batch = initial_noise.shape[0]\n", " step_size = 1.0 / steps\n", "\n", " # important line:\n", " # at the first sampling step, the \"noisy image\" is pure noise\n", " # but its signal rate is assumed to be nonzero (min_signal_rate)\n", " next_noisy_images = initial_noise\n", " for step in range(diffusion_steps):\n", " noisy_images = next_noisy_images\n", " diffusion_times = tf.ones((batch, 1, 1, 1)) - step * step_size\n", " noise_rates, signal_rates = self.diffusion_schedule(diffusion_times)\n", " pred_noises, pred_images = self.denoise(\n", " noisy_images, noise_rates, signal_rates, training=False\n", " )\n", "\n", " # this new noisy image will be used in the next step\n", " next_diffusion_times = diffusion_times - step_size\n", " next_noise_rates, next_signal_rates = self.diffusion_schedule(\n", " next_diffusion_times\n", " )\n", " next_noisy_images = (\n", " next_signal_rates * pred_images + next_noise_rates * pred_noises\n", " )\n", " return pred_images\n", "\n", " def generate(self, num_images, steps):\n", " # noise -> images -> denormalized images\n", " initial_noise = tf.random.normal(shape=(num_images, image_size, image_size, 1))\n", " generated_images = self.reverse_diffusion(initial_noise, steps)\n", " generated_images = self.denormalize(generated_images)\n", " return generated_images\n", "\n", " def train_step(self, images):\n", " # normalize images to have standard deviation of 1, like the noises\n", " images = self.normalizer(images, training=True)\n", " noises = tf.random.normal(shape=images.shape)\n", " diffusion_times = tf.random.uniform(\n", " shape=(batch_size, 1, 1, 1), minval=0.0, maxval=1.0\n", " )\n", " noise_rates, signal_rates = self.diffusion_schedule(diffusion_times)\n", "\n", " # mix the images with noises accordingly\n", " noisy_images = signal_rates * images + noise_rates * noises\n", "\n", " with tf.GradientTape() as tape:\n", " # train the network to separate noisy images to their components\n", " pred_noises, pred_images = self.denoise(\n", " noisy_images, noise_rates, signal_rates, training=True\n", " )\n", "\n", " noise_loss = self.loss(noises, pred_noises) # used for training\n", " image_loss = self.loss(images, pred_images) # only used as metric\n", "\n", " gradients = tape.gradient(noise_loss, self.network.trainable_weights)\n", " self.optimizer.apply_gradients(zip(gradients, self.network.trainable_weights))\n", "\n", " self.noise_loss_tracker.update_state(noise_loss)\n", " self.image_loss_tracker.update_state(image_loss)\n", "\n", " # track the exponential moving averages of weights\n", " for weight, ema_weight in zip(self.network.weights, self.ema_network.weights):\n", " ema_weight.assign(ema * ema_weight + (1 - ema) * weight)\n", "\n", " return {m.name: m.result() for m in self.metrics}" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Complete Model\n", "\n", "Chose one of the residual networks." ] }, { "cell_type": "code", "execution_count": 9, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Model: \"simple-residual-net\"\n", "__________________________________________________________________________________________________\n", " Layer (type) Output Shape Param # Connected to \n", "==================================================================================================\n", " input_2 (InputLayer) [(None, 1, 1, 1)] 0 [] \n", " \n", " input_1 (InputLayer) [(None, 32, 32, 1)] 0 [] \n", " \n", " lambda (Lambda) (None, 1, 1, 64) 0 ['input_2[0][0]'] \n", " \n", " conv2d (Conv2D) (None, 32, 32, 32) 64 ['input_1[0][0]'] \n", " \n", " up_sampling2d (UpSampling2D) (None, 32, 32, 64) 0 ['lambda[0][0]'] \n", " \n", " concatenate (Concatenate) (None, 32, 32, 96) 0 ['conv2d[0][0]', \n", " 'up_sampling2d[0][0]'] \n", " \n", " conv2d_1 (Conv2D) (None, 32, 32, 64) 55360 ['concatenate[0][0]'] \n", " \n", " batch_normalization (BatchNorm (None, 32, 32, 64) 128 ['conv2d_1[0][0]'] \n", " alization) \n", " \n", " conv2 (Conv2D) (None, 32, 32, 64) 36864 ['batch_normalization[0][0]'] \n", " \n", " batch_normalization_1 (BatchNo (None, 32, 32, 64) 128 ['conv2[0][0]'] \n", " rmalization) \n", " \n", " conv3 (Conv2D) (None, 32, 32, 64) 36864 ['batch_normalization_1[0][0]'] \n", " \n", " batch_normalization_2 (BatchNo (None, 32, 32, 64) 128 ['conv3[0][0]'] \n", " rmalization) \n", " \n", " conv4 (Conv2D) (None, 32, 32, 64) 36864 ['batch_normalization_2[0][0]'] \n", " \n", " batch_normalization_3 (BatchNo (None, 32, 32, 64) 128 ['conv4[0][0]'] \n", " rmalization) \n", " \n", " conv5 (Conv2D) (None, 32, 32, 64) 36864 ['batch_normalization_3[0][0]'] \n", " \n", " batch_normalization_4 (BatchNo (None, 32, 32, 64) 128 ['conv5[0][0]'] \n", " rmalization) \n", " \n", " conv6 (Conv2D) (None, 32, 32, 64) 36864 ['batch_normalization_4[0][0]'] \n", " \n", " batch_normalization_5 (BatchNo (None, 32, 32, 64) 128 ['conv6[0][0]'] \n", " rmalization) \n", " \n", " conv7 (Conv2D) (None, 32, 32, 64) 36864 ['batch_normalization_5[0][0]'] \n", " \n", " batch_normalization_6 (BatchNo (None, 32, 32, 64) 128 ['conv7[0][0]'] \n", " rmalization) \n", " \n", " conv8 (Conv2D) (None, 32, 32, 64) 36864 ['batch_normalization_6[0][0]'] \n", " \n", " batch_normalization_7 (BatchNo (None, 32, 32, 64) 128 ['conv8[0][0]'] \n", " rmalization) \n", " \n", " conv9 (Conv2D) (None, 32, 32, 64) 36864 ['batch_normalization_7[0][0]'] \n", " \n", " batch_normalization_8 (BatchNo (None, 32, 32, 64) 128 ['conv9[0][0]'] \n", " rmalization) \n", " \n", " conv10 (Conv2D) (None, 32, 32, 64) 36864 ['batch_normalization_8[0][0]'] \n", " \n", " conv2d_2 (Conv2D) (None, 32, 32, 1) 65 ['conv10[0][0]'] \n", " \n", "==================================================================================================\n", "Total params: 388,417\n", "Trainable params: 387,265\n", "Non-trainable params: 1,152\n", "__________________________________________________________________________________________________\n", "None\n" ] } ], "source": [ "network = get_network_custom(image_size,block_depth=10)\n", "# network = get_network(image_size,widths,block_depth)\n", "print(network.summary())" ] }, { "cell_type": "code", "execution_count": 10, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1/10\n", "937/937 [==============================] - 2197s 2s/step - n_loss: 0.1291 - i_loss: 0.3230\n", "Epoch 2/10\n", "937/937 [==============================] - 1940s 2s/step - n_loss: 0.0934 - i_loss: 0.2160\n", "Epoch 3/10\n", "937/937 [==============================] - 1929s 2s/step - n_loss: 0.0881 - i_loss: 0.2033\n", "Epoch 4/10\n", "937/937 [==============================] - 1960s 2s/step - n_loss: 0.0864 - i_loss: 0.1974\n", "Epoch 5/10\n", "937/937 [==============================] - 1923s 2s/step - n_loss: 0.0841 - i_loss: 0.1926\n", "Epoch 6/10\n", "937/937 [==============================] - 1923s 2s/step - n_loss: 0.0835 - i_loss: 0.1908\n", "Epoch 7/10\n", "937/937 [==============================] - 1926s 2s/step - n_loss: 0.0823 - i_loss: 0.1864\n", "Epoch 8/10\n", "937/937 [==============================] - 1948s 2s/step - n_loss: 0.0813 - i_loss: 0.1817\n", "Epoch 9/10\n", "937/937 [==============================] - 1923s 2s/step - n_loss: 0.0813 - i_loss: 0.1825\n", "Epoch 10/10\n", "937/937 [==============================] - 1927s 2s/step - n_loss: 0.0801 - i_loss: 0.1787\n" ] }, { "data": { "text/plain": [ "" ] }, "execution_count": 10, "metadata": {}, "output_type": "execute_result" } ], "source": [ "model = DiffusionModel(network)\n", "model.compile(\n", " optimizer=tfa.optimizers.AdamW(learning_rate=learning_rate, weight_decay=weight_decay),\n", " loss=tf.keras.losses.mean_absolute_error,\n", ")\n", "model.normalizer.adapt(train_dataset)\n", "model.fit(\n", " train_dataset,\n", " epochs=num_epochs,\n", ")" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Visualize" ] }, { "cell_type": "code", "execution_count": 13, "metadata": {}, "outputs": [ { "data": { "image/png": "iVBORw0KGgoAAAANSUhEUgAAAk0AAAGFCAYAAADgqcccAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjUuMiwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8qNh9FAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAmv0lEQVR4nO3deZxcdZnv8V9V9Zp00t3ZOjvZFxbDviMKhk0RBhQILgOMzOjovaPzckav1+XqdV7OoqMzzr3OS0cUFZFNwMEogwiCQCALhIQskL2TkL3TSaeXdC33D3zdGX2+hzwnXVV9qvrz/vPLQ/WpU+dUPzmv39O/VKFQKAQAAAC8qfRgHwAAAEAloGkCAABwoGkCAABwoGkCAABwoGkCAABwoGkCAABwoGkCAABwoGkCAABwqPEWLky/t5THgWNJpWzG3yU9bo/l7zvu/3dh5gb9H0r1eajPPi6uFUQY0L1Qib8X0hmd53PlPQ4cl1R9vcwLfX0Dfm3PvcCTJgAAAAeaJgAAAAeaJgAAAAeaJgAAAAf3QnAAv1PuRdUs4gaKJykLvofqcM8A33cxFnwPBE+aAAAAHGiaAAAAHGiaAAAAHGiaAAAAHFgIXg7FWPA3FBYIAkkk7t9UXZ0sLRw9ql+D+zc5ov7CvvqM4tTGNVSviXLvnFDkn8eTJgAAAAeaJgAAAAeaJgAAAAeaJgAAAAeaJgAAAAem58qh3FMZpVKJxwx4RVzfqdNPNNnrFzbL2tGv6C0eah9fYcNquG9K9Z2Qzugfl9F5ISe2RonaLiXOsVXDZzRUlOmz4kkTAACAA00TAACAA00TAACAA00TAACAAwvBB0slLjCsxGNGaVXRcEDN+DaZb77KLvq+7vqnZe2Pnzpf5rN/Lf59WohYqKwWQUctah5sRficUzX211DXNWfI2j1n6n/nj9xgs7bHd8ja7JZt/oMrhnLfIxE/LzNurMl6F0yVtUfaamVe25M3Wf2BrK7t6NXHsa/TZIXOQ7I213XEZKm0fn+FrD6OYuNJEwAAgANNEwAAgANNEwAAgANNEwAAgANNEwAAgAPTc5VCTURETF+oaZRyTRYkjToXKKIKnJKL0v2WyTIfd9FOk719xBpZ++O0np6LNf1WsBNK1Sw9bJjJdizU19VnL35Q5t9Yd4nJjq4fpX/e1nZ9IKW6lovxujEm8KKmQNsXzTDZWxctl7Wfbntc5n3irbzWP1rWPnV4rszvXn62yVpe1FN8bc/bqbr0Rj0VmevokHmx8aQJAADAgaYJAADAgaYJAADAgaYJAADAgaYJAADAgdGiIkrV1uk8I3rTtO5X893d+sVjTGAM1Uk5hXNRYmqftBCSu1daiJ6oPDhb37+3TXrRZA93nC5rxywtwr9Dq2gi0SM/9wSTzZ2jJ6TeM2KzzF+YZPOV0xbI2pYl+pot+3dFjInoONdE9oRxMr/pFjsR9/FRq2TtsHSTzLvydj+5CZnDsvac+iUyv33hsyZbdbE+5q9vWWiyw/fMk7Wj73hB5sX+LuJJEwAAgANNEwAAgANNEwAAgANNEwAAgAMLwY9FLNbLzJouS3e8a7zM+y+wfwo+l9P96th7G2U+/MFlNkzwYlsMERV4DaZH6+01Oufp93L+sNdM9i+rL5a1M5brrRyG1sYoWmbkSJm3v22EyT4+4TFZu/aoXqz/2JoTTTZvyW5Zm0vKcEiJFvxnDtnF2iGE8P0155js1vP1NiqZqG1bhP6g75vaVMTvODEYdX7DXll71ty7bO05n9Cve3eDzPNHjtgwxvv7QzxpAgAAcKBpAgAAcKBpAgAAcKBpAgAAcKBpAgAAcGB67hgys2eYbMNt+k++f/Kah2R+44iNJjuQ0xMH7/3VX8l8eAVOKQFGnK0jSvXzRjXL0gvPWCvzM+rtxFbtCr3NRGqHfg2EEOpqZdw9yc4WTqvT01R/v/0KmU9YbF87v6U9xsFVj8KW7TKf/o8zTXbR9k/K2tzofv3i/fY5S+0B3UbUdegJtZQYJc3qofGQa7DfDS3bIybfIn6nSgP4zuFJEwAAgANNEwAAgANNEwAAgANNEwAAgANNEwAAgAPTc7+TqtV7Gm27rs1kH37XL2Xt+0dsiXj1jEmW9ul96oa/HjEBkLavUYn7fmGIK+WknJDK2Psm36Dv9S9OXCzz/+huNdmEJT2yNn/4cIyjiyFqr6wyn8+BKHTrc1bXYf/t/sCBs2Tt2sdny3z6E3Z/wFycaaoqku/u1v9h2RoTzVqpW4BUnb5HUmLfuJCPuAbTEdNzjXZUrtCjr40gfi8XevXeevmIvNh40gQAAOBA0wQAAOBA0wQAAOBA0wQAAOAwNBeCi0XVufNPkqUzrtxkskUjX5a1mVS9zO/vsou+P7/4vbJ23ovbZJ5Vi76rYHEoBqDcW5JUopT9d2FuuN7OY3qt3hrlczsuMFndht2yNluqxcdJ/VxjfAflIxb7Tny6z2Qv7DhN1k5d3SXz3L59rmMY0sTvkEKfvl4LffYzKZqDnaV77TLgSRMAAIADTRMAAIADTRMAAIADTRMAAIADTRMAAIBDcqfnSjgZlpk7w2Sv3qxPxSMnPGiy5rT+E/P/sP8Umd91/yUmm/eD7bI2u2OnzCWmQypXMSbfVD0Tlb8n1WAnWvub9fRclJZaO/V1IKu/AxJ9ntVWTAMV5/1G1NY+s9pkY5fq8xu13UYhyecdVYUnTQAAAA40TQAAAA40TQAAAA40TQAAAA7JXQhehIV96REjZL7zHWNN9qmLHpa1c2rtgsTXc3ox4h1PXizzeXfvMll2i94uJRJbZlSXUn12SbkmErIgvdBrt4NI9+Vl7aVr3i3zXZ32e+SEPj3IkWhqK6YEUFt2RG7jEXVdxakVW+u8Edv6Qjbr/3kYEnjSBAAA4EDTBAAA4EDTBAAA4EDTBAAA4EDTBAAA4JDc6bkiyJ4+S+YNV+4x2a3NW2RtbcpuubCpf6SsHbVS96CF9hhbo0QZ6NRRQqaZUAGKca0k5Loq9B81WeM6O80aQgg9fzte5uPFsF3+iJ6gRYkV4xos6CnCgh6qrB7ivs60tOjSYY0yz+583YYJudfLhSdNAAAADjRNAAAADjRNAAAADjRNAAAADjRNAAAADlU9Pbf18gaZf2XWAyZLR/SP3Xk7fXPn3ktlbeuGXpnne3VeVkNswgEDUOXXSnb7DpnXRuRKdZ+hMmNfzaJK1ehf67lzTzbZazfWy9qF57ws8y0fs68Rlq3RB5LQvQ4HiidNAAAADjRNAAAADjRNAAAADjRNAAAADtWxEDxi24cRp+yX+TkNdluT2lSTrH3laNZkT740X9bOX71B5kVZDpeExZJsxVJaSTm/STkOHFs6o3O1CDeqdrCp64pr8PdFnI90o93uJHXCJFm7/uY6k33l0ntl7Sn1euuvD81aYLLmFfrYqnVbGp40AQAAONA0AQAAONA0AQAAONA0AQAAONA0AQAAOFTc9Jz6E/EHbzpT1v7VnHtkPjaj/3S88sMD55ls1It6CiW3/4D7dWNLwtRIEo6hmiXl/BbjOJh+Ko84W1VU0rYWSb9OxPWdqqnVtTHGyKK2QEk1DZd59zkzTLb1On3u7r/kX0x2cp2+T5/oaZF5bbd9L4VcBV1XRcCTJgAAAAeaJgAAAAeaJgAAAAeaJgAAAAeaJgAAAIfKm56rt5Nv/TfqqbULG9sjXkW8RkFPAJwzYqPJHjzpLFk77tQTZZ5/eb0Ih9bEAYaYONNPTNohATItzSY7etpMWbt/foPJcjZ643WP+o8hH7E9YHaYzmvO6TDZnW+5W9a+pc6+eFe+T9Y+eOB0mTe9ss9kuaTfp0Xet5UnTQAAAA40TQAAAA40TQAAAA40TQAAAA6VtxB8mF0Rd+WUtbK2LdMo83SwC8N6Cnq13jXD7cK3gwv/XdZ+Y/e1Mp/WbhcYFmXLFRbQDm1FXuA4aCrxmIuB+/cNZb6OM6NHybzj8jkmG/kn22Xtv834qcnmR+yiEqU/2GGgvogtV6KebjSk7OLuprRekZ4r2PN85yE9vPTsfafJfHL7iogjSbAiX0s8aQIAAHCgaQIAAHCgaQIAAHCgaQIAAHCgaQIAAHBI7PRcukFPAHSfNc1kIzJb9GuIKbkQQsikbK/YlNI/ryvfa7KR6R5Zm9KDD/HEmagZalM2SVHuqSemrKoTn98bSnUe0npPksNvnS3zmR9bZ7L/M/UXsrY5bSezN/d3ydqGiNt3XMZOgg9L6XNRK6bk4nql306I/9Nz75C18x/ZK/Ncr/19ONTwpAkAAMCBpgkAAMCBpgkAAMCBpgkAAMBh0BeCp4cPl3nP2/Sfd999i12I9r7mF2VtJtUkc7W4+/k+fRx/+uxHTDbuF/Wydtoz7TLPdnTKXGJxaPKV+zPimiiLTIvd7iiEEMKEcSbqH6O/W/J1+t+h9e0HTZZ7bZP+eXzeRRG1Xcrus/Vn9O2Ji01WG/Er8h8PzDDZnd+5QtbWd+rPs+/agyb74kl6i64rh3Xo107ZvVv25Y7I2r/edIPJJv9cLzDPv7ZZ5uBJEwAAgAtNEwAAgANNEwAAgANNEwAAgANNEwAAgENppudibPuQP8lOIYQQQvuirMzvPfO7JmvL2D9pH0IInXm93cnf7T3HZI/+6wWydu4Te0xW2LFL1ma7u2XONAwMdY/EvU6K8RoDVYFbvKRHjJD5lv92ksz/4Y/vMNmMmgOydm/ebo0RQgi3/uzDJpvzOfvdEkII+cOHZT5gUZ9VNRDvrfPtM2Xp+676jcxPqLG/Dr93SL/GPX9/uckm/eI1fWxi+5IQQtjVaCfE18yYJGvf3hixrYm4zxYfOUH/vAdtPvEpu3VMCCHksvr3L3jSBAAA4ELTBAAA4EDTBAAA4EDTBAAA4EDTBAAA4OCfnkvrPWpCIS+yiMkZ8Rp7ztSTLItOflrmJ9fZKYm1/f2y9mPrb5b5oUcmmGziA+tlbW7ffpnjOMWZtkrqZFYxjqsY72Gwz0NSjiGmwpypMr/6umdlfla9/Q5oTTfI2vF5PUFbP7nLZOkmvd9lyabnKvCz8ko32b0A95yu79O3j1gjczUp9/XF75S1cxa/arK4vytGbrUTarXpeFNrW8WU2z+9domsnfjobpPl9uspUETjSRMAAIADTRMAAIADTRMAAIADTRMAAICDfyF4PjfgH5YerrcYkD+uoBfxLeuzi8k/8vIHZe24b+jtVcY/t9xkub4+97FhAMq9WLoUknpcsMTwydFWvYj7spGrZN6UqjVZf0F/Hz7f16oPY9lIk+W7tspaHIec/TyaN+rS25bcIvOGlfb305xH9OLuWAuoIwZH6g7a7VWm1OrX7VcDVyGEH3acZ7L8L8fI2vzmZVFHiBh40gQAAOBA0wQAAOBA0wQAAOBA0wQAAOBA0wQAAODgn54rBrHdybhlesuAh0ZdKPP7h11gsslP6m1UapeulXmeSbnBU+6tUaJ+HoYGMfWbirjUdvTrybeuukMm25XT20r9xfIbZT7zZ/tMluuyW6vg+OS77fY1Y763VNaO/XG9zAtH7e+RXL+dcIstpZ9N9I22x9GS0dvwHM7ri/YnL59psnlP2GsthCK9F/CkCQAAwIOmCQAAwIGmCQAAwIGmCQAAwIGmCQAAwKGs03P53l4bvqD3e5q6Qh9aQU0RROyLp3frwaAqxpRcnAk89onDH6g9IL6HQgjPHJrtfo2vrLpS5hPu1JNZ+VfX2JBr8w3qfi7CuSlkszoX+9RF/swiTPumavXvsn2n2HxsRk+Tb8o2y7xppd1HMf/aavexVaRiTGAPYKqaJ00AAAAONE0AAAAONE0AAAAONE0AAAAO5d1GJYaoRXyoICVa4MkCWgxEeudemS/58Wky/02rzce/oLduanx6nczzfJ9FK/f9HOfnlfDY+lrta+/PDZe19+w7R+ZTfrrdZNmsvjarRtzPJG23PEqlWQgOAABQUjRNAAAADjRNAAAADjRNAAAADjRNAAAADomdnkMVGOiftWdKDiWQ271H5uO/ofM4Wy7kuWaTQ0xNhRAit90asKjrRG39FUKoP2DrF3cukLVPb54p81md22zINfj7xOddCBHXhgNPmgAAABxomgAAABxomgAAABxomgAAABxYCH4sLFAuD85pckQtaB2qn1ES3jefSXylWvAdJeKzKERsazJhSa/JfjH8LFnbsiHiR/b1+Y4t6cp9fQ/g2uBJEwAAgANNEwAAgANNEwAAgANNEwAAgANNEwAAgAPTc8fCdAqGGq55oHgi7qfMEytMNu2JeC+dP57jSaIK+s7hSRMAAIADTRMAAIADTRMAAIADTRMAAIADTRMAAIAD03NJE7UHj1JBEwc4tlSNvh0LuYh9kvj8K1uc/bb4rIFE4EkTAACAA00TAACAA00TAACAA00TAACAAwvBk6bcCz7jLEaNU4vYCtnsYB/CG4pxTUThWvlPVXAuUvX1Mi/09ZX5SDBo0hmdFyI2eYlz3cf9fhnoz3PgSRMAAIADTRMAAIADTRMAAIADTRMAAIADTRMAAIBDqlCoghEOAACAEuNJEwAAgANNEwAAgANNEwAAgANNEwAAgANNEwAAgANNEwAAgANNEwAAgANNEwAAgANNEwAAgANNEwAAgANNEwAAgEONt3Bh+r0D/2npjM0KeV1biVviqfcXQgj5XHmPA8f0WP6+4/5/i3IvAAlRUfdCKqXzSvx9gcTx3As8aQIAAHCgaQIAAHCgaQIAAHCgaQIAAHBwLwQvimpfEF3t768SRS0cRTQW2yKpuAYxyHjSBAAA4EDTBAAA4EDTBAAA4EDTBAAA4FDeheBAKakFzJW0cHSoLsBOyvuOOo6U/bdlurFBlua7u/VrVPtn+IcGei8m5ZoA/gBPmgAAABxomgAAABxomgAAABxomgAAABxomgAAAByYnkMyxNnuJGqCptIna5Jy/OU+jqS8bzElF0IINePGmKz3xMmytmHNdplnd+0+/uOqROozjTMRl5RrAvgDPGkCAABwoGkCAABwoGkCAABwoGkCAABwYCF4BUvVRHx8mYyJCv1ZXZvPFfGIBoCFn5W5dUTaXmshhJAZ2STzwtF+k+V7evRrl/l9p+tqZX743BNM1v5OfWyjls6Q+dg79puskI24J6tVkq/jYqjE+xex8aQJAADAgaYJAADAgaYJAADAgaYJAADAgaYJAADAobqn55I8zRDz2NINDSbb+77TZG3H23pNNvaX9bK29YGXZJ7vta+ReOqcJuGz9kr6sYrzW9M2Vpa2L9JTZI177XtsvXeFrC309cU4uBgi7r3U9Cky33WjPY6vnvGgrP3MnvfJfEw+4Z8tNLZ+OX5J/v07ADxpAgAAcKBpAgAAcKBpAgAAcKBpAgAAcKBpAgAAcKju6bkkr9KPOraIiYP0hDaT5d7dIWv/49R/M9k76z8ia1tf0VNO4aU1Ok+yJH/eVSAl9jQ8OmeCrL3plsdl/oMHLzXZ6EY7GRpCCLkSTc+lavQec3vPGyPzP5r3nMm2HtW1DfsiJoaSssdjElX61GulKPd5rtLPkCdNAAAADjRNAAAADjRNAAAADjRNAAAADtW9ELwCZZpHynzXZRNN9pE5D8vayTV2y5T/teARWfs3ly2S+cSXIg5QqdI/l18U1XRuxELwzul6EffHR62S+Z2ZS2xYWzegw4orPW2yzLPX6MGK61uWmuwv198oa9te6I74ofbcsTj8d5J8LyT52OKqpvcyiHjSBAAA4EDTBAAA4EDTBAAA4EDTBAAA4EDTBAAA4FAd03NxJ5TUJEscRZh6yYzUU3J7rj9R5m+5dbXJbhyxMeLV7cf6cvcUWdm8qQgTPHGmMqLOfTEmiZK4HcNg//wiSg8bZrLuNn3v5UNe5nWdoj6bHdBxvZlUjb0Xuk7UW6B8+aQfyHxLv63vedBuaxRCCE3LVsi8oK7vapqsLJdKPGeVeMyVqEznmSdNAAAADjRNAAAADjRNAAAADjRNAAAADjRNAAAADtUxPRd3dXyZ93zKjB5lsr3XzJW1Ez+4WebfnPyYyepTes+ufz04w2SP/OhCWTvlmU0yL9k8U5xzH3cagmmUkkq12InP3gV6r7WdOf05j32xz2T5riMDO7A3kZ5j74X2K3XtKXX7ZP7Bde83WdvT+2Vtrs++v0hcr/GV8pyVavqWz/nYijH5VqbzzJMmAAAAB5omAAAAB5omAAAAB5omAAAAh+pYCJ4QmdZWme+72i76nn/7K7L2q5N/IfOc6G8/s/tMWbv4vvNMNu3+dlmbfX2XzBOBBZTFE2Ohpdp6JIQQeqfb7URuOvEFWdsS8c+xTK9dIF6IWDQeR7qhQea7Lh5tsi+87QFZezivt/jZ8dIEk81+fV2Mo0PRlHKrjHJ/3yRx26djYbE8T5oAAAA8aJoAAAAcaJoAAAAcaJoAAAAcaJoAAAAcmJ47DpmWZpm/vmi+zD/w57802a3Nq2XtpmytzG9d+ccma/qJPo5pT240WXbXblmLISLGdEpmjJ04CyGEbefVm+yGlqWy9tJlt8t86pa9JsvG3dZITPCkZkyVpQfPtduaXD5Mbx30kc3Xy3zKr/pNlus89GZHiFJJypRVMabIkvJe4qjEYy4ynjQBAAA40DQBAAA40DQBAAA40DQBAAA4sBD8GNLDh5vs4OV6wfcFty6X+Udb15vs/q4psvazT+jFqLN/eNQe2/PLZG02m5V51SjlVgpJVOb3mx+jtwPqm9djsjV9douREELI/LpFv/Y+sQg75vtL1dWZbMdCu8VLCCH8z7N/arKlfeNk7dZ7Z8p8whI7tJGLu3gd1UVdm2m9DU+mVQ/sBLF9UO5g50COqvQqceuXIuNJEwAAgANNEwAAgANNEwAAgANNEwAAgANNEwAAgAPTc7+jpuRCCOHwlSebrPH2nbL2f49/QuaPdtvJni/df4OsPfHb22We3dpusqE1s/BfDLFpjXK/33RXt8xHPjfKZJ/ffpOsnflUh8zzvb3+A4mYqktPnWSypit3ydpTG7aZbNHzH5K1sx7TWw3luo5EHSHw/6Vq9a/T7rP1VGbtYbs9T/qZlfrFk/Kdl5TjGEQ8aQIAAHCgaQIAAHCgaQIAAHCgaQIAAHCgaQIAAHAYktNzqRr7tnveeqKs7f7gQZPdMftuWfub3jaZ/+Wj7zPZ/G/ZqZ4QQshu3yHzIWmo7TGXEGpSM4QQ2r4rpsvy+rPI99u9EuNK19fL/NBbxprsa3P+r6x94OCZJhv5mJ6UzW9epw+EfebgkG7Re8ztvk1PjI4eYacym2+3k6EhhJDbs1fmhaNiT9LGRl3bb/ckTdXVytp0m73HQggh12rvnXSv3us0v2GLPo6+PplXCp40AQAAONA0AQAAONA0AQAAONA0AQAAOAzJheDpufbP2m+9Xi9oveeUH5msPTtS1n7i14tkPv9bB01WiQu+1QL6EEJIN+vzEdrs9jGpIz2yVC4+ZsH34Ig47+VewJmaMVXme0+3/9abXKOvq588c57J5j23X9bmirB4PXJ4QSnG9T3UhiUS/H5TEcd2xiQ9WPGhtqdM9snL/0zWjnt2mMy7ZreYrP3qvKxtHXfYZLNG7ZO17xrzW5mf2mC3+frEBr0lWN0n9fYxhZVrRTj4n58XT5oAAAAcaJoAAAAcaJoAAAAcaJoAAAAcaJoAAAAcqnp6Lmraa+u7R5vsixfeK2snZuzE0KUv/ImsnfdNO50QQgi5V9ZHHeLgi5j4yIyxk2/7r5wla49ce0jmE5ptvnvxFFk78Z/tFh2FYkwzIfHSI0bIfMdCew2GEMLCy5abbPGRObJ2zvftVhX5VzdFHEhGxqla+z2Smq8ngw7N1u+laWu3fY0VYooohFDI6m0pdHHlTB0VRYLfb6G/X+brD4yT+YIpXSb7H399l6zdldVbtFzQuMFkI9L6OJQjBf07sr+gn6ds6rf35M4OfWwz9h2QeT7Bn6EHT5oAAAAcaJoAAAAcaJoAAAAcaJoAAAAcaJoAAAAcqnp6LmpPtJFvtZNaVw/fJms//fpCk429r1HW5l9ZFePokkFNyYUQwrbbZpvsnTc9K2u/MPYFmf/g0HSTfbt7sj6Qgt4vCdUlPczuodVz0TxZe8J1esrt5OF2/6uv332trJ26/DkbRkzvZEbq74vec+1k3ub36KnTy059WeZPLT7NZNPW6u+RwmE9hYtkyx+y03AhhHBkib6+vzXVXhOfGq0nKjMpPaEcQr1JHjpip8NDCOFHu8412YqVegq0caeeJK0Vb7Flr/7uzndukXml40kTAACAA00TAACAA00TAACAA00TAACAQ1UvBA+t+s+7z27Za7Jc0ItDV+yx236MW75L1maT8ufhxXYQNZMmyNId106V+W0f+KXJ/rRljax9pLtN5n/39FUmm/9re+5DCCEXZ+uIShKxTU2St4OII93QoPMxejHqgYvs/dR8e7us/ezUf5f5bS/eYrJpD+ktG9JtdguLo7P0vbDlCr0w++arf2OyB0Ytk7VR3yMX99lFv6guUds+TbvXDh6FEMJPei412Ypr9TZTn5z0qMz/fPXNJss9ru+9cSt6TDZvnR62iBxGyNjfLYWjetuWfJVug8WTJgAAAAeaJgAAAAeaJgAAAAeaJgAAAAeaJgAAAIfqnp5L655w9V4xPROxu8f80XZSbucJs2RtZvNW96HFJqawMhHbxORn2jez6Wo9SXjL9Y/J/KOt6022Xg9JhE89cYPMZ/3ITsTlXt2oX6RaxZmSS/ikXc14OyXZvUBP++w+u07m77hmqck+3/akrG1I6a0cbp5lJ9fu+sKZ+jXq7HV/+rjXZO2nxyyR+Zn1du+IppSeGtyS7ZZ50w77GRZ67DQTyqDM91luwxaZT/5+h8k6n9fTzJ+tuV3mEzbusT9vj/55hb4+Wysr8WZ40gQAAOBA0wQAAOBA0wQAAOBA0wQAAOBQ3QvBd+ktO7K/OdlkP509W9ZeNWqVyT53xUmydvZq/efrc/v2mywzepSsDePHyrhvfJPJdp5eL2snXrHNZD+d+R1ZO79umMw39ttFg4uW/5msnfudiAWtK181USEhi5oHnVqMmpBzkxmpBwx2vmemyfov6ZS175y2XOafGfuMyZrSelF1lI+OeslkHzxL/7z+GKe0t6D/DdmQqjVZR15f83d3RixI77BLbgvVunVQ0pX7Psvr5da5/Xbrn/Rv9XZAUUp2BSV8KGXAot6fA0+aAAAAHGiaAAAAHGiaAAAAHGiaAAAAHGiaAAAAHKp6ei536JDMJz/8usm+Mv8qWfujS75tsivfYbdxCCGEF1adIfNRy1pN1v7ucbK27q37ZH7RxJUmu6n1eVl7cp3d76Q+pSftfnZET899+dX3mqzpIT1VFVatkHEhG7HvCpIzhSKmSPoX2Cm5EEKov8pu2fC9+XfJ2sk1eq5nWNpur7IvpyfRdufs1FoIIfQW7Gs8262nX1d1TTLZ1EY9ofRHI/V1rGzK6m1i7njyYpnPWyu2u3D/NKDMor6fEjz1G2vibwDHzJMmAAAAB5omAAAAB5omAAAAB5omAAAAB5omAAAAh6qenouS27jFZJMX62m2r8253GTfnf6wrD3zCrunXQgh9DfZ/eTOfY+dhgshhK9O+pXMh6XstE466GmBdWLDrQ+tvUHW9v68TeYTHt1lssIOfcz5PrtPXVKkakpwiVfRvkyZlhaTbbukUdZ+edYDJhuTiZqQ1Ofo4SNjTPap39pJzRBCaF2qp+dqxLBdKsYo2tOn6s+p9YojMp9eu9ZkS3vmyNqWNfrfofkt7c6jqyxR9xf76lWpJH/HlenYeNIEAADgQNMEAADgQNMEAADgQNMEAADgMCQXgisjntog822j7YLP+i/o0/als38m868Mv8Jknf0NsvaKVe+X+eEeW9/fn5G19c83maxtqd6qonXlapnnDh+2YZIXAZZTJZ6HiMXrfafPMNmCy9bJ2vMbdpqsWWyLEkIIz/XpxeSfevxGk839bresTa3W1+ZAFxm3rJ8v8x/OOVvm1y2wx7Gpxw53hBBCfWcFXhsDULIF31U0bFFSSd7WpNzKdM3wpAkAAMCBpgkAAMCBpgkAAMCBpgkAAMCBpgkAAMAhudNzpVwJL14jt/+ALB33230mez13VNZeNUxvlfDFPnua1z8wV9a2LdFbOTT32imV9NGI7Ut2bDVRrvOQLM1Fnc+o819h2M7hDamMnrQ8NNVOv/3zpJ/L2nGZYSbbltWTb3+x8laZz//mQZPl1m2UtYVCXuYD/Q6oad8r844DE2Wu/mWZTuljqO3RxyyvQybEonEOiq9Uk3ZJuY7ZRgUAACA5aJoAAAAcaJoAAAAcaJoAAAAcaJoAAAAckjs9l5CV96kOO3W26uh4WduS1pNEw3873GQT7td73eX26MkedXSxJt/ink+mV6pKIa8/z5o+m+/P633jMuKyer53iqzNrWzWx9G+yob5nKwtmbpanUcMAdWJ+6m/oKcR0/0x7hvuMQxUnGuoVNfbELuOedIEAADgQNMEAADgQNMEAADgQNMEAADgkNyF4AmR6zhosi997QOytpDWK0knPL7Hvm7Egu9SbRODIS5iS5LWpfbavPWhD8vaSSftNtme5/VQxPSHD8o8f0QPS5RTfoQdzAghhFRa3zfqX5abusbI2vr9EVsbDTWl2rIDGGQ8aQIAAHCgaQIAAHCgaQIAAHCgaQIAAHCgaQIAAHBgeu4YCn12Gmb8fet17dF+mecOHy7qMQGxRUwu5TZuNdncvzkoa1O1dvuRpu5X9OtGXfMJmKBKd+kJvsz2Vpn/923vMtn6p6fL2llbN8s86zy2qpGAzxkIIYSQFlseDWDrJp40AQAAONA0AQAAONA0AQAAONA0AQAAOJRmIbj6E/ohVM3iwNz+A4N9CBhM1XR9iwWRkdd3lWyNkdu+U+az/rZT5p3faDDZjCOrZW22q+v4DwwYCsr9PTKARd8KT5oAAAAcaJoAAAAcaJoAAAAcaJoAAAAcaJoAAAAcSjM9F2clfDEmkappmgnJl+TrqpT3QpLfdwyFrN7UJHdQT8+FEJWD797/YjDORZInWqv02uBJEwAAgANNEwAAgANNEwAAgANNEwAAgANNEwAAgENppufiGApTPUmecCiGKp2SqEicc5QT19t/ijoX6YzOi7EnGue/7HjSBAAA4EDTBAAA4EDTBAAA4EDTBAAA4DD4C8ErUdTC5yjVviVMMY6t2hfLD0QlXhPAUBN1nxZjwXecn1nu74Wohe6FfHmPo0x40gQAAOBA0wQAAOBA0wQAAOBA0wQAAOBA0wQAAOCQKhQYwQEAADgWnjQBAAA40DQBAAA40DQBAAA40DQBAAA40DQBAAA40DQBAAA40DQBAAA40DQBAAA40DQBAAA4/D+veBq9tig2KQAAAABJRU5ErkJggg==", "text/plain": [ "
" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "num_rows = 2\n", "num_cols = 3\n", "\n", "generated_images = model.generate(\n", " num_images=num_rows * num_cols,\n", " steps=diffusion_steps,\n", ")\n", "\n", "plt.figure(figsize=(num_cols * 2.0, num_rows * 2.0))\n", "for row in range(num_rows):\n", " for col in range(num_cols):\n", " index = row * num_cols + col\n", " plt.subplot(num_rows, num_cols, index + 1)\n", " plt.imshow(generated_images[index])\n", " plt.axis(\"off\")\n", "\n", "plt.tight_layout()" ] }, { "cell_type": "markdown", "metadata": {}, "source": [ "## Acknowledgments\n", "\n", "Thanks to [Maciej Skorski](https://www.kaggle.com/mskorski) for creating [Denoising Difussion Model](https://www.kaggle.com/code/mskorski/denoising-difussion-model). It inspires the majority of the content in this chapter." ] } ], "metadata": { "kernelspec": { "display_name": "py39", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.9.13" } }, "nbformat": 4, "nbformat_minor": 2 }