{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Deep Learning Models -- A collection of various deep learning architectures, models, and tips for TensorFlow and PyTorch in Jupyter Notebooks.\n",
    "- Author: Sebastian Raschka\n",
    "- GitHub Repository: https://github.com/rasbt/deeplearning-models"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Sebastian Raschka \n",
      "\n",
      "CPython 3.6.1\n",
      "IPython 6.0.0\n",
      "\n",
      "tensorflow 1.2.0\n"
     ]
    }
   ],
   "source": [
    "%load_ext watermark\n",
    "%watermark -a 'Sebastian Raschka' -v -p tensorflow"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "# Model Zoo -- Siamese Network with Multilayer Perceptrons"
   ]
  },
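  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "A siamese network feeds two inputs through the same multilayer perceptron, with both branches sharing all weights, and compares the two outputs by their Euclidean distance $d$. Training minimizes the contrastive loss implemented below,\n",
    "\n",
    "$$\\mathcal{L}(y, d) = y \\, d^2 + (1 - y) \\, \\max(m - d, 0)^2,$$\n",
    "\n",
    "where $y$ is the pair label (1 if both MNIST images show the same digit, 0 otherwise) and $m$ is the margin hyperparameter: matching pairs are pulled together, while non-matching pairs are pushed at least $m$ apart."
   ]
  },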
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {
    "scrolled": true
   },
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Extracting ./train-images-idx3-ubyte.gz\n",
      "Extracting ./train-labels-idx1-ubyte.gz\n",
      "Extracting ./t10k-images-idx3-ubyte.gz\n",
      "Extracting ./t10k-labels-idx1-ubyte.gz\n",
      "Initializing variables:\n",
      "<tf.Variable 'siamese_net/fc_1/weights:0' shape=(784, 256) dtype=float32_ref>\n",
      "<tf.Variable 'siamese_net/fc_1/biases:0' shape=(256,) dtype=float32_ref>\n",
      "<tf.Variable 'siamese_net/fc_2/weights:0' shape=(256, 256) dtype=float32_ref>\n",
      "<tf.Variable 'siamese_net/fc_2/biases:0' shape=(256,) dtype=float32_ref>\n",
      "<tf.Variable 'siamese_net/fc_3/weights:0' shape=(256, 1) dtype=float32_ref>\n",
      "<tf.Variable 'siamese_net/fc_3/biases:0' shape=(1,) dtype=float32_ref>\n",
      "Epoch: 001 | AvgCost: 0.472\n",
      "Epoch: 002 | AvgCost: 0.258\n",
      "Epoch: 003 | AvgCost: 0.250\n",
      "Epoch: 004 | AvgCost: 0.250\n",
      "Epoch: 005 | AvgCost: 0.250\n"
     ]
    }
   ],
   "source": [
    "import numpy as np\n",
    "import tensorflow as tf\n",
    "from tensorflow.examples.tutorials.mnist import input_data\n",
    "\n",
    "\n",
    "##########################\n",
    "### SETTINGS\n",
    "##########################\n",
    "\n",
    "# General settings\n",
    "\n",
    "random_seed = 0\n",
    "\n",
    "# Hyperparameters\n",
    "learning_rate = 0.001\n",
    "training_epochs = 5\n",
    "batch_size = 100\n",
    "margin = 1.0\n",
    "\n",
    "# Architecture\n",
    "n_hidden_1 = 256\n",
    "n_hidden_2 = 256\n",
    "n_input = 784\n",
    "n_classes = 1 # for 'true' and 'false' matches\n",
    "\n",
    "\n",
    "def fully_connected(inputs, output_nodes, activation=None, seed=None):\n",
    "\n",
    "    input_nodes = inputs.get_shape().as_list()[1]\n",
    "    weights = tf.get_variable(name='weights', \n",
    "                              shape=(input_nodes, output_nodes),\n",
    "                              initializer=tf.truncated_normal_initializer(\n",
    "                                  mean=0.0,\n",
    "                                  stddev=0.001,\n",
    "                                  dtype=tf.float32,\n",
    "                                  seed=seed))\n",
    "\n",
    "    biases = tf.get_variable(name='biases', \n",
    "                             shape=(output_nodes,),\n",
    "                             initializer=tf.constant_initializer(\n",
    "                                 value=0.0, \n",
    "                                 dtype=tf.float32))\n",
    "                              \n",
    "    act = tf.matmul(inputs, weights) + biases\n",
    "    if activation is not None:\n",
    "        act = activation(act)\n",
    "    return act\n",
    "\n",
    "\n",
    "def euclidean_distance(x_1, x_2):\n",
    "    return tf.sqrt(tf.maximum(tf.sum(\n",
    "        tf.square(x - y), axis=1, keepdims=True), 1e-06))\n",
    "\n",
    "def contrastive_loss(x_1, x_2, margin=1.0):\n",
    "    return (x_1 * tf.square(x_2) +\n",
    "            (1.0 - x_1) * tf.square(tf.maximum(margin - x_2, 0.)))\n",
    "\n",
    "\n",
    "##########################\n",
    "### GRAPH DEFINITION\n",
    "##########################\n",
    "\n",
    "g = tf.Graph()\n",
    "with g.as_default():\n",
    "    \n",
    "    tf.set_random_seed(random_seed)\n",
    "\n",
    "    # Input data\n",
    "    tf_x_1 = tf.placeholder(tf.float32, [None, n_input], name='inputs_1')\n",
    "    tf_x_2 = tf.placeholder(tf.float32, [None, n_input], name='inputs_2')\n",
    "    tf_y = tf.placeholder(tf.float32, [None], \n",
    "                          name='targets') # here: 'true' or 'false' valuess\n",
    "\n",
    "    # Siamese Network\n",
    "    def build_mlp(inputs):\n",
    "        with tf.variable_scope('fc_1'):\n",
    "            layer_1 = fully_connected(inputs, n_hidden_1, \n",
    "                                      activation=tf.nn.relu)\n",
    "        with tf.variable_scope('fc_2'):\n",
    "            layer_2 = fully_connected(layer_1, n_hidden_2, \n",
    "                                      activation=tf.nn.relu)\n",
    "        with tf.variable_scope('fc_3'):\n",
    "            out_layer = fully_connected(layer_2, n_classes, \n",
    "                                        activation=tf.nn.relu)\n",
    "\n",
    "        return out_layer\n",
    "    \n",
    "    \n",
    "    with tf.variable_scope('siamese_net', reuse=False):\n",
    "        pred_left = build_mlp(tf_x_1)\n",
    "    with tf.variable_scope('siamese_net', reuse=True):\n",
    "        pred_right = build_mlp(tf_x_2)\n",
    "    \n",
    "    # Loss and optimizer\n",
    "    loss = contrastive_loss(pred_left, pred_right)\n",
    "    cost = tf.reduce_mean(loss, name='cost')\n",
    "    optimizer = tf.train.GradientDescentOptimizer(learning_rate=learning_rate)\n",
    "    train = optimizer.minimize(cost, name='train')\n",
    "    \n",
    "##########################\n",
    "### TRAINING & EVALUATION\n",
    "##########################\n",
    "\n",
    "np.random.seed(random_seed) # set seed for mnist shuffling\n",
    "mnist = input_data.read_data_sets(\"./\", one_hot=False)\n",
    "\n",
    "with tf.Session(graph=g) as sess:\n",
    "    \n",
    "    print('Initializing variables:')\n",
    "    sess.run(tf.global_variables_initializer())\n",
    "    for i in tf.get_collection(tf.GraphKeys.GLOBAL_VARIABLES,\n",
    "                               scope='siamese_net'):\n",
    "        print(i)\n",
    "\n",
    "    for epoch in range(training_epochs):\n",
    "        avg_cost = 0.\n",
    "        \n",
    "        total_batch = mnist.train.num_examples // batch_size // 2\n",
    "\n",
    "        for i in range(total_batch):\n",
    "            \n",
    "            batch_x_1, batch_y_1 = mnist.train.next_batch(batch_size)\n",
    "            batch_x_2, batch_y_2 = mnist.train.next_batch(batch_size)\n",
    "            batch_y = (batch_y_1 == batch_y_2).astype('float32')\n",
    "            \n",
    "            _, c = sess.run(['train', 'cost:0'], feed_dict={'inputs_1:0': batch_x_1,\n",
    "                                                            'inputs_2:0': batch_x_2,\n",
    "                                                            'targets:0': batch_y})\n",
    "            avg_cost += c\n",
    "\n",
    "        print(\"Epoch: %03d | AvgCost: %.3f\" % (epoch + 1, avg_cost / (i + 1)))"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "- Todo: add embedding visualization"
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.1"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}