{ "cells": [ { "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "/usr/local/lib/python2.7/dist-packages/h5py/__init__.py:36: FutureWarning: Conversion of the second argument of issubdtype from `float` to `np.floating` is deprecated. In future, it will be treated as `np.float64 == np.dtype(float).type`.\n", " from ._conv import register_converters as _register_converters\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "[[22. 28.]\n", " [49. 64.]]\n" ] } ], "source": [ "import tensorflow as tf\n", "a = tf.constant([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], shape=[2, 3], name='a')\n", "b = tf.constant([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], shape=[3, 2], name='b')\n", "c = tf.matmul(a, b)\n", "# Creates a session with log_device_placement set to True.\n", "sess = tf.Session(config=tf.ConfigProto(log_device_placement=True))\n", "# Runs the op.\n", "print(sess.run(c))\n" ] }, { "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "[[22. 28.]\n", " [49. 64.]]\n" ] } ], "source": [ "with tf.device('/cpu:0'):\n", " a = tf.constant([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], shape=[2, 3], name='a')\n", " b = tf.constant([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], shape=[3, 2], name='b')\n", "c = tf.matmul(a, b)\n", "# Creates a session with log_device_placement set to True.\n", "sess = tf.Session(config=tf.ConfigProto(log_device_placement=True))\n", "# Runs the op.\n", "print(sess.run(c))\n" ] }, { "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "[[22. 28.]\n", " [49. 64.]]\n" ] } ], "source": [ "with tf.device('/gpu:0'):\n", " a = tf.constant([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], shape=[2, 3], name='a')\n", " b = tf.constant([1.0, 2.0, 3.0, 4.0, 5.0, 6.0], shape=[3, 2], name='b')\n", "c = tf.matmul(a, b)\n", "# Creates a session with log_device_placement set to True.\n", "sess = tf.Session(config=tf.ConfigProto(log_device_placement=True))\n", "# Runs the op.\n", "print(sess.run(c))" ] }, { "cell_type": "code", "execution_count": 14, "metadata": {}, "outputs": [], "source": [ "import sys\n", "import numpy as np\n", "import tensorflow as tf\n", "from datetime import datetime\n", "\n", "def matmul(device_name=\"gpu\", shape=(10,10)):\n", "#device_name = sys.argv[1] # Choose device from cmd line. 
  {
   "cell_type": "code",
   "execution_count": 14,
   "metadata": {},
   "outputs": [],
   "source": [
    "import sys\n",
    "import numpy as np\n",
    "import tensorflow as tf\n",
    "from datetime import datetime\n",
    "\n",
    "def matmul(device_name=\"cpu\", shape=(10, 10)):\n",
    "    # Time a random matmul + reduce_sum on the requested device.\n",
    "    # device_name: \"gpu0\", \"gpu1\", or anything else for the CPU.\n",
    "    # When run as a standalone script, the arguments could instead come from\n",
    "    # the command line:\n",
    "    # device_name = sys.argv[1]  # options: gpu0, gpu1 or cpu\n",
    "    # shape = (int(sys.argv[2]), int(sys.argv[2]))\n",
    "    if device_name == \"gpu0\":\n",
    "        device_name = \"/gpu:0\"\n",
    "    elif device_name == \"gpu1\":\n",
    "        device_name = \"/gpu:1\"\n",
    "    else:\n",
    "        device_name = \"/cpu:0\"\n",
    "\n",
    "    with tf.device(device_name):\n",
    "        random_matrix = tf.random_uniform(shape=shape, minval=0, maxval=1)\n",
    "        dot_operation = tf.matmul(random_matrix, tf.transpose(random_matrix))\n",
    "        sum_operation = tf.reduce_sum(dot_operation)\n",
    "\n",
    "    startTime = datetime.now()\n",
    "    with tf.Session(config=tf.ConfigProto(log_device_placement=True)) as session:\n",
    "        result = session.run(sum_operation)\n",
    "        print(result)\n",
    "\n",
    "    # The device-placement log is verbose -- add some blank lines so the\n",
    "    # timing summary is easy to spot.\n",
    "    print(\"\\n\" * 5)\n",
    "    print(\"Shape:\", shape, \"Device:\", device_name)\n",
    "    print(\"Time taken:\", datetime.now() - startTime)\n",
    "\n",
    "    print(\"\\n\" * 5)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 20,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "254401050000.0\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "('Shape:', (10000, 10000), 'Device:', '/cpu:0')\n",
      "('Time taken:', datetime.timedelta(0, 20, 334651))\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n"
     ]
    }
   ],
   "source": [
    "matmul(device_name=\"cpu\", shape=(10000, 10000))\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 15,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "250021800000.0\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "('Shape:', (10000, 10000), 'Device:', '/gpu:0')\n",
      "('Time taken:', datetime.timedelta(0, 0, 419122))\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n"
     ]
    }
   ],
   "source": [
    "matmul(device_name=\"gpu0\", shape=(10000, 10000))"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 16,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "250002110000.0\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "('Shape:', (10000, 10000), 'Device:', '/gpu:1')\n",
      "('Time taken:', datetime.timedelta(0, 0, 907009))\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n",
      "\n"
     ]
    }
   ],
   "source": [
    "matmul(device_name=\"gpu1\", shape=(10000, 10000))"
   ]
  },
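  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# A minimal sketch (not executed here) of two standard TF 1.x ConfigProto\n",
    "# options that pair naturally with the manual placement above:\n",
    "# allow_soft_placement lets TensorFlow fall back to another device when an op\n",
    "# cannot run where it was pinned, and gpu_options.allow_growth stops the\n",
    "# session from reserving almost all GPU memory up front -- the large\n",
    "# \"Memory-Usage\" figures in the nvidia-smi output below are typical of\n",
    "# sessions created without it.\n",
    "config = tf.ConfigProto(log_device_placement=True,\n",
    "                        allow_soft_placement=True)\n",
    "config.gpu_options.allow_growth = True\n",
    "\n",
    "with tf.device('/gpu:0'):\n",
    "    x = tf.random_uniform(shape=(1000, 1000), minval=0, maxval=1)\n",
    "    y = tf.reduce_sum(tf.matmul(x, tf.transpose(x)))\n",
    "\n",
    "with tf.Session(config=config) as session:\n",
    "    print(session.run(y))"
   ]
  },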
  {
   "cell_type": "code",
   "execution_count": 19,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "Tue Feb 20 13:38:53 2018 \n",
      "+-----------------------------------------------------------------------------+\n",
      "| NVIDIA-SMI 387.26                 Driver Version: 387.26                    |\n",
      "|-------------------------------+----------------------+----------------------+\n",
      "| GPU  Name        Persistence-M| Bus-Id        Disp.A | Volatile Uncorr. ECC |\n",
      "| Fan  Temp  Perf  Pwr:Usage/Cap|         Memory-Usage | GPU-Util  Compute M. |\n",
      "|===============================+======================+======================|\n",
      "|   0  GeForce GTX TIT...  Off  | 00000000:02:00.0 Off |                  N/A |\n",
      "| 31%   48C    P8    29W / 250W |   5801MiB /  6080MiB |      0%      Default |\n",
      "+-------------------------------+----------------------+----------------------+\n",
      "|   1  GeForce GTX TIT...  Off  | 00000000:03:00.0 Off |                  N/A |\n",
      "| 22%   51C    P2    76W / 250W |  11633MiB / 12207MiB |      0%      Default |\n",
      "+-------------------------------+----------------------+----------------------+\n",
      " \n",
      "+-----------------------------------------------------------------------------+\n",
      "| Processes:                                                       GPU Memory |\n",
      "|  GPU       PID   Type   Process name                             Usage      |\n",
      "|=============================================================================|\n",
      "+-----------------------------------------------------------------------------+\n"
     ]
    }
   ],
   "source": [
    "%%bash\n",
    "nvidia-smi"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 2",
   "language": "python",
   "name": "python2"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 2
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython2",
   "version": "2.7.12"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}