{ "cells": [
{ "cell_type": "code", "execution_count": 1, "metadata": {}, "outputs": [], "source": [ "# standard python tools\n", "import numpy as np\n", "from glob import glob\n", "import os" ] },
{ "cell_type": "code", "execution_count": 2, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "findspark could not be initialized\n", "[]\n" ] } ], "source": [ "# make sure pyqae is available\n", "import pyqae as pq\n", "import pyspark\n", "from pyqae.images import viz\n", "# lazy image loading helpers\n", "from pyqae.images.lazy import paths_to_tiled_image, DiskMappedLazyImage, backends\n", "print(backends)" ] },
{ "cell_type": "code", "execution_count": 3, "metadata": {}, "outputs": [], "source": [ "base_path = os.path.join(\"..\", \"test\", \"resources\")\n", "little_image_list = glob(os.path.join(base_path, \"multilayer_tif\", \"*.tif\"))\n", "big_image_list = [little_image_list[-1]]" ] },
{ "cell_type": "code", "execution_count": 4, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "(120, 120)\n" ] } ], "source": [ "# open the test image through the disk-mapped lazy wrapper using the first listed backend\n", "start_img = DiskMappedLazyImage(big_image_list[0], backends[0])\n", "print(start_img.shape)" ] },
{ "cell_type": "code", "execution_count": 5, "metadata": {}, "outputs": [], "source": [ "def create_line_break(x_tile_size, x_dim):\n", "    # split one axis of length x_dim into (start, end) chunks of at most x_tile_size pixels\n", "    return [(x, min(x+x_tile_size, x_dim)) for x in range(0, x_dim, x_tile_size)]\n", "\n", "def create_tiles(x_tile_size, x_dim, y_tile_size, y_dim):\n", "    # cartesian product of the x and y chunks -> a full grid of 2D tiles\n", "    return [(x_tile, y_tile) for x_tile in create_line_break(x_tile_size, x_dim)\n", "            for y_tile in create_line_break(y_tile_size, y_dim)]\n", "\n", "def tile_dict(tile_list):\n", "    # flatten each tile into a dict of plain ints (easy to serialize, e.g. into a DataFrame)\n", "    return [{'x_start': int(xs), 'y_start': int(ys),\n", "             'x_end': int(xe), 'y_end': int(ye)} for ((xs, xe), (ys, ye)) in tile_list]" ] },
{ "cell_type": "code", "execution_count": 6, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "[(0, 256), (256, 512), (512, 768), (768, 1024), (1024, 1025)]\n" ] } ], "source": [ "# the last chunk is clipped to the image edge rather than running past it\n", "print(create_line_break(256, 1025))" ] },
{ "cell_type": "code", "execution_count": 7, "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "[[[2990 3196]\n", " [1134 1296]]\n", "\n", " [[1495 1794]\n", " [1620 1782]]]\n", "[[[3131 3196]\n", " [1815 1980]]\n", "\n", " [[2626 2727]\n", " [1485 1650]]]\n", "[[[2700 2925]\n", " [1480 1665]]\n", "\n", " [[3150 3196]\n", " [1295 1480]]]\n" ] } ], "source": [ "# sanity check: for random image and tile sizes, the tiles exactly cover the image\n", "im_size = np.random.randint(1000, 4000, size = 2)\n", "for tile_size in np.random.randint(100, 300, size = (3,2)):\n", "    c_tiles = create_tiles(tile_size[0], im_size[0], tile_size[1], im_size[1])\n", "    tile_sum = np.sum([(xe-xs)*(ye-ys) for ((xs, xe), (ys, ye)) in c_tiles])\n", "    print(np.random.permutation(c_tiles)[0:2])\n", "    assert tile_sum == im_size[0]*im_size[1], \"Total pixel count cannot change, {} != {}\".format(im_size[0]*im_size[1], tile_sum)" ] },
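{ "cell_type": "code", "execution_count": null, "metadata": {}, "outputs": [], "source": [ "# illustrative sketch (not part of the original run): apply the same tiling helpers\n", "# to the lazily loaded test image above; the 64-pixel tile size is an arbitrary\n", "# choice for the 120x120 shape reported by start_img.shape\n", "demo_tiles = create_tiles(64, start_img.shape[0], 64, start_img.shape[1])\n", "tile_dict(demo_tiles)" ] },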
{ "cell_type": "code", "execution_count": 8, "metadata": {}, "outputs": [ { "name": "stderr", "output_type": "stream", "text": [ "/srv/conda/lib/python3.6/site-packages/pyspark/sql/session.py:340: UserWarning: inferring schema from dict is deprecated,please use pyspark.sql.Row instead\n", " warnings.warn(\"inferring schema from dict is deprecated,\"\n" ] }, { "data": { "text/plain": [ "DataFrame[x_end: bigint, x_start: bigint, y_end: bigint, y_start: bigint]" ] }, "execution_count": 8, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# start a local Spark context and turn the tile list into a DataFrame of tile bounds\n", "from pyspark.sql import SQLContext\n", "from pyspark import SparkContext\n", "sc = SparkContext()\n", "sqlContext = SQLContext(sc)\n", "sqlContext.createDataFrame(tile_dict(c_tiles))" ] },
{ "cell_type": "code", "execution_count": 9, "metadata": {}, "outputs": [], "source": [ "# shut down the local Spark context\n", "sc.stop()" ] }
], "metadata": { "kernelspec": { "display_name": "Python 3", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.6.5" } }, "nbformat": 4, "nbformat_minor": 1 }