{ "cells": [ { "cell_type": "code", "execution_count": 2, "id": "31bea716-19e2-4180-bcd7-88b65e8a6b52", "metadata": {}, "outputs": [], "source": [ "import yaml" ] }, { "cell_type": "code", "execution_count": 3, "id": "04458829-276d-4518-acde-b3f7ef5a7108", "metadata": {}, "outputs": [], "source": [ "# load menu\n", "with open(\"mnt/city-directories/01-user-input/menu.yml\", 'r') as f:\n", "    menu = yaml.safe_load(f)" ] }, { "cell_type": "code", "execution_count": 4, "id": "52947ec8-c31e-4e7c-9880-1c3c0ce8c217", "metadata": {}, "outputs": [], "source": [ "# Toolbox imports, deduplicated (zonal_stats, fiona, unary_union and LineString were\n", "# imported more than once) and grouped stdlib -> third-party. Import order preserves the\n", "# original final bindings (rasterio.enums.Resampling still wins over rasterio.warp's).\n", "if menu['toolbox']:\n", "    import csv\n", "    import glob\n", "    import math\n", "    import os\n", "    import warnings\n", "    from pathlib import Path\n", "\n", "    import geopandas as gpd\n", "    import matplotlib.pyplot as plt\n", "    import numpy as np\n", "    import pandas as pd\n", "    import pint\n", "    import rasterio\n", "    import rasterio.features\n", "    from rasterio.warp import calculate_default_transform, reproject, Resampling\n", "    from rasterio.plot import show\n", "    from rasterio.crs import CRS\n", "    from rasterio.merge import merge\n", "    from rasterio.transform import from_bounds\n", "    from rasterio.mask import mask\n", "    from rasterio.enums import Resampling\n", "    from rasterio.vrt import WarpedVRT\n", "    from rasterio.features import geometry_mask, shapes\n", "    from rasterstats import zonal_stats\n", "    from osgeo import gdal, gdalconst\n", "    from scipy.ndimage import generic_filter\n", "    from shapely.geometry import LineString, MultiPoint, mapping, shape\n", "    from shapely.ops import linemerge, unary_union, split, snap\n", "    import fiona\n", "    import osmnx as ox\n", "    from skimage import measure\n", "    from affine import Affine" ] }, { 
"cell_type": "code", "execution_count": 5, "id": "135fdd6a-bd1c-402f-a9a5-110594856059", "metadata": {}, "outputs": [], "source": [ "# SET UP ##############################################\n", "\n", "# load city inputs files, to be updated for each city scan\n", "with open(\"city_inputs.yml\", 'r') as f:\n", "    city_inputs = yaml.safe_load(f)\n", "\n", "city = city_inputs['city_name'].replace(' ', '_').lower()\n", "country = city_inputs['country_name'].replace(' ', '_').lower()\n", "# load global inputs, such as data sources that generally remain the same across scans\n", "\n", "with open(\"global_inputs.yml\", 'r') as f:\n", "    global_inputs = yaml.safe_load(f)\n", "\n", "\n", "# transform the input shp to correct prj (epsg 4326)\n", "aoi_file = gpd.read_file(city_inputs['AOI_path']).to_crs(epsg = 4326)\n", "features = aoi_file.geometry\n", "\n", "# Define output folder ---------\n", "output_folder = Path('mnt/city-directories/02-process-output')\n", "\n", "# Create the output folder if needed. Path.mkdir(parents=True, exist_ok=True) also\n", "# creates missing parent directories and cannot race, unlike the original\n", "# os.path.exists + os.mkdir pair, which fails when 'mnt/city-directories' is absent.\n", "output_folder.mkdir(parents=True, exist_ok=True)\n" ] }, { "cell_type": "code", "execution_count": 16, "id": "cb7ec1b4", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Merged pluvial data saved as mnt/city-directories/02-process-output/goris_merged_pluvial_data.tif\n" ] } ], "source": [ "#merge pluvial \n", "\n", "def merge_pluvial_files():\n", " matching_files = glob.glob(os.path.join(output_folder, f\"{city}_pluvial_2020_*.tif\"))\n", " \n", " if matching_files:\n", " src_files_to_merge = [rasterio.open(pluvial_file) for pluvial_file in matching_files]\n", " \n", " try:\n", " merged_data, merged_transform = merge(src_files_to_merge)\n", " merged_crs = src_files_to_merge[0].crs\n", " \n", " output_file = os.path.join(output_folder, f\"{city}_merged_pluvial_data.tif\")\n", " with rasterio.open(output_file, 'w', driver='GTiff',\n", " width=merged_data.shape[2], height=merged_data.shape[1],\n", " count=1, dtype=merged_data.dtype,\n", " crs=merged_crs, 
transform=merged_transform) as dst:\n", " dst.write(merged_data)\n", " \n", " print(f\"Merged pluvial data saved as {output_file}\")\n", " return output_file\n", " except Exception as e:\n", " print(f\"Error occurred while merging: {e}\")\n", " return None\n", " else:\n", " print(\"Error: No pluvial files found.\")\n", " return None\n", "merged_pluvial_data = merge_pluvial_files()\n", "\n", "\n" ] }, { "cell_type": "code", "execution_count": 17, "id": "a63f5868", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Merged pluvial data saved as mnt/city-directories/02-process-output/goris_merged_pluvial_data_utm.tif\n" ] } ], "source": [ "#merge pluvial \n", "\n", "def merge_pluvial_files_UTM():\n", "    \"\"\"Merge all per-scenario UTM pluvial rasters into a single GeoTIFF.\n", "\n", "    Looks for '{city}_pluvial_2020_*_utm.tif' in output_folder; returns the merged\n", "    file path, or None when no matching inputs exist.\n", "    \"\"\"\n", "    matching_files = glob.glob(os.path.join(output_folder, f\"{city}_pluvial_2020_*_utm.tif\"))\n", "\n", "    if matching_files:\n", "        src_files_to_merge = [rasterio.open(pluvial_file) for pluvial_file in matching_files]\n", "        try:\n", "            merged_data, merged_transform = merge(src_files_to_merge)\n", "            # CRS taken from the first input; all *_utm inputs share the city's UTM zone\n", "            merged_crs = src_files_to_merge[0].crs\n", "        finally:\n", "            # close the source datasets -- the original leaked these file handles\n", "            for src in src_files_to_merge:\n", "                src.close()\n", "\n", "        output_file = os.path.join(output_folder, f\"{city}_merged_pluvial_data_utm.tif\")\n", "        with rasterio.open(output_file, 'w', driver='GTiff',\n", "                           width=merged_data.shape[2], height=merged_data.shape[1],\n", "                           count=1, dtype=merged_data.dtype,\n", "                           crs=merged_crs, transform=merged_transform) as dst:\n", "            dst.write(merged_data)\n", "\n", "        print(f\"Merged pluvial data saved as {output_file}\")\n", "        return output_file\n", "    else:\n", "        print(\"Error: No pluvial files found.\")\n", "        return None\n", "\n", "merged_pluvial_data_utm = merge_pluvial_files_UTM()\n" ] }, { "cell_type": "code", "execution_count": 18, "id": "ad6c8ce6", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Merged fluvial data saved as mnt/city-directories/02-process-output/goris_merged_fluvial_data.tif\n" ] } ], "source": [ "def merge_fluvial_files():\n", " matching_files = 
glob.glob(os.path.join(output_folder, f\"{city}_fluvial_2020_*.tif\"))\n", " \n", " if matching_files:\n", " src_files_to_merge = [rasterio.open(fluvial_file) for fluvial_file in matching_files]\n", " \n", " merged_data, merged_transform = merge(src_files_to_merge)\n", " merged_crs = src_files_to_merge[0].crs\n", " \n", " output_file = os.path.join(output_folder, f\"{city}_merged_fluvial_data.tif\")\n", " with rasterio.open(output_file, 'w', driver='GTiff',\n", " width=merged_data.shape[2], height=merged_data.shape[1],\n", " count=1, dtype=merged_data.dtype,\n", " crs=merged_crs, transform=merged_transform) as dst:\n", " dst.write(merged_data)\n", "\n", " print(f\"Merged fluvial data saved as {output_file}\")\n", " return output_file\n", " else:\n", " print(\"Error: No fluvial files found.\")\n", " return None\n", "merged_fluvial_data = merge_fluvial_files()\n" ] }, { "cell_type": "code", "execution_count": 19, "id": "42f82fa4", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Merged fluvial data saved as mnt/city-directories/02-process-output/goris_merged_fluvial_data_utm.tif\n" ] } ], "source": [ "def merge_fluvial_files_UTM():\n", " matching_files = glob.glob(os.path.join(output_folder, f\"{city}_fluvial_2020_*_utm.tif\"))\n", " \n", " if matching_files:\n", " src_files_to_merge = [rasterio.open(fluvial_file) for fluvial_file in matching_files]\n", " \n", " merged_data, merged_transform = merge(src_files_to_merge)\n", " merged_crs = src_files_to_merge[0].crs # Use the CRS of the first file\n", "\n", " output_file = os.path.join(output_folder, f\"{city}_merged_fluvial_data_utm.tif\")\n", " with rasterio.open(output_file, 'w', driver='GTiff',\n", " width=merged_data.shape[2], height=merged_data.shape[1],\n", " count=1, dtype=merged_data.dtype,\n", " crs=merged_crs, transform=merged_transform) as dst:\n", " dst.write(merged_data)\n", "\n", " print(f\"Merged fluvial data saved as {output_file}\")\n", " return output_file\n", " 
else:\n", " print(\"Error: No fluvial files found.\")\n", " return None\n", "merged_fluvial_data_UTM = merge_fluvial_files_UTM()\n" ] }, { "cell_type": "code", "execution_count": 91, "id": "b0bb29bf", "metadata": {}, "outputs": [], "source": [ "#merge comb files and save a merged file (everything with a value is 1, otherwise 0 )" ] }, { "cell_type": "code", "execution_count": 92, "id": "c3aebab7", "metadata": {}, "outputs": [], "source": [ "#merge coastal files and save a merged file (everything with a value is 1, otherwise 0 )" ] }, { "cell_type": "code", "execution_count": 20, "id": "bca43804", "metadata": {}, "outputs": [], "source": [ "#resampling data\n", "def resample_raster(input_raster, target_shape):\n", "    \"\"\"Read band 1 of an open rasterio dataset resampled to target_shape (nearest neighbour).\"\"\"\n", "    data = input_raster.read(1, out_shape=target_shape, resampling=Resampling.nearest)\n", "    return data" ] }, { "cell_type": "code", "execution_count": 22, "id": "f59b8c2a", "metadata": {}, "outputs": [], "source": [ "#PLOT CHECK\n", "def plot_data():\n", "    \"\"\"Visual sanity check: AOI outline over the merged pluvial and WSF rasters.\n", "\n", "    Does nothing unless both the 'flood' and 'wsf' menu options are selected.\n", "    \"\"\"\n", "    if menu.get('flood') and menu.get('wsf'):\n", "        pluvial_path = os.path.join(output_folder, f\"{city}_merged_pluvial_data_utm.tif\")\n", "        wsf_path = os.path.join(output_folder, f\"{city}_wsf_utm.tiff\")\n", "        # Plot AOI features\n", "        fig, ax = plt.subplots(figsize=(10, 10))\n", "        features_utm = features.to_crs('EPSG:32638')  # NOTE(review): UTM zone hardcoded -- confirm it matches the city AOI\n", "        features_utm.plot(ax=ax, facecolor='none', edgecolor='red')\n", "\n", "        # Plot pluvial data\n", "        with rasterio.open(pluvial_path) as pluvial_src:\n", "            show(pluvial_src, ax=ax, cmap='Blues', alpha=0.5)\n", "\n", "        # Plot WSF data\n", "        with rasterio.open(wsf_path) as wsf_src:\n", "            show(wsf_src, ax=ax, cmap='Reds', alpha=0.5)\n", "\n", "        ax.set_title(\"Features, Pluvial Data, and WSF\")\n", "        ax.set_xlabel(\"Longitude\")\n", "        ax.set_ylabel(\"Latitude\")\n", "        # Save BEFORE show(): plt.show() consumes the active figure, and the original\n", "        # bare 'plt.savefig' (no parentheses) was a no-op attribute access.\n", "        fig.savefig(os.path.join(output_folder, f\"{city}_flood_wsf_plot.png\"))\n", "        plt.show()\n" ] }, { "cell_type": "code", "execution_count": 24, "id": "a8f8850b", "metadata": {}, "outputs": [], "source": [ "#WSF and Pu\n", "\n", 
"def get_pu_wsf():\n", "    \"\"\"Cross-tabulate pluvial flood exposure with WSF settlement year.\n", "\n", "    Returns {year: flooded built-up area in map units^2}, or None when the\n", "    'flood'/'wsf' menu options are not selected.\n", "    \"\"\"\n", "    if menu.get('flood') and menu.get('wsf'):\n", "        pu_path = os.path.join(output_folder, f\"{city}_pluvial_2020_lt1_utm.tif\")\n", "        wsf_path = os.path.join(output_folder, f\"{city}_wsf_utm.tiff\")\n", "\n", "        # Reproject features to UTM (NOTE(review): zone hardcoded -- confirm for this city)\n", "        features_utm = features.to_crs('EPSG:32638')\n", "\n", "        with rasterio.open(pu_path) as src_pluvial:\n", "            pluvial_data, pluvial_transform = mask(src_pluvial, features_utm.geometry, crop=True)\n", "            pluvial_data = pluvial_data[0]\n", "            # pixel area = |a * e| of the affine transform\n", "            pluvial_resolution = abs(pluvial_transform[0] * pluvial_transform[4])\n", "\n", "        with rasterio.open(wsf_path) as src_wsf:\n", "            wsf_data, wsf_transform = mask(src_wsf, features_utm.geometry, crop=True)\n", "            wsf_data = wsf_data[0]\n", "\n", "        # Crop both arrays to a common shape so they can be multiplied element-wise\n", "        min_height = min(pluvial_data.shape[0], wsf_data.shape[0])\n", "        min_width = min(pluvial_data.shape[1], wsf_data.shape[1])\n", "        pluvial_data = pluvial_data[:min_height, :min_width]\n", "        wsf_data = wsf_data[:min_height, :min_width]\n", "\n", "        # Settlement years present, excluding background (0) and the 1985 base year\n", "        unique_years = np.unique(wsf_data)\n", "        unique_years = unique_years[unique_years != 0]\n", "        unique_years = unique_years[unique_years != 1985]\n", "\n", "        stats_by_year = {}\n", "\n", "        for year in unique_years:\n", "            masked_wsf_data = np.where(wsf_data == year, 1, 0)\n", "            masked_flooded_data = masked_wsf_data * pluvial_data\n", "            stats = zonal_stats(features_utm.geometry, masked_flooded_data, affine=pluvial_transform, stats=\"sum\", nodata=-9999)\n", "            # zonal_stats yields sum=None for an all-nodata zone; treat as zero area\n", "            # instead of letting None * resolution raise TypeError\n", "            area = (stats[0]['sum'] or 0) * pluvial_resolution\n", "\n", "            stats_by_year[year] = area\n", "\n", "        return stats_by_year\n", "\n", "    else:\n", "        print(\"Flood or WSF menu not selected.\")\n", "        return None\n", "\n", "# Stats by year\n" ] }, { "cell_type": "code", "execution_count": 247, "id": "fec28630", "metadata": {}, "outputs": [ { "data": { "text/plain": [ "'\\ndef get_pu_wsf(output_folder, city, menu, features):\\n if 
menu.get(\\'flood\\') and menu.get(\\'wsf\\'):\\n pu_path = os.path.join(output_folder, f\"{city}_merged_pluvial_data_utm.tif\")\\n wsf_path = os.path.join(output_folder, f\"{city}_wsf_utm.tiff\")\\n\\n # Reproject features to UTM\\n features_utm = features.to_crs(\\'EPSG:32638\\') \\n\\n with rasterio.open(pu_path) as src_pluvial:\\n pluvial_data, pluvial_transform = mask(src_pluvial, features_utm.geometry, crop=True)\\n pluvial_data = pluvial_data[0] \\n pluvial_affine = pluvial_transform\\n pluvial_resolution = abs(pluvial_transform[0] * pluvial_transform[4]) \\n\\n with rasterio.open(wsf_path) as src_wsf:\\n wsf_data, wsf_transform = mask(src_wsf, features_utm.geometry, crop=True)\\n wsf_data = wsf_data[0] \\n wsf_affine = wsf_transform\\n\\n # Ensure both arrays have the same shape\\n min_height = min(pluvial_data.shape[0], wsf_data.shape[0])\\n min_width = min(pluvial_data.shape[1], wsf_data.shape[1])\\n pluvial_data = pluvial_data[:min_height, :min_width]\\n wsf_data = wsf_data[:min_height, :min_width]\\n\\n # Calculate flooded area for each year\\n pixel_areas = {}\\n unique_years = np.unique(wsf_data)\\n for year in unique_years:\\n if year != 0 and year >= 1986: # Exclude background value and years before 1986\\n # Mask WSF data for the current year\\n masked_wsf_data = np.where(wsf_data == year, 1, 0)\\n # Multiply with pluvial data to get flooded area\\n masked_flooded_data = masked_wsf_data * pluvial_data\\n # Calculate area\\n area = np.count_nonzero(masked_flooded_data) * pluvial_resolution\\n pixel_areas[year] = area\\n\\n return pixel_areas\\n else:\\n print(\"Flood or WSF menu not selected.\")\\n return None\\n\\n# Call the function and print results\\npu_wsf_areas = get_pu_wsf(output_folder, city, menu, features)\\nif stats_by_year is not None:\\n years = list(stats_by_year.keys())\\n areas = list(stats_by_year.values())\\n\\n # Filter years for plotting\\n years_to_plot = [1986, 1995, 2005, 2015]\\n areas_to_plot = [stats_by_year.get(year, 
np.nan) for year in years_to_plot]\\n\\n # Interpolate missing years\\' data for a smoother curve\\n interp_years = np.arange(min(years_to_plot), max(years_to_plot) + 1)\\n interp_areas = np.interp(interp_years, years_to_plot, areas_to_plot)\\n\\n plt.figure(figsize=(10, 6))\\n plt.plot(interp_years, interp_areas, marker=\\'o\\', linestyle=\\'-\\')\\n plt.title(\\'Flooded Area from 1986 to 2015 (Interpolated)\\')\\n plt.xlabel(\\'Year\\')\\n plt.ylabel(\\'Area (square units)\\')\\n plt.grid(True)\\n plt.tight_layout()\\n plt.show()\\nelse:\\n print(\"No areas calculated.\")\\n'" ] }, "execution_count": 247, "metadata": {}, "output_type": "execute_result" } ], "source": [ "# Function to calculate flooded area from WSF data\n", "'''\n", "def get_pu_wsf(output_folder, city, menu, features):\n", " if menu.get('flood') and menu.get('wsf'):\n", " pu_path = os.path.join(output_folder, f\"{city}_merged_pluvial_data_utm.tif\")\n", " wsf_path = os.path.join(output_folder, f\"{city}_wsf_utm.tiff\")\n", "\n", " # Reproject features to UTM\n", " features_utm = features.to_crs('EPSG:32638') \n", "\n", " with rasterio.open(pu_path) as src_pluvial:\n", " pluvial_data, pluvial_transform = mask(src_pluvial, features_utm.geometry, crop=True)\n", " pluvial_data = pluvial_data[0] \n", " pluvial_affine = pluvial_transform\n", " pluvial_resolution = abs(pluvial_transform[0] * pluvial_transform[4]) \n", "\n", " with rasterio.open(wsf_path) as src_wsf:\n", " wsf_data, wsf_transform = mask(src_wsf, features_utm.geometry, crop=True)\n", " wsf_data = wsf_data[0] \n", " wsf_affine = wsf_transform\n", "\n", " # Ensure both arrays have the same shape\n", " min_height = min(pluvial_data.shape[0], wsf_data.shape[0])\n", " min_width = min(pluvial_data.shape[1], wsf_data.shape[1])\n", " pluvial_data = pluvial_data[:min_height, :min_width]\n", " wsf_data = wsf_data[:min_height, :min_width]\n", "\n", " # Calculate flooded area for each year\n", " pixel_areas = {}\n", " unique_years = 
np.unique(wsf_data)\n", " for year in unique_years:\n", " if year != 0 and year >= 1986: # Exclude background value and years before 1986\n", " # Mask WSF data for the current year\n", " masked_wsf_data = np.where(wsf_data == year, 1, 0)\n", " # Multiply with pluvial data to get flooded area\n", " masked_flooded_data = masked_wsf_data * pluvial_data\n", " # Calculate area\n", " area = np.count_nonzero(masked_flooded_data) * pluvial_resolution\n", " pixel_areas[year] = area\n", "\n", " return pixel_areas\n", " else:\n", " print(\"Flood or WSF menu not selected.\")\n", " return None\n", "\n", "# Call the function and print results\n", "pu_wsf_areas = get_pu_wsf(output_folder, city, menu, features)\n", "if stats_by_year is not None:\n", " years = list(stats_by_year.keys())\n", " areas = list(stats_by_year.values())\n", "\n", " # Filter years for plotting\n", " years_to_plot = [1986, 1995, 2005, 2015]\n", " areas_to_plot = [stats_by_year.get(year, np.nan) for year in years_to_plot]\n", "\n", " # Interpolate missing years' data for a smoother curve\n", " interp_years = np.arange(min(years_to_plot), max(years_to_plot) + 1)\n", " interp_areas = np.interp(interp_years, years_to_plot, areas_to_plot)\n", "\n", " plt.figure(figsize=(10, 6))\n", " plt.plot(interp_years, interp_areas, marker='o', linestyle='-')\n", " plt.title('Flooded Area from 1986 to 2015 (Interpolated)')\n", " plt.xlabel('Year')\n", " plt.ylabel('Area (square units)')\n", " plt.grid(True)\n", " plt.tight_layout()\n", " plt.show()\n", "else:\n", " print(\"No areas calculated.\")\n", "'''\n" ] }, { "cell_type": "code", "execution_count": 28, "id": "7e9a8ee8", "metadata": {}, "outputs": [], "source": [ "#Fluvial and WSF\n", "\n", "def get_fu_wsf():\n", " if menu.get('flood') and menu.get('wsf'):\n", " fu_path = os.path.join(output_folder, f\"{city}_fluvial_2020_lt1_utm.tif\") # Update the file path for fluvial data\n", " wsf_path = os.path.join(output_folder, f\"{city}_wsf_utm.tiff\")\n", "\n", " # Reproject 
features to UTM\n", " features_utm = features.to_crs('EPSG:32638') \n", "\n", " with rasterio.open(fu_path) as src_fluvial: # Open the fluvial data\n", " fluvial_data, fluvial_transform = mask(src_fluvial, features_utm.geometry, crop=True)\n", " fluvial_data = fluvial_data[0] \n", " fluvial_affine = fluvial_transform\n", " fluvial_resolution = abs(fluvial_transform[0] * fluvial_transform[4]) \n", "\n", " with rasterio.open(wsf_path) as src_wsf:\n", " wsf_data, wsf_transform = mask(src_wsf, features_utm.geometry, crop=True)\n", " wsf_data = wsf_data[0] \n", " wsf_affine = wsf_transform\n", "\n", " # Ensure both arrays have the same shape\n", " min_height = min(fluvial_data.shape[0], wsf_data.shape[0])\n", " min_width = min(fluvial_data.shape[1], wsf_data.shape[1])\n", " fluvial_data = fluvial_data[:min_height, :min_width]\n", " wsf_data = wsf_data[:min_height, :min_width]\n", "\n", " # Get unique years from the WSF data\n", " unique_years = np.unique(wsf_data)\n", " unique_years = unique_years[unique_years != 0]\n", " unique_years = unique_years[unique_years != 1985] \n", "\n", " stats_by_year = {}\n", "\n", " for year in unique_years:\n", " masked_wsf_data = np.where(wsf_data == year, 1, 0)\n", " masked_flooded_data = masked_wsf_data * fluvial_data \n", " stats = zonal_stats(features_utm.geometry, masked_flooded_data, affine=fluvial_transform, stats=\"sum\", nodata=-9999)\n", " area = stats[0]['sum'] * fluvial_resolution \n", "\n", " stats_by_year[year] = area\n", "\n", " return stats_by_year\n", "\n", " else:\n", " print(\"Flood or WSF menu not selected.\")\n", " return None\n" ] }, { "cell_type": "code", "execution_count": 96, "id": "389ee224", "metadata": {}, "outputs": [], "source": [ "#Do the same for comb files" ] }, { "cell_type": "code", "execution_count": 97, "id": "6f80b518", "metadata": {}, "outputs": [], "source": [ "#Do the same for coastal files " ] }, { "cell_type": "code", "execution_count": 25, "id": "c0f54ca8", "metadata": {}, "outputs": [], 
"source": [ "#Population Pluvial #Bramka's reference\n", "\n", "def normalize_raster(src, dst_shape):\n", " \"\"\"\n", " Normalize the raster to a consistent shape using WarpedVRT.\n", " \"\"\"\n", " src_profile = src.profile\n", " src_profile['count'] = 1 # Ensure only one band is read\n", " with WarpedVRT(src, **src_profile) as vrt:\n", " data = vrt.read(1, out_shape=dst_shape)\n", " return data\n", "\n", "def get_pu_pop_norm():\n", " with open(\"global_inputs.yml\", 'r') as f:\n", " global_inputs = yaml.safe_load(f)\n", " \n", " if menu.get('flood') and menu.get('population'):\n", " pu_path = os.path.join(output_folder, f\"{city}_pluvial_2020_lt1.tif\")\n", " try:\n", " with rasterio.open(pu_path) as pu_src:\n", " pu_shape = pu_src.shape\n", " merged_pluvial_data = pu_src.read(1)\n", " merged_pluvial_data_transform = pu_src.transform\n", " except Exception as e:\n", " print(f\"Error opening merged pluvial data raster: {e}\")\n", " return\n", "\n", " pop_path = os.path.join(output_folder, f\"{city}_population.tif\")\n", " try:\n", " with rasterio.open(pop_path) as pop_src:\n", " pop_data = normalize_raster(pop_src, pu_shape)\n", " pop_transform = pop_src.transform\n", " except Exception as e:\n", " print(f\"Error opening population raster: {e}\")\n", " return\n", " \n", "\n", " pop_data_clipped = np.clip(pop_data, 0, None)\n", " pop_percentile_60 = np.percentile(pop_data_clipped, 60)\n", "\n", " total_count = np.sum((merged_pluvial_data == 1) & (pop_data_clipped > pop_percentile_60))\n", " total_pixels = np.sum(merged_pluvial_data == 1)\n", "\n", " percentage = (total_count / total_pixels) * 100\n", " print(f\"{percentage:.2f}% of densely populated areas are located within the rainwater flood risk zone with a minimum depth of 15 cm\")\n", "\n", " csv_path = os.path.join(output_folder, 'pu_pop_area.csv')\n", " df = pd.DataFrame({'File Name': 'Combined', 'Percentage': [percentage]})\n", " df.to_csv(csv_path, index=False)\n", " print(f\"Result saved to 
{csv_path}\")\n", " else:\n", " print(\"Flood or population menu not selected.\")\n", "\n", "\n" ] }, { "cell_type": "code", "execution_count": 29, "id": "8da3f01f", "metadata": {}, "outputs": [], "source": [ "#Population Fluvial\n", "\n", "def normalize_raster(src, dst_shape):\n", " \"\"\"\n", " Normalize the raster to a consistent shape using WarpedVRT.\n", " \"\"\"\n", " src_profile = src.profile\n", " src_profile['count'] = 1 # Ensure only one band is read\n", " with WarpedVRT(src, **src_profile) as vrt:\n", " data = vrt.read(1, out_shape=dst_shape)\n", " return data\n", "\n", "def get_fu_pop_norm():\n", " with open(\"global_inputs.yml\", 'r') as f:\n", " global_inputs = yaml.safe_load(f)\n", " \n", " if menu.get('flood') and menu.get('population'):\n", " fu_path = os.path.join(output_folder, f\"{city}_fluvial_2020_lt1.tif\")\n", " try:\n", " with rasterio.open(fu_path) as fu_src:\n", " fu_shape = fu_src.shape\n", " merged_fluvial_data = fu_src.read(1)\n", " merged_fluvial_data_transform = fu_src.transform\n", " except Exception as e:\n", " print(f\"Error opening merged fluvial data raster: {e}\")\n", " return\n", "\n", " pop_path = os.path.join(output_folder, f\"{city}_population.tif\")\n", " try:\n", " with rasterio.open(pop_path) as pop_src:\n", " pop_data = normalize_raster(pop_src, fu_shape)\n", " pop_transform = pop_src.transform\n", " except Exception as e:\n", " print(f\"Error opening population raster: {e}\")\n", " return\n", "\n", " pop_data_clipped = np.clip(pop_data, 0, None)\n", " pop_percentile_60 = np.percentile(pop_data_clipped, 60) #60 \n", "\n", " total_count = np.sum((merged_fluvial_data == 1) & (pop_data_clipped > pop_percentile_60))\n", " total_pixels = np.sum(merged_fluvial_data == 1)\n", "\n", " percentage = (total_count / total_pixels) * 100\n", " print(f\"{percentage:.2f}% of densely populated areas are located within the fluvial flood risk zone with a minimum depth of 10 cm\")\n", "\n", " csv_path = os.path.join(output_folder, 
'fu_pop_area.csv')\n", " df = pd.DataFrame({'File Name': 'Combined', 'Percentage': [percentage]})\n", " df.to_csv(csv_path, index=False)\n", " print(f\"Result saved to {csv_path}\")\n", " else:\n", " print(\"Flood or population menu not selected.\")\n", "\n" ] }, { "cell_type": "code", "execution_count": 26, "id": "3c56176f", "metadata": {}, "outputs": [], "source": [ "# Pu Amenities\n", "\n", "def get_pu_am():\n", " with open(\"global_inputs.yml\", 'r') as f:\n", " global_inputs = yaml.safe_load(f)\n", " \n", " if menu.get('flood') and menu.get('amenities'): \n", " pu_path = os.path.join(output_folder, f\"{city}_pluvial_2020_lt1.tif\")\n", " try:\n", " with rasterio.open(pu_path) as pu_src:\n", " merged_pluvial_data = pu_src.read(1)\n", " merged_pluvial_data_transform = pu_src.transform\n", " merged_pluvial_data_shape = merged_pluvial_data.shape\n", " except Exception as e:\n", " print(f\"Error opening merged pluvial data raster: {e}\")\n", " return\n", "\n", " \n", " stats_list = []\n", " for category in ['health', 'police', 'fire','schools']:\n", " shapefile_path = os.path.join(output_folder, f\"{city}_osm_{category}\", f\"{city}_osm_{category}.shp\")\n", " try:\n", " amenities = gpd.read_file(shapefile_path)\n", "\n", " with rasterio.open(pu_path) as src:\n", " affine = src.transform\n", "\n", " stats = zonal_stats(amenities, merged_pluvial_data, nodata=0, affine=affine, stats=[\"count\"], geojson_out=True)\n", "\n", " count_overlap = sum([feature[\"properties\"][\"count\"] for feature in stats])\n", " total_count = len(amenities)\n", " percentage = (count_overlap / total_count) * 100\n", "\n", " stats_list.append({'Category': category, 'Overlap': count_overlap, 'Total': total_count, 'Percentage': percentage})\n", "\n", " print(f\"{count_overlap} of {total_count} ({percentage:.2f}%) {category} are located in a riverine flood risk zone with a minimum depth of 15 cm.\")\n", "\n", " except Exception as e:\n", " if category == 'fire':\n", " print(\"Fire stations do 
not exist\")\n", " else:\n", " print(f\"Error processing {category} shapefile: {e}\")\n", " \n", " df = pd.DataFrame(stats_list)\n", " \n", " \n", " excel_file = os.path.join(output_folder, 'pu_osmpt.xlsx')\n", " df.to_excel(excel_file, index=False)\n", " print(f\"Statistics saved to {excel_file}\")\n", "\n", "# Suppress warnings\n", "warnings.filterwarnings(\"ignore\", category=rasterio.errors.NotGeoreferencedWarning)\n", "warnings.filterwarnings(\"ignore\", category=RuntimeWarning)\n", "\n", "\n" ] }, { "cell_type": "code", "execution_count": 33, "id": "052f5f2f", "metadata": {}, "outputs": [ { "data": { "text/plain": [ "'\\ndef get_pu_roads():\\n with open(\"global_inputs.yml\", \\'r\\') as f:\\n global_inputs = yaml.safe_load(f)\\n\\n # File paths\\n pu_path = os.path.join(output_folder, f\"{city}_pluvial_2020_lt1.tif\")\\n roads_path = os.path.join(\\'output/goris_edges.shp\\')\\n\\n # Read pluvial data\\n try:\\n with rasterio.open(pu_path) as pu_src:\\n merged_pluvial_data = pu_src.read(1)\\n transform = pu_src.transform # Get the affine transformation\\n nodata_value = pu_src.nodata # Get the nodata value\\n except Exception as e:\\n print(f\"Error opening merged pluvial data raster: {e}\")\\n return\\n\\n # Read roads data\\n try:\\n roads = gpd.read_file(roads_path)\\n except Exception as e:\\n print(f\"Error reading road network shapefile: {e}\")\\n return\\n # Filter highways from the roads dataset\\n highways = roads[roads[\\'highway\\'] == \\'primary\\']\\n\\n # Convert length to meters\\n highways[\\'length_m\\'] = highways[\\'geometry\\'].length\\n\\n # Get the total length of highways\\n total_length_highways = highways[\\'length_m\\'].sum()\\n\\n # Perform zonal stats to get length of flooded highways\\n try:\\n stats = zonal_stats(highways.geometry, merged_pluvial_data, affine=transform, nodata=nodata_value, stats=\"max\")\\n total_length_flooded_highways = sum([s[\\'max\\'] for s in stats if s[\\'max\\'] is not None])\\n print(f\"Total length of 
highways flooded due to pluvial conditions: {total_length_flooded_highways:.2f} meters\")\\n \\n # Calculate percentage of flooded roads\\n if total_length_highways > 0:\\n percentage_flooded_roads = (total_length_flooded_highways / total_length_highways) * 100\\n print(f\"Percentage of highways flooded due to pluvial conditions: {percentage_flooded_roads:.2f}%\")\\n else:\\n print(\"No highways found.\")\\n \\n except Exception as e:\\n print(f\"Error calculating zonal statistics: {e}\")\\n return\\n\\n# Call the function\\nget_pu_roads()\\n'" ] }, "execution_count": 33, "metadata": {}, "output_type": "execute_result" } ], "source": [ "#PU Roads\n", "'''\n", "def get_pu_roads():\n", " with open(\"global_inputs.yml\", 'r') as f:\n", " global_inputs = yaml.safe_load(f)\n", "\n", " # File paths\n", " pu_path = os.path.join(output_folder, f\"{city}_pluvial_2020_lt1.tif\")\n", " roads_path = os.path.join('output/goris_edges.shp')\n", "\n", " # Read pluvial data\n", " try:\n", " with rasterio.open(pu_path) as pu_src:\n", " merged_pluvial_data = pu_src.read(1)\n", " transform = pu_src.transform # Get the affine transformation\n", " nodata_value = pu_src.nodata # Get the nodata value\n", " except Exception as e:\n", " print(f\"Error opening merged pluvial data raster: {e}\")\n", " return\n", "\n", " # Read roads data\n", " try:\n", " roads = gpd.read_file(roads_path)\n", " except Exception as e:\n", " print(f\"Error reading road network shapefile: {e}\")\n", " return\n", " # Filter highways from the roads dataset\n", " highways = roads[roads['highway'] == 'primary']\n", "\n", " # Convert length to meters\n", " highways['length_m'] = highways['geometry'].length\n", "\n", " # Get the total length of highways\n", " total_length_highways = highways['length_m'].sum()\n", "\n", " # Perform zonal stats to get length of flooded highways\n", " try:\n", " stats = zonal_stats(highways.geometry, merged_pluvial_data, affine=transform, nodata=nodata_value, stats=\"max\")\n", " 
total_length_flooded_highways = sum([s['max'] for s in stats if s['max'] is not None])\n", " print(f\"Total length of highways flooded due to pluvial conditions: {total_length_flooded_highways:.2f} meters\")\n", " \n", " # Calculate percentage of flooded roads\n", " if total_length_highways > 0:\n", " percentage_flooded_roads = (total_length_flooded_highways / total_length_highways) * 100\n", " print(f\"Percentage of highways flooded due to pluvial conditions: {percentage_flooded_roads:.2f}%\")\n", " else:\n", " print(\"No highways found.\")\n", " \n", " except Exception as e:\n", " print(f\"Error calculating zonal statistics: {e}\")\n", " return\n", "\n", "# Call the function\n", "get_pu_roads()\n", "'''" ] }, { "cell_type": "code", "execution_count": 32, "id": "dca2366c", "metadata": {}, "outputs": [], "source": [ "#Pluvial Roads\n", "def get_pu_roads():\n", " with open(\"global_inputs.yml\", 'r') as f:\n", " global_inputs = yaml.safe_load(f)\n", "\n", " pu_path = os.path.join(output_folder, f\"{city}_pluvial_2020_lt1.tif\")\n", " roads_path = os.path.join('output/goris_edges.shp')\n", "\n", " try:\n", " with rasterio.open(pu_path) as pu_src:\n", " merged_pluvial_data = pu_src.read(1)\n", " transform = pu_src.transform \n", " nodata_value = pu_src.nodata \n", "\n", " shapes_gen = shapes(merged_pluvial_data, transform=transform)\n", " merged_pluvial_polygons = [shape(shape_item) for shape_item, _ in shapes_gen]\n", " pluvial_geometry = gpd.GeoDataFrame(geometry=merged_pluvial_polygons, crs=pu_src.crs)\n", "\n", " except Exception as e:\n", " print(f\"Error opening merged pluvial data raster: {e}\")\n", " return\n", "\n", " try:\n", " roads = gpd.read_file(roads_path)\n", " except Exception as e:\n", " print(f\"Error reading road network shapefile: {e}\")\n", " return\n", " roads = roads.to_crs(pluvial_geometry.crs)\n", "\n", " # Filter highways with specific keywords in the \"highway\" column\n", " highways_filtered = roads[(roads[\"highway\"] == 'primary') | \n", 
" (roads[\"highway\"] == 'trunk') | \n", " (roads[\"highway\"] == 'motorway')]\n", "\n", " # Explode the filtered highways to smaller fragments\n", " exploded_highways = highways_filtered.explode(index_parts=False)\n", " \n", " intersections = gpd.overlay(exploded_highways, pluvial_geometry, how='intersection')\n", "\n", " intersections['length'] = intersections.length\n", " \n", " total_length = intersections['length'].sum()\n", " \n", " total_length_highways = exploded_highways['length'].sum()\n", "\n", " percentage = (total_length / total_length_highways) * 100\n", " \n", " print(f\"Total length of highways intersecting pluvial data: {total_length:.2f} meters\")\n", " print(f\"Percentage of highways intersecting pluvial data: {percentage:.4f}%\")\n", "\n", "\n", " \n", "\n", " # Plot fluvial data\n", " with rasterio.open(pu_path) as pluvial_src:\n", " show(pluvial_src, ax=ax, cmap='Blues', alpha=0.5)\n", "\n", " # Plot fluvial roads\n", " exploded_highways.plot(ax=ax, facecolor='none', edgecolor='red')\n", " features.plot(ax=ax, facecolor='none', edgecolor='red')\n", " pluvial_geometry.plot(ax=ax, facecolor='none', edgecolor='blue')\n", "\n", " ax.set_title(\"pluvial Roads and pluvial Data\")\n", " ax.set_xlabel(\"Longitude\")\n", " ax.set_ylabel(\"Latitude\")\n", " plt.show()\n", "\n", " \n", " \n", "\n", "\n" ] }, { "cell_type": "code", "execution_count": 31, "id": "2e6085ad", "metadata": {}, "outputs": [], "source": [ "# Fu Amenities\n", "\n", "def get_fu_am():\n", " with open(\"global_inputs.yml\", 'r') as f:\n", " global_inputs = yaml.safe_load(f)\n", " \n", " if menu.get('flood') and menu.get('amenities'): \n", " fu_path = os.path.join(output_folder, f\"{city}_fluvial_2020_lt1.tif\")\n", " try:\n", " with rasterio.open(fu_path) as fu_src:\n", " merged_fluvial_data = fu_src.read(1)\n", " merged_fluvial_transform = fu_src.transform\n", " merged_fluvial_shape = merged_fluvial_data.shape\n", " except Exception as e:\n", " print(f\"Error opening merged fluvial 
data raster: {e}\")\n", " return\n", "\n", " \n", " stats_list = []\n", " for category in ['health', 'police', 'fire','schools']:\n", " shapefile_path = os.path.join(output_folder, f\"{city}_osm_{category}\", f\"{city}_osm_{category}.shp\")\n", " try:\n", " amenities = gpd.read_file(shapefile_path)\n", "\n", " with rasterio.open(fu_path) as src:\n", " affine = src.transform\n", "\n", " stats = zonal_stats(amenities, merged_fluvial_data, nodata=0, affine=affine, stats=[\"count\"], geojson_out=True)\n", "\n", " count_overlap = sum([feature[\"properties\"][\"count\"] for feature in stats])\n", " total_count = len(amenities)\n", " percentage = (count_overlap / total_count) * 100\n", "\n", " stats_list.append({'Category': category, 'Overlap': count_overlap, 'Total': total_count, 'Percentage': percentage})\n", "\n", " print(f\"{count_overlap} of {total_count} ({percentage:.2f}%) {category} are located in a riverine flood risk zone with a minimum depth of 15 cm.\")\n", "\n", " except Exception as e:\n", " if category == 'fire':\n", " print(\"Fire stations do not exist\")\n", " else:\n", " print(f\"Error processing {category} shapefile: {e}\")\n", " \n", " df = pd.DataFrame(stats_list)\n", " \n", " \n", " excel_file = os.path.join(output_folder, 'fu_osmpt.xlsx')\n", " df.to_excel(excel_file, index=False)\n", " print(f\"Statistics saved to {excel_file}\")\n", "\n", "# Suppress warnings\n", "warnings.filterwarnings(\"ignore\", category=rasterio.errors.NotGeoreferencedWarning)\n", "warnings.filterwarnings(\"ignore\", category=RuntimeWarning)\n", "\n", "\n" ] }, { "cell_type": "code", "execution_count": 37, "id": "38eeb286", "metadata": {}, "outputs": [], "source": [ "#FU Roads\n", "\n", "def get_fu_roads():\n", " with open(\"global_inputs.yml\", 'r') as f:\n", " global_inputs = yaml.safe_load(f)\n", "\n", " # File paths\n", " fu_path = os.path.join(output_folder, f\"{city}_fluvial_2020_lt1.tif\")\n", " roads_path = os.path.join('output/goris_edges.shp')\n", "\n", " # Read 
fluvial data\n", " try:\n", " with rasterio.open(fu_path) as fu_src:\n", " merged_fluvial_data = fu_src.read(1)\n", " transform = fu_src.transform # Get the affine transformation\n", " nodata_value = fu_src.nodata # Get the nodata value\n", " except Exception as e:\n", " print(f\"Error opening merged pluvial data raster: {e}\")\n", " return\n", "\n", " # Read roads data\n", " try:\n", " roads = gpd.read_file(roads_path)\n", " except Exception as e:\n", " print(f\"Error reading road network shapefile: {e}\")\n", " return\n", " # Filter highways from the roads dataset\n", " highways = roads[roads['highway'] == 'primary']\n", "\n", " # Convert length to meters\n", " highways['length_m'] = highways['geometry'].length\n", "\n", " # Get the total length of highways\n", " total_length_highways = highways['length_m'].sum()\n", "\n", " # Perform zonal stats to get length of flooded highways\n", " try:\n", " stats = zonal_stats(highways.geometry, merged_fluvial_data, affine=transform, nodata=nodata_value, stats=\"max\")\n", " total_length_flooded_highways = sum([s['max'] for s in stats if s['max'] is not None])\n", " print(f\"Total length of highways flooded due to pluvial conditions: {total_length_flooded_highways:.2f} meters\")\n", " \n", " # Calculate percentage of flooded roads\n", " if total_length_highways > 0:\n", " percentage_flooded_roads = (total_length_flooded_highways / total_length_highways) * 100\n", " print(f\"Percentage of highways flooded due to pluvial conditions: {percentage_flooded_roads:.2f}%\")\n", " else:\n", " print(\"No highways found.\")\n", " \n", " except Exception as e:\n", " print(f\"Error calculating zonal statistics: {e}\")\n", " return\n", " \n", "# Call the function\n", "\n", "\n" ] }, { "cell_type": "code", "execution_count": null, "id": "3c94a5d8-1dda-4792-a5eb-a046bb2ae714", "metadata": {}, "outputs": [], "source": [ "#Comb WSF" ] }, { "cell_type": "code", "execution_count": null, "id": "a37d8e38-56b1-48c4-9f5f-82612fd83d85", "metadata": 
{}, "outputs": [], "source": [ "#Comb Population" ] }, { "cell_type": "code", "execution_count": null, "id": "d38ef97d-b2f8-4dd9-9b50-1004286d3759", "metadata": {}, "outputs": [], "source": [ "#Comb Amenities" ] }, { "cell_type": "code", "execution_count": null, "id": "efca01fb-8d5f-43aa-a0fb-eec6628c2aba", "metadata": {}, "outputs": [], "source": [ "#Coastal WSF" ] }, { "cell_type": "code", "execution_count": null, "id": "515632ca", "metadata": {}, "outputs": [], "source": [ "#Coastal Population" ] }, { "cell_type": "code", "execution_count": null, "id": "70dd0ff9", "metadata": {}, "outputs": [], "source": [ "#Coastal Amenities" ] } ], "metadata": { "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.11.4" } }, "nbformat": 4, "nbformat_minor": 5 }